Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
04/24/17 13:17:43 (7 years ago)
Author:
bwerth
Message:

#2699 checked and reformulated gradient functions for kernels

File:
1 edited

Legend:

Unmodified
Added
Removed
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/RadialBasisFunctions/KernelFunctions/ThinPlatePolysplineKernel.cs

    r14872 r14883  
    4646
    /// <summary>
    /// Evaluates the thin plate polyspline kernel k(r) = r^(2*Beta) * ln(1 + r^Beta)
    /// for the given distance <paramref name="norm"/>.
    /// </summary>
    /// <param name="norm">Distance (norm) between two points; expected non-negative.</param>
    /// <returns>
    /// The kernel value, or <see cref="double.NaN"/> when r^Beta is not strictly
    /// positive (e.g. norm == 0 with Beta &gt; 0), where the logarithmic form is treated as undefined.
    /// </returns>
    protected override double Get(double norm) {
      // Hoist r^Beta: it is needed both for the validity guard and inside the log.
      var poweredNorm = Math.Pow(norm, Beta);
      if (poweredNorm <= 0) return double.NaN;
      return Math.Pow(norm, 2 * Beta) * Math.Log(1 + poweredNorm);
    }
    5151
    /// <summary>
    /// Evaluates the gradient of the thin plate polyspline kernel with respect to Beta:
    /// d/dBeta [ r^(2*Beta) * ln(1 + r^Beta) ]
    ///   = 2*ln(r) * r^(2*Beta) * ln(1 + r^Beta) + r^(3*Beta) * ln(r) / (r^Beta + 1).
    /// </summary>
    /// <param name="norm">Distance (norm) between two points; expected non-negative.</param>
    /// <returns>
    /// The gradient value, or <see cref="double.NaN"/> when r^Beta is not strictly
    /// positive (ln(r) would be undefined at r == 0).
    /// </returns>
    protected override double GetGradient(double norm) {
      var poweredNorm = Math.Pow(norm, Beta);
      if (poweredNorm <= 0) return double.NaN;
      var logNorm = Math.Log(norm);
      // Terms kept in the same evaluation order as the reformulated expression
      // so the floating-point result is unchanged.
      var productTerm = 2 * logNorm * Math.Pow(norm, 2 * Beta) * Math.Log(1 + poweredNorm);
      var quotientTerm = Math.Pow(norm, 3 * Beta) * logNorm / (poweredNorm + 1);
      return productTerm + quotientTerm;
    }
    6056  }
Note: See TracChangeset for help on using the changeset viewer.