
Timestamp:
04/26/17 11:06:51
Author:
bwerth
Message:

#2699 reworked kernel functions (beta is always a scaling factor now), added LU decomposition as a fall-back if the Cholesky decomposition fails

Location:
branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions
Files:
6 edited

Legend:

  (unprefixed)   Unmodified
  + prefix       Added
  - prefix       Removed
  ...            Unchanged lines omitted
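The LU-decomposition fall-back mentioned in the changeset message above is not part of the KernelFunctions files shown below; it concerns solving the regularized Gram system in the kernel ridge regression model. The following is a minimal, self-contained sketch of that pattern only, with hand-rolled decompositions for illustration; the class and method names (GramSolverSketch, TryCholeskySolve, GaussSolve) are made up and do not appear in the branch, which uses library routines instead.

  using System;

  // Illustrative only: try a Cholesky solve first, fall back to LU-style
  // elimination with partial pivoting if the matrix is not (numerically) SPD.
  public static class GramSolverSketch {
    // Solve A*x = b for the regularized Gram matrix A = K + lambda*I.
    public static double[] Solve(double[,] a, double[] b) {
      double[] x;
      if (TryCholeskySolve(a, b, out x)) return x; // fast path for SPD matrices
      return GaussSolve(a, b);                     // fall-back when Cholesky fails
    }

    private static bool TryCholeskySolve(double[,] a, double[] b, out double[] x) {
      int n = b.Length;
      var l = new double[n, n];
      for (int i = 0; i < n; i++) {
        for (int j = 0; j <= i; j++) {
          double s = a[i, j];
          for (int k = 0; k < j; k++) s -= l[i, k] * l[j, k];
          if (i == j) {
            if (s <= 0) { x = null; return false; } // decomposition failed: not positive definite
            l[i, i] = Math.Sqrt(s);
          } else {
            l[i, j] = s / l[j, j];
          }
        }
      }
      var z = new double[n];                       // forward substitution: L*z = b
      for (int i = 0; i < n; i++) {
        double s = b[i];
        for (int k = 0; k < i; k++) s -= l[i, k] * z[k];
        z[i] = s / l[i, i];
      }
      x = new double[n];                           // back substitution: L^T*x = z
      for (int i = n - 1; i >= 0; i--) {
        double s = z[i];
        for (int k = i + 1; k < n; k++) s -= l[k, i] * x[k];
        x[i] = s / l[i, i];
      }
      return true;
    }

    private static double[] GaussSolve(double[,] a, double[] b) {
      int n = b.Length;
      var m = (double[,])a.Clone();
      var r = (double[])b.Clone();
      for (int col = 0; col < n; col++) {
        int piv = col;                             // partial pivoting
        for (int row = col + 1; row < n; row++)
          if (Math.Abs(m[row, col]) > Math.Abs(m[piv, col])) piv = row;
        for (int k = 0; k < n; k++) { var t = m[col, k]; m[col, k] = m[piv, k]; m[piv, k] = t; }
        var tr = r[col]; r[col] = r[piv]; r[piv] = tr;
        for (int row = col + 1; row < n; row++) {
          double f = m[row, col] / m[col, col];
          for (int k = col; k < n; k++) m[row, k] -= f * m[col, k];
          r[row] -= f * r[col];
        }
      }
      var x = new double[n];                       // back substitution on the upper triangle
      for (int i = n - 1; i >= 0; i--) {
        double s = r[i];
        for (int k = i + 1; k < n; k++) s -= m[i, k] * x[k];
        x[i] = s / m[i, i];
      }
      return x;
    }
  }

Cholesky is the natural fast path because the regularized Gram matrix is symmetric and, for positive definite kernels, positive definite; when rounding or a conditionally positive definite kernel breaks that assumption, the pivoted elimination still yields a solution.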
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/CicularKernel.cs

  r14887 → r14891

   namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
     [StorableClass]
  -  [Item("CircularKernel", "A circular kernel function 2*pi*(acos(-d)-d*(1-n²)^(0.5)) where n = ||x-c|| and d = n/beta")]
  +  [Item("CircularKernel", "A circular kernel function 2*pi*(acos(-d)-d*(1-d²)^(0.5)) where n = ||x-c|| and d = n/beta \n  As described in http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/")]
     public class CircularKernel : KernelBase {

  ...
       protected override double Get(double norm) {
         var beta = Beta.Value;
  -      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      if (norm >= beta) return 0;
  +      if (Math.Abs(Beta.Value) < double.Epsilon) return double.NaN;
  +      if (norm >= Beta.Value) return 0;
         var d = norm / beta;
  -      return Math.Acos(-d) - d * Math.Sqrt(1 - d * d) - Math.PI / 2;
  +      return 2 * Math.PI * (Math.Acos(-d) - d * Math.Sqrt(1 - d * d));
       }

  +    // 4*pi*n^3 / (beta^4 * sqrt(1-n^2/beta^2)
       protected override double GetGradient(double norm) {
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
         if (beta < norm) return 0;
  -      return -2 * Math.Pow(norm, 3) / (Math.Pow(beta, 4) * Math.Sqrt(1 - norm * norm / (beta * beta)));
  +      var d = norm / beta;
  +      return -4 * Math.PI * d * d * d / beta * Math.Sqrt(1 - d * d);
       }
     }
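For reference, a stand-alone sketch of the reworked Get above (not part of the changeset; class and method names are made up), using only what the diff shows: d = norm/beta, NaN for beta ≈ 0, zero outside the compact support norm >= beta, and 2*pi*(acos(-d) - d*sqrt(1 - d²)) inside it.

  using System;

  // Illustrative stand-alone version of the reworked circular kernel.
  public static class CircularKernelSketch {
    public static double Evaluate(double norm, double beta) {
      if (Math.Abs(beta) < double.Epsilon) return double.NaN; // beta ~ 0: undefined
      if (norm >= beta) return 0;                             // compact support
      var d = norm / beta;
      return 2 * Math.PI * (Math.Acos(-d) - d * Math.Sqrt(1 - d * d));
    }

    public static void Main() {
      Console.WriteLine(Evaluate(0.0, 2.0)); // pi^2 ~ 9.87 (d = 0)
      Console.WriteLine(Evaluate(3.0, 2.0)); // 0, outside the support
    }
  }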
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/GaussianKernel.cs

  r14887 → r14891

   namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
     [StorableClass]
  -  [Item("GaussianKernel", "A kernel function that uses Gaussian function exp(-||x-c||/beta²). Positive definite beta > 0")]
  +  [Item("GaussianKernel", "A kernel function that uses Gaussian function exp(-n²/beta²). As described in http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/")]
     public class GaussianKernel : KernelBase {

  ...
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      return Math.Exp(-norm * norm / (beta * beta));
  +      var d = norm / beta;
  +      return Math.Exp(-d * d);
       }

  +    //2 * n²/b²* 1/b * exp(-n²/b²)
       protected override double GetGradient(double norm) {
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      return 2 * norm * norm / Math.Pow(beta, 3) * Math.Exp(-norm * norm / (beta * beta));
  +      var d = norm / beta;
  +      return 2 * d * d / beta * Math.Exp(-d * d);
       }
     }
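One way to sanity-check the reworked gradient is to compare it with a central finite difference in beta. The sketch below is not part of the changeset; it reimplements the two formulas from the diff as free functions, and the names, test point, and step size are arbitrary choices.

  using System;

  public static class GaussianGradientCheck {
    static double K(double norm, double beta) {
      var d = norm / beta;
      return Math.Exp(-d * d);                     // Get: exp(-(norm/beta)^2)
    }

    static double DKdBeta(double norm, double beta) {
      var d = norm / beta;
      return 2 * d * d / beta * Math.Exp(-d * d);  // GetGradient: 2*d^2/beta * exp(-d^2)
    }

    public static void Main() {
      double norm = 1.5, beta = 0.8, h = 1e-6;
      double numeric = (K(norm, beta + h) - K(norm, beta - h)) / (2 * h);
      Console.WriteLine("analytic: " + DKdBeta(norm, beta)); // both ~ 0.26
      Console.WriteLine("numeric:  " + numeric);
    }
  }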
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/InverseMultiquadraticKernel.cs

  r14887 → r14891

   using System;
   using HeuristicLab.Common;
  -using HeuristicLab.Core;     
  +using HeuristicLab.Core;
   using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

   namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
     [StorableClass]
  -  [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multi-quadratic function  1 / sqrt(1+||x-c||^2/beta). Positive definite: beta > 0")]
  +  [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multi-quadratic function  1 / sqrt(1+||x-c||²/beta²). Similar to http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/ with beta as a scaling factor.")]
     public class InverseMultiquadraticKernel : KernelBase {
  +
  +    private const double C = 1.0;
       #region HLConstructors & Boilerplate
       [StorableConstructor]
  ...
       private void AfterDeserialization() { }
       protected InverseMultiquadraticKernel(InverseMultiquadraticKernel original, Cloner cloner) : base(original, cloner) { }
  -    public InverseMultiquadraticKernel() {
  -    }
  +    public InverseMultiquadraticKernel() { }
       public override IDeepCloneable Clone(Cloner cloner) {
         return new InverseMultiquadraticKernel(this, cloner);
  ...
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      return 1 / Math.Sqrt(1 + norm * norm / beta);
  +      var d = norm / beta;
  +      return 1 / Math.Sqrt(C + d * d);
       }

  +    //n²/(b³(n²/b² + C)^1.5)
       protected override double GetGradient(double norm) {
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      return norm * norm / (2 * beta * beta * Math.Pow((norm * norm + beta) / beta, 1.5));
  +      var d = norm / beta;
  +      return d * d / (beta * Math.Pow(d * d + C, 1.5));
       }
     }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/MultiquadraticKernel.cs

  r14887 → r14891

   using System;
   using HeuristicLab.Common;
  -using HeuristicLab.Core;           
  +using HeuristicLab.Core;
   using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

  ...
     [StorableClass]
     // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
  -  [Item("MultiquadraticKernel", "A kernel function that uses the multi-quadratic function (sqrt(1+||x-c||²/β).")]
  +  [Item("MultiquadraticKernel", "A kernel function that uses the multi-quadratic function sqrt(1+||x-c||²/beta²). Similar to http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/ with beta as a scaling factor.")]
     public class MultiquadraticKernel : KernelBase {

  +    private const double C = 1.0;
       #region HLConstructors & Boilerplate
       [StorableConstructor]
  ...
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      return Math.Sqrt(1 + norm * norm / beta);
  +      var d = norm / beta;
  +      return Math.Sqrt(C + d * d);
       }

  +    //-n²/(d³*sqrt(C+n²/d²))
       protected override double GetGradient(double norm) {
         var beta = Beta.Value;
         if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  -      var dividend = 2 * beta * beta * Math.Sqrt((beta + norm * norm) / beta);
  -      return -norm * norm / dividend;
  +      var d = norm / beta;
  +      return -d * d / (beta * Math.Sqrt(C + d * d));
       }
     }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/PolysplineKernel.cs

  r14887 → r14891

   using System;
   using HeuristicLab.Common;
  -using HeuristicLab.Core;         
  +using HeuristicLab.Core;
  +using HeuristicLab.Data;
  +using HeuristicLab.Parameters;
   using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

  ...
     [StorableClass]
     // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
  -  [Item("PolysplineKernel", "A kernel function that uses the poly-spline function ||x-c||^Beta.")]
  +  [Item("PolysplineKernel", "A kernel function that uses the polyharmonic function (||x-c||/Beta)^Degree as given in http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf with beta as a scaling parameters.")]
     public class PolysplineKernel : KernelBase {
  +
  +    #region Parameternames
  +    private const string DegreeParameterName = "Degree";
  +    #endregion
  +    #region Parameterproperties
  +    public IFixedValueParameter<DoubleValue> DegreeParameter
  +    {
  +      get { return Parameters[DegreeParameterName] as IFixedValueParameter<DoubleValue>; }
  +    }
  +    #endregion
  +    #region Properties
  +    public DoubleValue Degree
  +    {
  +      get { return DegreeParameter.Value; }
  +    }
  +    #endregion

       #region HLConstructors & Boilerplate
  ...
       [StorableHook(HookType.AfterDeserialization)]
       private void AfterDeserialization() { }
  -    protected PolysplineKernel(PolysplineKernel original, Cloner cloner)
  -                : base(original, cloner) { }
  +    protected PolysplineKernel(PolysplineKernel original, Cloner cloner) : base(original, cloner) { }
       public PolysplineKernel() {
  +      Parameters.Add(new FixedValueParameter<DoubleValue>(DegreeParameterName, "The degree of the kernel. Needs to be greater than zero.", new DoubleValue(1.0)));
       }
       public override IDeepCloneable Clone(Cloner cloner) {
  ...

       protected override double Get(double norm) {
  -      return Math.Pow(norm, Beta.Value);
  +      var beta = Beta.Value;
  +      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  +      var d = norm / beta;
  +      return Math.Pow(d, Degree.Value);
       }

  +    //-degree/beta * (norm/beta)^degree
       protected override double GetGradient(double norm) {
  -      return Math.Pow(norm, Beta.Value) * Math.Log(norm);
  +      var beta = Beta.Value;
  +      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  +      var d = norm / beta;
  +      return -Degree.Value / beta * Math.Pow(d, Degree.Value);
       }
     }
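With the new Degree parameter, beta only rescales the norm while Degree controls the shape of the polyharmonic kernel. A stand-alone sketch of the reworked formulas (not part of the changeset; class and method names are made up), evaluated for a few degrees at a fixed norm and beta:

  using System;

  public static class PolysplineSketch {
    static double K(double norm, double beta, double degree) {
      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
      var d = norm / beta;
      return Math.Pow(d, degree);                   // Get: (norm/beta)^degree
    }

    static double DKdBeta(double norm, double beta, double degree) {
      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
      var d = norm / beta;
      return -degree / beta * Math.Pow(d, degree);  // GetGradient: -degree/beta * d^degree
    }

    public static void Main() {
      foreach (var degree in new[] { 1.0, 2.0, 3.0 })     // norm = 2, beta = 0.5 => d = 4
        Console.WriteLine("degree=" + degree
          + ": k=" + K(2.0, 0.5, degree)                  // 4, 16, 64
          + ", dk/dbeta=" + DKdBeta(2.0, 0.5, degree));   // -8, -64, -384
    }
  }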
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/ThinPlatePolysplineKernel.cs

  r14887 → r14891

   using HeuristicLab.Common;
   using HeuristicLab.Core;
  +using HeuristicLab.Data;
  +using HeuristicLab.Parameters;
   using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

  ...
     [StorableClass]
     // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
  -  [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function ||x-c||^(2*Beta)*log(||x-c||^Beta)")]
  +  [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function (||x-c||/Beta)^(Degree)*log(||x-c||/Beta) as described in \"Thin-Plate Spline Radial Basis Function Scheme for Advection-Diffusion Problems\" with beta as a scaling parameter.")]
     public class ThinPlatePolysplineKernel : KernelBase {
  +
  +    #region Parameternames
  +    private const string DegreeParameterName = "Degree";
  +    #endregion
  +    #region Parameterproperties
  +    public IFixedValueParameter<DoubleValue> DegreeParameter
  +    {
  +      get { return Parameters[DegreeParameterName] as IFixedValueParameter<DoubleValue>; }
  +    }
  +    #endregion
  +    #region Properties
  +    public DoubleValue Degree
  +    {
  +      get { return DegreeParameter.Value; }
  +    }
  +    #endregion
  +
       #region HLConstructors & Boilerplate
       [StorableConstructor]
  ...
       protected ThinPlatePolysplineKernel(ThinPlatePolysplineKernel original, Cloner cloner) : base(original, cloner) { }
       public ThinPlatePolysplineKernel() {
  +      Parameters.Add(new FixedValueParameter<DoubleValue>(DegreeParameterName, "The degree of the kernel. Needs to be greater than zero.", new DoubleValue(2.0)));
       }
       public override IDeepCloneable Clone(Cloner cloner) {
  ...
       protected override double Get(double norm) {
         var beta = Beta.Value;
  -      if (Math.Pow(norm, beta) < 0) return double.NaN;
  -      return Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta));
  +      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  +      var d = norm / beta;
  +      if (Math.Abs(d) < double.Epsilon) return 0;
  +      return Math.Pow(d, Degree.Value) * Math.Log(d);
       }

  +    // (Degree/beta) * (norm/beta)^Degree * log(norm/beta)
       protected override double GetGradient(double norm) {
         var beta = Beta.Value;
  -      if (Math.Pow(norm, beta) <= 0) return double.NaN;
  -      return 2 * Math.Log(norm) * Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta)) + Math.Pow(norm, 3 * beta) * Math.Log(norm) / (Math.Pow(norm, beta) + 1);
  +      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
  +      var d = norm / beta;
  +      if (Math.Abs(d) < double.Epsilon) return 0;
  +      return Degree.Value / beta * Math.Pow(d, Degree.Value) * Math.Log(d);
       }
     }