Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
04/24/17 18:31:44 (8 years ago)
Author:
gkronber
Message:

#2699: worked on kernel ridge regression. moved beta parameter to algorithm. reintroduced IKernel interface to restrict choice of kernel in kernel ridge regression. speed-up by Cholesky decomposition and optimization of the calculation of the covariance matrix.

Location:
branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions
Files:
1 added
1 deleted
7 edited

Legend:

Unmodified
Added
Removed
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/CicularKernel.cs

    r14883 r14887  
    2323using HeuristicLab.Common;
    2424using HeuristicLab.Core;
    25 using HeuristicLab.Data;
    26 using HeuristicLab.Parameters;
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
    29 namespace HeuristicLab.Algorithms.DataAnalysis {
     27namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3028  [StorableClass]
    31   [Item("CircularKernel", "A circular kernel function")]
     29  [Item("CircularKernel", "A circular kernel function 2*pi*(acos(-d)-d*(1-n²)^(0.5)) where n = ||x-c|| and d = n/beta")]
    3230  public class CircularKernel : KernelBase {
    3331
     
    3937    protected CircularKernel(CircularKernel original, Cloner cloner) : base(original, cloner) { }
    4038    public CircularKernel() {
    41       Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernel function 2*pi*(acos(-d)-d*(1-n²)^(0.5)) where n = ||x-c|| and d = n/beta", new DoubleValue(2)));
    4239    }
    4340    public override IDeepCloneable Clone(Cloner cloner) {
     
    4744
    4845    protected override double Get(double norm) {
    49       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    50       if (norm >= Beta) return 0;
    51       var d = norm / Beta;
     46      var beta = Beta.Value;
     47      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     48      if (norm >= beta) return 0;
     49      var d = norm / beta;
    5250      return Math.Acos(-d) - d * Math.Sqrt(1 - d * d) - Math.PI / 2;
    5351    }
    5452
    5553    protected override double GetGradient(double norm) {
    56       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    57       if (Beta < norm) return 0;
    58       return -2*Math.Pow(norm,3)/(Math.Pow(Beta,4)*Math.Sqrt(1-norm*norm/(Beta*Beta)));
     54      var beta = Beta.Value;
     55      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     56      if (beta < norm) return 0;
     57      return -2 * Math.Pow(norm, 3) / (Math.Pow(beta, 4) * Math.Sqrt(1 - norm * norm / (beta * beta)));
    5958    }
    6059  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/GaussianKernel.cs

    r14883 r14887  
    2121
    2222using System;
     23
    2324using HeuristicLab.Common;
    2425using HeuristicLab.Core;
    25 using HeuristicLab.Data;
    26 using HeuristicLab.Parameters;
     26
    2727using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2828
    29 namespace HeuristicLab.Algorithms.DataAnalysis {
     29namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3030  [StorableClass]
    31   [Item("GaussianKernel", "A kernel function that uses Gaussian function")]
     31  [Item("GaussianKernel", "A kernel function that uses Gaussian function exp(-||x-c||/beta²). Positive definite beta > 0")]
    3232  public class GaussianKernel : KernelBase {
    3333
     
    3939    protected GaussianKernel(GaussianKernel original, Cloner cloner) : base(original, cloner) { }
    4040    public GaussianKernel() {
    41       Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernelfunction exp(-||x-c||/beta²)", new DoubleValue(2)));
    4241    }
    4342    public override IDeepCloneable Clone(Cloner cloner) {
     
    4746
    4847    protected override double Get(double norm) {
    49       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    50       return Math.Exp(-norm * norm / (Beta * Beta));
     48      var beta = Beta.Value;
     49      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     50      return Math.Exp(-norm * norm / (beta * beta));
    5151    }
    5252
    5353    protected override double GetGradient(double norm) {
    54       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    55       return 2 * norm * norm / Math.Pow(Beta, 3) * Math.Exp(-norm * norm / (Beta * Beta));
     54      var beta = Beta.Value;
     55      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     56      return 2 * norm * norm / Math.Pow(beta, 3) * Math.Exp(-norm * norm / (beta * beta));
    5657    }
    5758  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/InverseMultiquadraticKernel.cs

    r14883 r14887  
    2222using System;
    2323using HeuristicLab.Common;
    24 using HeuristicLab.Core;
    25 using HeuristicLab.Data;
    26 using HeuristicLab.Parameters;
     24using HeuristicLab.Core;     
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
    29 namespace HeuristicLab.Algorithms.DataAnalysis {
     27namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3028  [StorableClass]
    31   [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multiquadratic function")]
     29  [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multi-quadratic function  1 / sqrt(1+||x-c||^2/beta). Positive definite: beta > 0")]
    3230  public class InverseMultiquadraticKernel : KernelBase {
    3331    #region HLConstructors & Boilerplate
     
    3836    protected InverseMultiquadraticKernel(InverseMultiquadraticKernel original, Cloner cloner) : base(original, cloner) { }
    3937    public InverseMultiquadraticKernel() {
    40       Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernel function 1 / sqrt(1+||x-c||^2/beta)", new DoubleValue(2)));
    4138    }
    4239    public override IDeepCloneable Clone(Cloner cloner) {
     
    4643
    4744    protected override double Get(double norm) {
    48       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    49       return 1 / Math.Sqrt(1 + norm * norm / Beta);
     45      var beta = Beta.Value;
     46      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     47      return 1 / Math.Sqrt(1 + norm * norm / beta);
    5048    }
    5149
    5250    protected override double GetGradient(double norm) {
    53       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    54       return norm * norm / (2 * Beta * Beta * Math.Pow((norm * norm + Beta) / Beta, 1.5));
     51      var beta = Beta.Value;
     52      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     53      return norm * norm / (2 * beta * beta * Math.Pow((norm * norm + beta) / beta, 1.5));
    5554    }
    5655  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/KernelBase.cs

    r14872 r14887  
    2121
    2222using System;
    23 using System.Collections;
    2423using System.Collections.Generic;
     24using System.Linq;
    2525using HeuristicLab.Common;
    2626using HeuristicLab.Core;
    27 using HeuristicLab.Data;
    2827using HeuristicLab.Parameters;
    2928using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3029
    31 namespace HeuristicLab.Algorithms.DataAnalysis {
     30namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3231  [StorableClass]
    33   public abstract class KernelBase : ParameterizedNamedItem, ICovarianceFunction {
     32  public abstract class KernelBase : ParameterizedNamedItem, IKernel {
    3433
    3534    #region Parameternames
    3635    private const string DistanceParameterName = "Distance";
    37     protected const string BetaParameterName = "Beta";
    3836    #endregion
    3937    #region Parameterproperties
     
    4240    }
    4341
    44     public IFixedValueParameter<DoubleValue> BetaParameter {
    45       get { return Parameters[BetaParameterName] as FixedValueParameter<DoubleValue>; }
    46     }
    47 
     42    [Storable]
     43    public double? Beta { get; set; }
    4844    #endregion
    4945    #region Properties
    5046    public IDistance Distance {
    5147      get { return DistanceParameter.Value; }
    52     }
    53 
    54     public double Beta {
    55       get { return BetaParameter.Value.Value; }
     48      set { DistanceParameter.Value = value; }
    5649    }
    5750
     
    6457
    6558    protected KernelBase(KernelBase original, Cloner cloner)
    66       : base(original, cloner) { }
     59      : base(original, cloner) {
     60      Beta = original.Beta;
     61    }
    6762
    6863    protected KernelBase() {
     
    7873
    7974    public int GetNumberOfParameters(int numberOfVariables) {
    80       return 1;
     75      return Beta.HasValue ? 0 : 1;
    8176    }
    8277
    8378    public void SetParameter(double[] p) {
    84       if (p != null && p.Length == 1) BetaParameter.Value.Value = p[0];
     79      if (p != null && p.Length == 1) Beta = new double?(p[0]);
    8580    }
    8681
    8782    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, int[] columnIndices) {
    88       if (p == null || p.Length != 1) throw new ArgumentException("Illegal parametrization");
     83      if (p.Length != GetNumberOfParameters(columnIndices.Length)) throw new ArgumentException("Illegal parametrization");
    8984      var myClone = (KernelBase)Clone(new Cloner());
    90       myClone.BetaParameter.Value.Value = p[0];
     85      myClone.SetParameter(p);
    9186      var cov = new ParameterizedCovarianceFunction {
    9287        Covariance = (x, i, j) => myClone.Get(GetNorm(x, x, i, j, columnIndices)),
     
    10297      var dist = Distance as IDistance<IEnumerable<double>>;
    10398      if (dist == null) throw new ArgumentException("The distance needs to apply to double vectors");
    104       var r1 = new IndexedEnumerable(x, i, columnIndices);
    105       var r2 = new IndexedEnumerable(xt, j, columnIndices);
     99      var r1 = columnIndices.Select(c => x[i, c]);
     100      var r2 = columnIndices.Select(c => xt[j, c]);
    106101      return dist.Get(r1, r2);
    107     }
    108     internal class IndexedEnumerable : IEnumerable<double> {
    109       private readonly double[,] data;
    110       private readonly int row;
    111       private readonly int[] columnIndices;
    112 
    113       public IndexedEnumerable(double[,] data, int row, int[] columnIndices) {
    114         this.data = data;
    115         this.row = row;
    116         this.columnIndices = columnIndices;
    117       }
    118 
    119       public IEnumerator<double> GetEnumerator() {
    120         return new IndexedEnumerator(data, row, columnIndices);
    121       }
    122 
    123       IEnumerator IEnumerable.GetEnumerator() {
    124         return new IndexedEnumerator(data, row, columnIndices);
    125       }
    126     }
    127     internal class IndexedEnumerator : IEnumerator<double> {
    128       private readonly IEnumerator<int> column;
    129       private readonly double[,] data;
    130       private readonly int row;
    131 
    132       public IndexedEnumerator(double[,] data, int row, int[] columnIndices) {
    133         this.data = data;
    134         this.row = row;
    135         column = ((IEnumerable<int>)columnIndices).GetEnumerator();
    136       }
    137 
    138       public double Current {
    139         get { return data[row, column.Current]; }
    140       }
    141 
    142       object IEnumerator.Current {
    143         get {
    144           return data[row, column.Current];
    145         }
    146       }
    147 
    148       public void Dispose() { }
    149 
    150       public bool MoveNext() {
    151         return column.MoveNext();
    152       }
    153 
    154       public void Reset() {
    155         column.Reset();
    156       }
    157102    }
    158103  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/MultiquadraticKernel.cs

    r14883 r14887  
    2222using System;
    2323using HeuristicLab.Common;
    24 using HeuristicLab.Core;
    25 using HeuristicLab.Data;
    26 using HeuristicLab.Parameters;
     24using HeuristicLab.Core;           
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
    29 namespace HeuristicLab.Algorithms.DataAnalysis {
     27namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3028  [StorableClass]
    31   [Item("MultiquadraticKernel", "A kernel function that uses the multiquadratic function")]
     29  // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
     30  [Item("MultiquadraticKernel", "A kernel function that uses the multi-quadratic function (sqrt(1+||x-c||²/β).")]
    3231  public class MultiquadraticKernel : KernelBase {
    3332
     
    4140
    4241    public MultiquadraticKernel() {
    43       Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernel function sqrt(1+||x-c||²/beta)", new DoubleValue(2)));
    4442    }
    4543    public override IDeepCloneable Clone(Cloner cloner) {
     
    4846    #endregion
    4947    protected override double Get(double norm) {
    50       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    51       return Math.Sqrt(1 + norm * norm / Beta);
     48      var beta = Beta.Value;
     49      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     50      return Math.Sqrt(1 + norm * norm / beta);
    5251    }
    5352
    5453    protected override double GetGradient(double norm) {
    55       if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    56       var dividend = 2 * Beta * Beta * Math.Sqrt((Beta + norm * norm) / Beta);
     54      var beta = Beta.Value;
     55      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     56      var dividend = 2 * beta * beta * Math.Sqrt((beta + norm * norm) / beta);
    5757      return -norm * norm / dividend;
    5858    }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/PolysplineKernel.cs

    r14883 r14887  
    2222using System;
    2323using HeuristicLab.Common;
    24 using HeuristicLab.Core;
    25 using HeuristicLab.Data;
    26 using HeuristicLab.Parameters;
     24using HeuristicLab.Core;         
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
    29 namespace HeuristicLab.Algorithms.DataAnalysis {
     27namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3028  [StorableClass]
    31   [Item("PolysplineKernel", "A kernel function that uses the multiquadratic function")]
     29  // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
     30  [Item("PolysplineKernel", "A kernel function that uses the poly-spline function ||x-c||^Beta.")]
    3231  public class PolysplineKernel : KernelBase {
    3332
     
    4039                : base(original, cloner) { }
    4140    public PolysplineKernel() {
    42       Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The Beta in the kernelfunction ||x-c||^Beta", new DoubleValue(1.5)));
    4341    }
    4442    public override IDeepCloneable Clone(Cloner cloner) {
     
    4846
    4947    protected override double Get(double norm) {
    50       return Math.Pow(norm, Beta);
     48      return Math.Pow(norm, Beta.Value);
    5149    }
    5250
    5351    protected override double GetGradient(double norm) {
    54       return Math.Pow(norm, Beta) * Math.Log(norm);
     52      return Math.Pow(norm, Beta.Value) * Math.Log(norm);
    5553    }
    5654  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/ThinPlatePolysplineKernel.cs

    r14883 r14887  
    2323using HeuristicLab.Common;
    2424using HeuristicLab.Core;
    25 using HeuristicLab.Data;
    26 using HeuristicLab.Parameters;
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
    29 namespace HeuristicLab.Algorithms.DataAnalysis {
     27namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3028  [StorableClass]
    31   [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function")]
     29  // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
     30  [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function ||x-c||^(2*Beta)*log(||x-c||^Beta)")]
    3231  public class ThinPlatePolysplineKernel : KernelBase {
    3332    #region HLConstructors & Boilerplate
     
    3837    protected ThinPlatePolysplineKernel(ThinPlatePolysplineKernel original, Cloner cloner) : base(original, cloner) { }
    3938    public ThinPlatePolysplineKernel() {
    40       Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The Beta in the kernelfunction ||x-c||^(2*Beta)*log(||x-c||^Beta)", new DoubleValue(1)));
    4139    }
    4240    public override IDeepCloneable Clone(Cloner cloner) {
     
    4644
    4745    protected override double Get(double norm) {
    48       if (Math.Pow(norm, Beta) <= 0) return double.NaN;
    49       return Math.Pow(norm, 2 * Beta) * Math.Log(1 + Math.Pow(norm, Beta));
     46      var beta = Beta.Value;
     47      if (Math.Pow(norm, beta) < 0) return double.NaN;
     48      return Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta));
    5049    }
    5150
    5251    protected override double GetGradient(double norm) {
    53       if (Math.Pow(norm, Beta) <= 0) return double.NaN;
    54       return 2 * Math.Log(norm) * Math.Pow(norm, 2 * Beta) * Math.Log(1 + Math.Pow(norm, Beta)) + Math.Pow(norm, 3 * Beta) * Math.Log(norm) / (Math.Pow(norm, Beta) + 1);
     52      var beta = Beta.Value;
     53      if (Math.Pow(norm, beta) <= 0) return double.NaN;
     54      return 2 * Math.Log(norm) * Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta)) + Math.Pow(norm, 3 * beta) * Math.Log(norm) / (Math.Pow(norm, beta) + 1);
    5555    }
    5656  }
Note: See TracChangeset for help on using the changeset viewer.