
Changeset 14887 for branches


Timestamp: 04/24/17 18:31:44 (7 years ago)
Author: gkronber
Message: #2699: worked on kernel ridge regression. Moved the beta parameter to the algorithm. Reintroduced the IKernel interface to restrict the choice of kernel in kernel ridge regression. Sped up training via Cholesky decomposition and an optimized calculation of the covariance matrix.
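For context on the last point: kernel ridge regression fits the coefficient vector alpha by solving a single linear system whose matrix is symmetric positive definite for lambda > 0 and a positive definite kernel, which is exactly what makes a Cholesky factorization applicable. A standard formulation (added here for readability, not part of the changeset; K is the Gram matrix over the training points x_1..x_n):

    \alpha = (K + \lambda I)^{-1} y, \qquad K_{ij} = k(x_i, x_j), \qquad \hat{f}(x) = \sum_{j=1}^{n} \alpha_j \, k(x_j, x)

Factoring K + \lambda I once (about n³/3 flops) and back-substituting replaces the previous least-squares solve followed by an explicit matrix inversion.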

Location: branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4
Files: 1 added, 2 deleted, 10 edited

  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r14885 r14887
          <Compile Include="KernelRidgeRegression\KernelFunctions\CicularKernel.cs" />
          <Compile Include="KernelRidgeRegression\KernelFunctions\GaussianKernel.cs" />
    +     <Compile Include="KernelRidgeRegression\KernelFunctions\IKernel.cs" />
          <Compile Include="KernelRidgeRegression\KernelFunctions\InverseMultiquadraticKernel.cs" />
          <Compile Include="KernelRidgeRegression\KernelFunctions\KernelBase.cs" />
    -     <Compile Include="KernelRidgeRegression\KernelFunctions\LaplacianKernel.cs" />
          <Compile Include="KernelRidgeRegression\KernelFunctions\MultiquadraticKernel.cs" />
          <Compile Include="KernelRidgeRegression\KernelFunctions\PolysplineKernel.cs" />
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/CicularKernel.cs

    r14883 r14887
      using HeuristicLab.Common;
      using HeuristicLab.Core;
    - using HeuristicLab.Data;
    - using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("CircularKernel", "A circular kernel function")]
    +   [Item("CircularKernel", "A circular kernel function 2*pi*(acos(-d)-d*(1-n²)^(0.5)) where n = ||x-c|| and d = n/beta")]
        public class CircularKernel : KernelBase {
    …
        protected CircularKernel(CircularKernel original, Cloner cloner) : base(original, cloner) { }
        public CircularKernel() {
    -     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernel function 2*pi*(acos(-d)-d*(1-n²)^(0.5)) where n = ||x-c|| and d = n/beta", new DoubleValue(2)));
        }
        public override IDeepCloneable Clone(Cloner cloner) {
    …
        protected override double Get(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     if (norm >= Beta) return 0;
    -     var d = norm / Beta;
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     if (norm >= beta) return 0;
    +     var d = norm / beta;
          return Math.Acos(-d) - d * Math.Sqrt(1 - d * d) - Math.PI / 2;
        }

        protected override double GetGradient(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     if (Beta < norm) return 0;
    -     return -2*Math.Pow(norm,3)/(Math.Pow(Beta,4)*Math.Sqrt(1-norm*norm/(Beta*Beta)));
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     if (beta < norm) return 0;
    +     return -2 * Math.Pow(norm, 3) / (Math.Pow(beta, 4) * Math.Sqrt(1 - norm * norm / (beta * beta)));
        }
      }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/GaussianKernel.cs

    r14883 r14887

      using System;
    +
      using HeuristicLab.Common;
      using HeuristicLab.Core;
    - using HeuristicLab.Data;
    - using HeuristicLab.Parameters;
    +
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("GaussianKernel", "A kernel function that uses Gaussian function")]
    +   [Item("GaussianKernel", "A kernel function that uses Gaussian function exp(-||x-c||/beta²). Positive definite beta > 0")]
        public class GaussianKernel : KernelBase {
    …
        protected GaussianKernel(GaussianKernel original, Cloner cloner) : base(original, cloner) { }
        public GaussianKernel() {
    -     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernelfunction exp(-||x-c||/beta²)", new DoubleValue(2)));
        }
        public override IDeepCloneable Clone(Cloner cloner) {
    …
        protected override double Get(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     return Math.Exp(-norm * norm / (Beta * Beta));
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     return Math.Exp(-norm * norm / (beta * beta));
        }

        protected override double GetGradient(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     return 2 * norm * norm / Math.Pow(Beta, 3) * Math.Exp(-norm * norm / (Beta * Beta));
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     return 2 * norm * norm / Math.Pow(beta, 3) * Math.Exp(-norm * norm / (beta * beta));
        }
      }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/InverseMultiquadraticKernel.cs

    r14883 r14887
      using System;
      using HeuristicLab.Common;
    - using HeuristicLab.Core;
    - using HeuristicLab.Data;
    - using HeuristicLab.Parameters;
    + using HeuristicLab.Core;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multiquadratic function")]
    +   [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multi-quadratic function 1 / sqrt(1+||x-c||^2/beta). Positive definite: beta > 0")]
        public class InverseMultiquadraticKernel : KernelBase {
        #region HLConstructors & Boilerplate
    …
        protected InverseMultiquadraticKernel(InverseMultiquadraticKernel original, Cloner cloner) : base(original, cloner) { }
        public InverseMultiquadraticKernel() {
    -     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernel function 1 / sqrt(1+||x-c||^2/beta)", new DoubleValue(2)));
        }
        public override IDeepCloneable Clone(Cloner cloner) {
    …
        protected override double Get(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     return 1 / Math.Sqrt(1 + norm * norm / Beta);
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     return 1 / Math.Sqrt(1 + norm * norm / beta);
        }

        protected override double GetGradient(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     return norm * norm / (2 * Beta * Beta * Math.Pow((norm * norm + Beta) / Beta, 1.5));
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     return norm * norm / (2 * beta * beta * Math.Pow((norm * norm + beta) / beta, 1.5));
        }
      }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/KernelBase.cs

    r14872 r14887

      using System;
    - using System.Collections;
      using System.Collections.Generic;
    + using System.Linq;
      using HeuristicLab.Common;
      using HeuristicLab.Core;
    - using HeuristicLab.Data;
      using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   public abstract class KernelBase : ParameterizedNamedItem, ICovarianceFunction {
    +   public abstract class KernelBase : ParameterizedNamedItem, IKernel {

        #region Parameternames
        private const string DistanceParameterName = "Distance";
    -   protected const string BetaParameterName = "Beta";
        #endregion
        #region Parameterproperties
    …
        }

    -   public IFixedValueParameter<DoubleValue> BetaParameter {
    -     get { return Parameters[BetaParameterName] as FixedValueParameter<DoubleValue>; }
    -   }
    -
    +   [Storable]
    +   public double? Beta { get; set; }
        #endregion
        #region Properties
        public IDistance Distance {
          get { return DistanceParameter.Value; }
    -   }
    -
    -   public double Beta {
    -     get { return BetaParameter.Value.Value; }
    +     set { DistanceParameter.Value = value; }
        }
    …
        protected KernelBase(KernelBase original, Cloner cloner)
    -     : base(original, cloner) { }
    +     : base(original, cloner) {
    +     Beta = original.Beta;
    +   }

        protected KernelBase() {
    …
        public int GetNumberOfParameters(int numberOfVariables) {
    -     return 1;
    +     return Beta.HasValue ? 0 : 1;
        }

        public void SetParameter(double[] p) {
    -     if (p != null && p.Length == 1) BetaParameter.Value.Value = p[0];
    +     if (p != null && p.Length == 1) Beta = new double?(p[0]);
        }

        public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, int[] columnIndices) {
    -     if (p == null || p.Length != 1) throw new ArgumentException("Illegal parametrization");
    +     if (p.Length != GetNumberOfParameters(columnIndices.Length)) throw new ArgumentException("Illegal parametrization");
          var myClone = (KernelBase)Clone(new Cloner());
    -     myClone.BetaParameter.Value.Value = p[0];
    +     myClone.SetParameter(p);
          var cov = new ParameterizedCovarianceFunction {
            Covariance = (x, i, j) => myClone.Get(GetNorm(x, x, i, j, columnIndices)),
    …
          var dist = Distance as IDistance<IEnumerable<double>>;
          if (dist == null) throw new ArgumentException("The distance needs to apply to double vectors");
    -     var r1 = new IndexedEnumerable(x, i, columnIndices);
    -     var r2 = new IndexedEnumerable(xt, j, columnIndices);
    +     var r1 = columnIndices.Select(c => x[i, c]);
    +     var r2 = columnIndices.Select(c => xt[j, c]);
          return dist.Get(r1, r2);
    -   }
    -   internal class IndexedEnumerable : IEnumerable<double> {
    -     private readonly double[,] data;
    -     private readonly int row;
    -     private readonly int[] columnIndices;
    -
    -     public IndexedEnumerable(double[,] data, int row, int[] columnIndices) {
    -       this.data = data;
    -       this.row = row;
    -       this.columnIndices = columnIndices;
    -     }
    -
    -     public IEnumerator<double> GetEnumerator() {
    -       return new IndexedEnumerator(data, row, columnIndices);
    -     }
    -
    -     IEnumerator IEnumerable.GetEnumerator() {
    -       return new IndexedEnumerator(data, row, columnIndices);
    -     }
    -   }
    -   internal class IndexedEnumerator : IEnumerator<double> {
    -     private readonly IEnumerator<int> column;
    -     private readonly double[,] data;
    -     private readonly int row;
    -
    -     public IndexedEnumerator(double[,] data, int row, int[] columnIndices) {
    -       this.data = data;
    -       this.row = row;
    -       column = ((IEnumerable<int>)columnIndices).GetEnumerator();
    -     }
    -
    -     public double Current {
    -       get { return data[row, column.Current]; }
    -     }
    -
    -     object IEnumerator.Current {
    -       get {
    -         return data[row, column.Current];
    -       }
    -     }
    -
    -     public void Dispose() { }
    -
    -     public bool MoveNext() {
    -       return column.MoveNext();
    -     }
    -
    -     public void Reset() {
    -       column.Reset();
    -     }
        }
      }
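The main change in KernelBase: beta is no longer a stored FixedValueParameter but a nullable double, so GetNumberOfParameters reports 0 remaining hyperparameters when the algorithm has fixed beta and 1 when it still has to be supplied via SetParameter. A minimal standalone sketch of this nullable-hyperparameter pattern (illustrative MiniKernel/MiniGaussianKernel names, not the HeuristicLab classes):

    using System;

    public abstract class MiniKernel {
        // null = "beta still free, must come in via SetParameter";
        // a value = "beta fixed externally, e.g. by the algorithm".
        public double? Beta { get; set; }

        public int GetNumberOfParameters() { return Beta.HasValue ? 0 : 1; }

        public void SetParameter(double[] p) {
            if (p != null && p.Length == 1) Beta = p[0];
        }

        public abstract double Get(double norm);
    }

    public sealed class MiniGaussianKernel : MiniKernel {
        public override double Get(double norm) {
            var beta = Beta.Value; // throws InvalidOperationException if beta was never set
            if (Math.Abs(beta) < double.Epsilon) return double.NaN;
            return Math.Exp(-norm * norm / (beta * beta));
        }
    }

    // usage: var k = new MiniGaussianKernel { Beta = 2.0 };
    //        k.GetNumberOfParameters() == 0; k.Get(1.0) ≈ 0.7788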
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/MultiquadraticKernel.cs

    r14883 r14887
      using System;
      using HeuristicLab.Common;
    - using HeuristicLab.Core;
    - using HeuristicLab.Data;
    - using HeuristicLab.Parameters;
    + using HeuristicLab.Core;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("MultiquadraticKernel", "A kernel function that uses the multiquadratic function")]
    +   // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
    +   [Item("MultiquadraticKernel", "A kernel function that uses the multi-quadratic function (sqrt(1+||x-c||²/β).")]
        public class MultiquadraticKernel : KernelBase {
    …
        public MultiquadraticKernel() {
    -     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta in the kernel function sqrt(1+||x-c||²/beta)", new DoubleValue(2)));
        }
        public override IDeepCloneable Clone(Cloner cloner) {
    …
        #endregion
        protected override double Get(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     return Math.Sqrt(1 + norm * norm / Beta);
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     return Math.Sqrt(1 + norm * norm / beta);
        }

        protected override double GetGradient(double norm) {
    -     if (Math.Abs(Beta) < double.Epsilon) return double.NaN;
    -     var dividend = 2 * Beta * Beta * Math.Sqrt((Beta + norm * norm) / Beta);
    +     var beta = Beta.Value;
    +     if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    +     var dividend = 2 * beta * beta * Math.Sqrt((beta + norm * norm) / beta);
          return -norm * norm / dividend;
        }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/PolysplineKernel.cs

    r14883 r14887
      using System;
      using HeuristicLab.Common;
    - using HeuristicLab.Core;
    - using HeuristicLab.Data;
    - using HeuristicLab.Parameters;
    + using HeuristicLab.Core;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("PolysplineKernel", "A kernel function that uses the multiquadratic function")]
    +   // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
    +   [Item("PolysplineKernel", "A kernel function that uses the poly-spline function ||x-c||^Beta.")]
        public class PolysplineKernel : KernelBase {
    …
                  : base(original, cloner) { }
        public PolysplineKernel() {
    -     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The Beta in the kernelfunction ||x-c||^Beta", new DoubleValue(1.5)));
        }
        public override IDeepCloneable Clone(Cloner cloner) {
    …
        protected override double Get(double norm) {
    -     return Math.Pow(norm, Beta);
    +     return Math.Pow(norm, Beta.Value);
        }

        protected override double GetGradient(double norm) {
    -     return Math.Pow(norm, Beta) * Math.Log(norm);
    +     return Math.Pow(norm, Beta.Value) * Math.Log(norm);
        }
      }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/ThinPlatePolysplineKernel.cs

    r14883 r14887
      using HeuristicLab.Common;
      using HeuristicLab.Core;
    - using HeuristicLab.Data;
    - using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function")]
    +   // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
    +   [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function ||x-c||^(2*Beta)*log(||x-c||^Beta)")]
        public class ThinPlatePolysplineKernel : KernelBase {
        #region HLConstructors & Boilerplate
    …
        protected ThinPlatePolysplineKernel(ThinPlatePolysplineKernel original, Cloner cloner) : base(original, cloner) { }
        public ThinPlatePolysplineKernel() {
    -     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The Beta in the kernelfunction ||x-c||^(2*Beta)*log(||x-c||^Beta)", new DoubleValue(1)));
        }
        public override IDeepCloneable Clone(Cloner cloner) {
    …
        protected override double Get(double norm) {
    -     if (Math.Pow(norm, Beta) <= 0) return double.NaN;
    -     return Math.Pow(norm, 2 * Beta) * Math.Log(1 + Math.Pow(norm, Beta));
    +     var beta = Beta.Value;
    +     if (Math.Pow(norm, beta) < 0) return double.NaN;
    +     return Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta));
        }

        protected override double GetGradient(double norm) {
    -     if (Math.Pow(norm, Beta) <= 0) return double.NaN;
    -     return 2 * Math.Log(norm) * Math.Pow(norm, 2 * Beta) * Math.Log(1 + Math.Pow(norm, Beta)) + Math.Pow(norm, 3 * Beta) * Math.Log(norm) / (Math.Pow(norm, Beta) + 1);
    +     var beta = Beta.Value;
    +     if (Math.Pow(norm, beta) <= 0) return double.NaN;
    +     return 2 * Math.Log(norm) * Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta)) + Math.Pow(norm, 3 * beta) * Math.Log(norm) / (Math.Pow(norm, beta) + 1);
        }
      }
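For reference, the kernels touched in this changeset compute the following (collected from the Get implementations above, with n = ||x-c|| and hyperparameter β; this summary is added here for readability):

    \begin{aligned}
    k_{\mathrm{Gaussian}}(n) &= \exp(-n^2/\beta^2) \\
    k_{\mathrm{InvMultiquadratic}}(n) &= 1/\sqrt{1 + n^2/\beta} \\
    k_{\mathrm{Multiquadratic}}(n) &= \sqrt{1 + n^2/\beta} \\
    k_{\mathrm{Polyspline}}(n) &= n^{\beta} \\
    k_{\mathrm{ThinPlatePolyspline}}(n) &= n^{2\beta}\,\log(1 + n^{\beta}) \\
    k_{\mathrm{Circular}}(n) &= \arccos(-d) - d\sqrt{1-d^2} - \pi/2, \quad d = n/\beta, \quad 0 \text{ for } n \ge \beta
    \end{aligned}

Per the Item descriptions, the Gaussian and inverse multi-quadratic kernels are positive definite for β > 0; the multi-quadratic, poly-spline and thin plate variants are only conditionally positive definite, as the source comments added in this changeset note.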
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelRidgeRegression.cs

    r14885 r14887
      using HeuristicLab.Problems.DataAnalysis;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    -   /// <summary>
    -   /// Linear regression data analysis algorithm.
    -   /// </summary>
    -   [Item("Radial Basis Function Regression (RBF-R)", "Radial basis function regression data analysis algorithm.")]
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    +   [Item("Kernel Ridge Regression", "Kernelized ridge regression e.g. for radial basis function (RBF) regression.")]
        [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 100)]
        [StorableClass]
    -   public sealed class RadialBasisRegression : BasicAlgorithm {
    -     private const string RBFRegressionSolutionResultName = "RBF regression solution";
    +   public sealed class KernelRidgeRegression : BasicAlgorithm {
    +     private const string SolutionResultName = "Kernel ridge regression solution";

        public override bool SupportsPause {
    …
        private const string KernelParameterName = "Kernel";
        private const string ScaleInputVariablesParameterName = "ScaleInputVariables";
    +   private const string LambdaParameterName = "LogLambda";
    +   private const string BetaParameterName = "Beta";
        #endregion

        #region parameter properties
    -   public ValueParameter<ICovarianceFunction> KernelParameter {
    -     get { return (ValueParameter<ICovarianceFunction>)Parameters[KernelParameterName]; }
    +   public ValueParameter<IKernel> KernelParameter {
    +     get { return (ValueParameter<IKernel>)Parameters[KernelParameterName]; }
        }

    …
          get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleInputVariablesParameterName]; }
        }
    +
    +   public IFixedValueParameter<DoubleValue> LogLambdaParameter {
    +     get { return (IFixedValueParameter<DoubleValue>)Parameters[LambdaParameterName]; }
    +   }
    +
    +   public IFixedValueParameter<DoubleValue> BetaParameter {
    +     get { return (IFixedValueParameter<DoubleValue>)Parameters[BetaParameterName]; }
    +   }
        #endregion

        #region properties
    -   public ICovarianceFunction Kernel {
    +   public IKernel Kernel {
          get { return KernelParameter.Value; }
        }
    …
        }

    +   public double LogLambda {
    +     get { return LogLambdaParameter.Value.Value; }
    +     set { LogLambdaParameter.Value.Value = value; }
    +   }
    +
    +   public double Beta {
    +     get { return BetaParameter.Value.Value; }
    +     set { BetaParameter.Value.Value = value; }
    +   }
        #endregion

        [StorableConstructor]
    -   private RadialBasisRegression(bool deserializing) : base(deserializing) { }
    -   private RadialBasisRegression(RadialBasisRegression original, Cloner cloner)
    +   private KernelRidgeRegression(bool deserializing) : base(deserializing) { }
    +   private KernelRidgeRegression(KernelRidgeRegression original, Cloner cloner)
          : base(original, cloner) {
        }
    -   public RadialBasisRegression() {
    +   public KernelRidgeRegression() {
          Problem = new RegressionProblem();
    -     Parameters.Add(new ValueParameter<ICovarianceFunction>(KernelParameterName, "The radial basis function"));
    +     Parameters.Add(new ValueParameter<IKernel>(KernelParameterName, "The kernel", new GaussianKernel()));
          Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputVariablesParameterName, "Set to true if the input variables should be scaled to the interval [0..1]", new BoolValue(true)));
    -     var kernel = new GaussianKernel();
    -     KernelParameter.Value = kernel;
    +     Parameters.Add(new FixedValueParameter<DoubleValue>(LambdaParameterName, "The log10-transformed weight for the regularization term lambda [-inf..+inf]. Small values produce more complex models, large values produce models with larger errors. Set to very small value (e.g. -1.0e15) for almost exact approximation", new DoubleValue(-2)));
    +     Parameters.Add(new FixedValueParameter<DoubleValue>(BetaParameterName, "The beta parameter for the kernel", new DoubleValue(2)));
        }
        [StorableHook(HookType.AfterDeserialization)]
    …
        public override IDeepCloneable Clone(Cloner cloner) {
    -     return new RadialBasisRegression(this, cloner);
    +     return new KernelRidgeRegression(this, cloner);
        }

        protected override void Run(CancellationToken cancellationToken) {
    -     double loocvrmse, rmsError;
    -     var solution = CreateRadialBasisRegressionSolution(Problem.ProblemData, Kernel, ScaleInputVariables, out loocvrmse, out rmsError);
    -     Results.Add(new Result(RBFRegressionSolutionResultName, "The RBF regression solution.", solution));
    -     Results.Add(new Result("LOOCVRMSE", "The root mean squared error of a leave-one-out-cross-validation on the training set", new DoubleValue(loocvrmse)));
    +     double rmsError;
    +     var kernel = Kernel;
    +     kernel.Beta = Beta;
    +     var solution = CreateRadialBasisRegressionSolution(Problem.ProblemData, kernel, Math.Pow(10, LogLambda), ScaleInputVariables, out rmsError);
    +     Results.Add(new Result(SolutionResultName, "The kernel ridge regression solution.", solution));
          Results.Add(new Result("RMSE (test)", "The root mean squared error of the solution on the test set.", new DoubleValue(rmsError)));
        }

    -   public static IRegressionSolution CreateRadialBasisRegressionSolution(IRegressionProblemData problemData, ICovarianceFunction kernel, bool scaleInputs, out double loocvRmsError, out double rmsError) {
    -     var model = new RadialBasisFunctionModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, scaleInputs, kernel);
    -     loocvRmsError = model.LeaveOneOutCrossValidationRootMeanSquaredError();
    -     rmsError = Math.Sqrt(model.GetEstimatedValues(problemData.Dataset, problemData.TestIndices)
    -       .Zip(problemData.TargetVariableTestValues, (a, b) => (a - b) * (a - b))
    -       .Average());
    +   public static IRegressionSolution CreateRadialBasisRegressionSolution(IRegressionProblemData problemData, ICovarianceFunction kernel, double lambda, bool scaleInputs, out double rmsError) {
    +     var model = new KernelRidgeRegressionModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, scaleInputs, kernel, lambda);
    +     rmsError = double.NaN;
    +     if (problemData.TestIndices.Any()) {
    +       rmsError = Math.Sqrt(model.GetEstimatedValues(problemData.Dataset, problemData.TestIndices)
    +         .Zip(problemData.TargetVariableTestValues, (a, b) => (a - b) * (a - b))
    +         .Average());
    +     }
          var solution = model.CreateRegressionSolution((IRegressionProblemData)problemData.Clone());
    -     solution.Model.Name = "RBF Regression Model";
    -     solution.Name = "RBF Regression Solution";
    +     solution.Model.Name = "Kernel ridge regression model";
    +     solution.Name = SolutionResultName;
          return solution;
        }
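With this change the algorithm owns both hyperparameters: LogLambda (applied as lambda = 10^LogLambda) and Beta, which Run() copies into the kernel before training. A hypothetical configuration sketch using only members visible in this diff (problem setup and execution depend on the surrounding HeuristicLab infrastructure):

    // illustrative only; assumes a HeuristicLab environment with a configured RegressionProblem
    var krr = new KernelRidgeRegression();       // constructor already adds a GaussianKernel
    krr.KernelParameter.Value = new GaussianKernel();
    krr.Beta = 2.0;                              // copied into kernel.Beta inside Run()
    krr.LogLambda = -2;                          // lambda = Math.Pow(10, -2) = 0.01
    // running the algorithm (e.g. from the HeuristicLab optimizer GUI) adds
    // "Kernel ridge regression solution" and "RMSE (test)" to krr.Results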
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelRidgeRegressionModel.cs

    r14885 r14887

      using System;
    - using System.Collections.Generic;
    - using System.Diagnostics;
    + using System.Collections.Generic;
      using System.Linq;
      using HeuristicLab.Common;
    …
      using HeuristicLab.Problems.DataAnalysis;

    - namespace HeuristicLab.Algorithms.DataAnalysis {
    -   /// <summary>
    -   /// Represents an RBF regression model.
    -   /// </summary>
    + namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
        [StorableClass]
    -   [Item("RBFModel", "An RBF regression model")]
    -   public sealed class RadialBasisFunctionModel : RegressionModel, IConfidenceRegressionModel {
    +   [Item("KernelRidgeRegressionModel", "A kernel ridge regression model")]
    +   public sealed class KernelRidgeRegressionModel : RegressionModel {
        public override IEnumerable<string> VariablesUsedForPrediction {
          get { return allowedInputVariables; }
    …
        private readonly ICovarianceFunction kernel;

    -   private double[,] gramInvert; // not storable as it can be large (recreate after deserialization as required)
    +   [Storable]
    +   private readonly double lambda;

        [Storable]
    -   private readonly double meanOffset; // implementation works for zero-mean target variables
    +   private readonly double yOffset; // implementation works for zero-mean target variables
    +
    +   [Storable]
    +   private readonly double yScale;

        [StorableConstructor]
    -   private RadialBasisFunctionModel(bool deserializing) : base(deserializing) { }
    -   private RadialBasisFunctionModel(RadialBasisFunctionModel original, Cloner cloner)
    +   private KernelRidgeRegressionModel(bool deserializing) : base(deserializing) { }
    +   private KernelRidgeRegressionModel(KernelRidgeRegressionModel original, Cloner cloner)
          : base(original, cloner) {
          // shallow copies of arrays because they cannot be modified
    …
          alpha = original.alpha;
          trainX = original.trainX;
    -     gramInvert = original.gramInvert;
          scaling = original.scaling;
    +     lambda = original.lambda;

    -     meanOffset = original.meanOffset;
    +     yOffset = original.yOffset;
    +     yScale = original.yScale;
          if (original.kernel != null)
            kernel = cloner.Clone(original.kernel);
        }
    -   public RadialBasisFunctionModel(IDataset dataset, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows,
    -     bool scaleInputs, ICovarianceFunction kernel)
    -     : base(targetVariable) {
    +   public override IDeepCloneable Clone(Cloner cloner) {
    +     return new KernelRidgeRegressionModel(this, cloner);
    +   }
    +
    +   public KernelRidgeRegressionModel(IDataset dataset, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows,
    +     bool scaleInputs, ICovarianceFunction kernel, double lambda = 0.1) : base(targetVariable) {
          if (kernel.GetNumberOfParameters(allowedInputVariables.Count()) > 0) throw new ArgumentException("All parameters in the kernel function must be specified.");
          name = ItemName;
    …
          var trainingRows = rows.ToArray();
          this.kernel = (ICovarianceFunction)kernel.Clone();
    +     this.lambda = lambda;
          try {
            if (scaleInputs)
    …
            trainX = ExtractData(dataset, trainingRows, scaling);
            var y = dataset.GetDoubleValues(targetVariable, trainingRows).ToArray();
    -       meanOffset = y.Average();
    -       for (int i = 0; i < y.Length; i++) y[i] -= meanOffset;
    +       yOffset = y.Average();
    +       yScale = 1.0 / y.StandardDeviation();
    +       for (int i = 0; i < y.Length; i++) {
    +         y[i] -= yOffset;
    +         y[i] *= yScale;
    +       }
            int info;
    -       // TODO: improve efficiency by decomposing matrix once instead of solving the system and then inverting the matrix
    -       alglib.densesolverlsreport denseSolveRep;
    -       gramInvert = BuildGramMatrix(trainX);
    +       alglib.densesolverreport denseSolveRep;
    +       var gram = BuildGramMatrix(trainX, lambda);
            int n = trainX.GetLength(0);
    -       alglib.rmatrixsolvels(gramInvert, n, n, y, 0.0, out info, out denseSolveRep, out alpha);
    +
    +       // cholesky decomposition
    +       var res = alglib.trfac.spdmatrixcholesky(ref gram, n, false);
    +       if(res == false) throw new ArgumentException("Could not decompose matrix. Is it quadratic symmetric positive definite?");
    +
    +       alglib.spdmatrixcholeskysolve(gram, n, false, y, out info, out denseSolveRep, out alpha);
            if (info != 1) throw new ArgumentException("Could not create model.");
    -
    -       alglib.matinvreport report;
    -       alglib.rmatrixinverse(ref gramInvert, out info, out report);
    -       if (info != 1) throw new ArgumentException("Could not invert matrix. Is it quadratic symmetric positive definite?");
    -
          } catch (alglib.alglibexception ae) {
            // wrap exception so that calling code doesn't have to know about alglib implementation
    -       throw new ArgumentException("There was a problem in the calculation of the RBF model", ae);
    +       throw new ArgumentException("There was a problem in the calculation of the kernel ridge regression model", ae);
          }
    +   }
    +
    +   #region IRegressionModel Members
    +   public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    +     var newX = ExtractData(dataset, rows, scaling);
    +     var dim = newX.GetLength(1);
    +     var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());
    +
    +     var pred = new double[newX.GetLength(0)];
    +     for (int i = 0; i < pred.Length; i++) {
    +       double sum = 0.0;
    +       for (int j = 0; j < alpha.Length; j++) {
    +         sum += alpha[j] * cov.CrossCovariance(trainX, newX, j, i);
    +       }
    +       pred[i] = sum / yScale + yOffset;
    +     }
    +     return pred;
    +   }
    +   public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    +     return new RegressionSolution(this, new RegressionProblemData(problemData));
    +   }
    +   #endregion
    +
    +   #region helpers
    +   private double[,] BuildGramMatrix(double[,] data, double lambda) {
    +     var n = data.GetLength(0);
    +     var dim = data.GetLength(1);
    +     var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());
    +     var gram = new double[n, n];
    +     // G = (K + λ I)
    +     for (var i = 0; i < n; i++) {
    +       for (var j = i; j < n; j++) {
    +         gram[j, i] = cov.Covariance(data, i, j); // symmetric matrix --> fill only lower triangle
    +       }
    +       gram[i, i] += lambda;
    +     }
    +     return gram;
        }
    …
          return res;
        }
    -
    -   public override IDeepCloneable Clone(Cloner cloner) {
    -     return new RadialBasisFunctionModel(this, cloner);
    -   }
    -
    -   #region IRegressionModel Members
    -   public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    -     var newX = ExtractData(dataset, rows, scaling);
    -     var dim = newX.GetLength(1);
    -     var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());
    -
    -     var pred = new double[newX.GetLength(0)];
    -     for (int i = 0; i < pred.Length; i++) {
    -       double sum = meanOffset;
    -       for (int j = 0; j < alpha.Length; j++) {
    -         sum += alpha[j] * cov.CrossCovariance(trainX, newX, j, i);
    -       }
    -       pred[i] = sum;
    -     }
    -     return pred;
    -   }
    -   public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    -     return new ConfidenceRegressionSolution(this, new RegressionProblemData(problemData));
    -   }
    -   #endregion
    -
    -   public IEnumerable<double> GetEstimatedVariances(IDataset dataset, IEnumerable<int> rows) {
    -     if (gramInvert == null) CreateAndInvertGramMatrix();
    -     int n = gramInvert.GetLength(0);
    -     var newData = ExtractData(dataset, rows, scaling);
    -     var dim = newData.GetLength(1);
    -     var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());
    -
    -     // TODO perf (matrix matrix multiplication)
    -     for (int i = 0; i < newData.GetLength(0); i++) {
    -       double[] p = new double[n];
    -
    -       for (int j = 0; j < trainX.GetLength(0); j++) {
    -         p[j] = cov.CrossCovariance(trainX, newData, j, i);
    -       }
    -
    -       var Ap = new double[n];
    -       alglib.ablas.rmatrixmv(n, n, gramInvert, 0, 0, 0, p, 0, ref Ap, 0);
    -       var res = 0.0;
    -       // dot product
    -       for (int j = 0; j < p.Length; j++) res += p[j] * Ap[j];
    -       yield return res > 0 ? res : 0;
    -     }
    -   }
    -   public double LeaveOneOutCrossValidationRootMeanSquaredError() {
    -     if (gramInvert == null) CreateAndInvertGramMatrix();
    -     var n = gramInvert.GetLength(0);
    -     var s = 1.0 / n;
    -
    -     var sum = 0.0;
    -     for (int i = 0; i < alpha.Length; i++) {
    -       var x = alpha[i] / gramInvert[i, i];
    -       sum += x * x;
    -     }
    -     sum *= s;
    -     return Math.Sqrt(sum);
    -   }
    -
    -   private void CreateAndInvertGramMatrix() {
    -     try {
    -       gramInvert = BuildGramMatrix(trainX);
    -       int info = 0;
    -       alglib.matinvreport report;
    -       alglib.rmatrixinverse(ref gramInvert, out info, out report);
    -       if (info != 1)
    -         throw new ArgumentException("Could not invert matrix. Is it quadratic symmetric positive definite?");
    -     } catch (alglib.alglibexception) {
    -       // wrap exception so that calling code doesn't have to know about alglib implementation
    -       throw new ArgumentException("Could not invert matrix. Is it quadratic symmetric positive definite?");
    -     }
    -   }
    -   #region helpers
    -   private double[,] BuildGramMatrix(double[,] data) {
    -     var n = data.GetLength(0);
    -     var dim = data.GetLength(1);
    -     var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());
    -     var gram = new double[n, n];
    -     for (var i = 0; i < n; i++)
    -       for (var j = i; j < n; j++) {
    -         gram[j, i] = gram[i, j] = cov.Covariance(data, i, j); // symmetric matrix --> half of the work
    -       }
    -     return gram;
    -   }
    -
        #endregion
      }
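The numerical core of the new model: build G = K + λI (the diff fills only the lower triangle, which alglib's SPD routines accept), factor it once with a Cholesky decomposition, and solve for alpha; predictions are then Σ_j α_j k(x_j, ·), rescaled via yScale and yOffset. A self-contained sketch of that training/prediction math in plain C# (toy data, a hand-rolled Cholesky instead of alglib, target scaling omitted; all names are illustrative, not the changeset's API):

    using System;

    static class KrrSketch {
        // Lower-triangular Cholesky factorization G = L L^T (G must be SPD).
        static double[,] Cholesky(double[,] g) {
            int n = g.GetLength(0);
            var l = new double[n, n];
            for (int i = 0; i < n; i++)
                for (int j = 0; j <= i; j++) {
                    double sum = g[i, j];
                    for (int k = 0; k < j; k++) sum -= l[i, k] * l[j, k];
                    if (i == j) {
                        if (sum <= 0) throw new ArgumentException("matrix not SPD");
                        l[i, i] = Math.Sqrt(sum);
                    } else l[i, j] = sum / l[j, j];
                }
            return l;
        }

        // Solve L L^T x = b by forward then backward substitution.
        static double[] CholeskySolve(double[,] l, double[] b) {
            int n = b.Length;
            var y = new double[n];
            for (int i = 0; i < n; i++) {
                double s = b[i];
                for (int k = 0; k < i; k++) s -= l[i, k] * y[k];
                y[i] = s / l[i, i];
            }
            var x = new double[n];
            for (int i = n - 1; i >= 0; i--) {
                double s = y[i];
                for (int k = i + 1; k < n; k++) s -= l[k, i] * x[k];
                x[i] = s / l[i, i];
            }
            return x;
        }

        static void Main() {
            // toy 1-d data; Gaussian kernel with beta = 2, lambda = 0.01
            double[] x = { 0.0, 1.0, 2.0, 3.0 };
            double[] y = { 0.1, 0.9, 2.1, 2.9 };
            double beta = 2.0, lambda = 0.01;
            Func<double, double, double> k =
                (a, b) => Math.Exp(-(a - b) * (a - b) / (beta * beta));

            // G = K + lambda * I
            int n = x.Length;
            var gram = new double[n, n];
            for (int i = 0; i < n; i++)
                for (int j = 0; j < n; j++)
                    gram[i, j] = k(x[i], x[j]) + (i == j ? lambda : 0.0);

            var alpha = CholeskySolve(Cholesky(gram), y);

            // prediction at a new point: f(z) = sum_j alpha_j * k(x_j, z)
            double z = 1.5, f = 0.0;
            for (int j = 0; j < n; j++) f += alpha[j] * k(x[j], z);
            Console.WriteLine($"f({z}) = {f:F3}");
        }
    }

Compared with the removed code path, factoring once avoids both the general least-squares solve and the explicit matrix inversion that the old TODO comment flagged.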