Free cookie consent management tool by TermsFeed Policy Generator

Changeset 14891


Ignore:
Timestamp:
04/26/17 11:06:51 (8 years ago)
Author:
bwerth
Message:

#2699 reworked kernel functions (beta is always a scaling factor now), added LU decomposition as a fallback if Cholesky decomposition fails

Location:
branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4
Files:
8 edited

Legend:

Unmodified
Added
Removed
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs

    r14185 r14891  
    2626using HeuristicLab.Core;
    2727using HeuristicLab.Data;
    28 using HeuristicLab.Optimization;
    2928using HeuristicLab.Parameters;
    3029using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     
    3938  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 160)]
    4039  [StorableClass]
    41   public sealed class GaussianProcessRegression : GaussianProcessBase, IStorableContent {
     40  public sealed class GaussianProcessRegression : GaussianProcessBase, IStorableContent, IDataAnalysisAlgorithm<IRegressionProblem> {
    4241    public string Filename { get; set; }
    4342
    4443    public override Type ProblemType { get { return typeof(IRegressionProblem); } }
    45     public new IRegressionProblem Problem {
     44    public new IRegressionProblem Problem
     45    {
    4646      get { return (IRegressionProblem)base.Problem; }
    4747      set { base.Problem = value; }
     
    5353
    5454    #region parameter properties
    55     public IConstrainedValueParameter<IGaussianProcessRegressionModelCreator> GaussianProcessModelCreatorParameter {
     55    public IConstrainedValueParameter<IGaussianProcessRegressionModelCreator> GaussianProcessModelCreatorParameter
     56    {
    5657      get { return (IConstrainedValueParameter<IGaussianProcessRegressionModelCreator>)Parameters[ModelCreatorParameterName]; }
    5758    }
    58     public IFixedValueParameter<GaussianProcessRegressionSolutionCreator> GaussianProcessSolutionCreatorParameter {
     59    public IFixedValueParameter<GaussianProcessRegressionSolutionCreator> GaussianProcessSolutionCreatorParameter
     60    {
    5961      get { return (IFixedValueParameter<GaussianProcessRegressionSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
    6062    }
    61     public IFixedValueParameter<BoolValue> CreateSolutionParameter {
     63    public IFixedValueParameter<BoolValue> CreateSolutionParameter
     64    {
    6265      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
    6366    }
    6467    #endregion
    6568    #region properties
    66     public bool CreateSolution {
     69    public bool CreateSolution
     70    {
    6771      get { return CreateSolutionParameter.Value.Value; }
    6872      set { CreateSolutionParameter.Value.Value = value; }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/CicularKernel.cs

    r14887 r14891  
    2727namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    2828  [StorableClass]
    29   [Item("CircularKernel", "A circular kernel function 2*pi*(acos(-d)-d*(1-n²)^(0.5)) where n = ||x-c|| and d = n/beta")]
     29  [Item("CircularKernel", "A circular kernel function 2*pi*(acos(-d)-d*(1-d²)^(0.5)) where n = ||x-c|| and d = n/beta \n  As described in http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/")]
    3030  public class CircularKernel : KernelBase {
    3131
     
    4545    protected override double Get(double norm) {
    4646      var beta = Beta.Value;
    47       if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    48       if (norm >= beta) return 0;
     47      if (Math.Abs(Beta.Value) < double.Epsilon) return double.NaN;
     48      if (norm >= Beta.Value) return 0;
    4949      var d = norm / beta;
    50       return Math.Acos(-d) - d * Math.Sqrt(1 - d * d) - Math.PI / 2;
     50      return 2 * Math.PI * (Math.Acos(-d) - d * Math.Sqrt(1 - d * d));
    5151    }
    5252
     53    // 4*pi*n^3 / (beta^4 * sqrt(1-n^2/beta^2)
    5354    protected override double GetGradient(double norm) {
    5455      var beta = Beta.Value;
    5556      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    5657      if (beta < norm) return 0;
    57       return -2 * Math.Pow(norm, 3) / (Math.Pow(beta, 4) * Math.Sqrt(1 - norm * norm / (beta * beta)));
     58      var d = norm / beta;
     59      return -4 * Math.PI * d * d * d / beta * Math.Sqrt(1 - d * d);
    5860    }
    5961  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/GaussianKernel.cs

    r14887 r14891  
    2929namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    3030  [StorableClass]
    31   [Item("GaussianKernel", "A kernel function that uses Gaussian function exp(-||x-c||/beta²). Positive definite beta > 0")]
     31  [Item("GaussianKernel", "A kernel function that uses Gaussian function exp(-n²/beta²). As described in http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/")]
    3232  public class GaussianKernel : KernelBase {
    3333
     
    4848      var beta = Beta.Value;
    4949      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    50       return Math.Exp(-norm * norm / (beta * beta));
     50      var d = norm / beta;
     51      return Math.Exp(-d * d);
    5152    }
    5253
     54    //2 * n²/b²* 1/b * exp(-n²/b²)
    5355    protected override double GetGradient(double norm) {
    5456      var beta = Beta.Value;
    5557      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    56       return 2 * norm * norm / Math.Pow(beta, 3) * Math.Exp(-norm * norm / (beta * beta));
     58      var d = norm / beta;
     59      return 2 * d * d / beta * Math.Exp(-d * d);
    5760    }
    5861  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/InverseMultiquadraticKernel.cs

    r14887 r14891  
    2222using System;
    2323using HeuristicLab.Common;
    24 using HeuristicLab.Core;     
     24using HeuristicLab.Core;
    2525using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2626
    2727namespace HeuristicLab.Algorithms.DataAnalysis.KernelRidgeRegression {
    2828  [StorableClass]
    29   [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multi-quadratic function  1 / sqrt(1+||x-c||^2/beta). Positive definite: beta > 0")]
     29  [Item("InverseMultiquadraticKernel", "A kernel function that uses the inverse multi-quadratic function  1 / sqrt(1+||x-c||²/beta²). Similar to http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/ with beta as a scaling factor.")]
    3030  public class InverseMultiquadraticKernel : KernelBase {
     31
     32    private const double C = 1.0;
    3133    #region HLConstructors & Boilerplate
    3234    [StorableConstructor]
     
    3537    private void AfterDeserialization() { }
    3638    protected InverseMultiquadraticKernel(InverseMultiquadraticKernel original, Cloner cloner) : base(original, cloner) { }
    37     public InverseMultiquadraticKernel() {
    38     }
     39    public InverseMultiquadraticKernel() { }
    3940    public override IDeepCloneable Clone(Cloner cloner) {
    4041      return new InverseMultiquadraticKernel(this, cloner);
     
    4546      var beta = Beta.Value;
    4647      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    47       return 1 / Math.Sqrt(1 + norm * norm / beta);
     48      var d = norm / beta;
     49      return 1 / Math.Sqrt(C + d * d);
    4850    }
    4951
     52    //n²/(b³(n²/b² + C)^1.5)
    5053    protected override double GetGradient(double norm) {
    5154      var beta = Beta.Value;
    5255      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    53       return norm * norm / (2 * beta * beta * Math.Pow((norm * norm + beta) / beta, 1.5));
     56      var d = norm / beta;
     57      return d * d / (beta * Math.Pow(d * d + C, 1.5));
    5458    }
    5559  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/MultiquadraticKernel.cs

    r14887 r14891  
    2222using System;
    2323using HeuristicLab.Common;
    24 using HeuristicLab.Core;           
     24using HeuristicLab.Core;
    2525using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2626
     
    2828  [StorableClass]
    2929  // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
    30   [Item("MultiquadraticKernel", "A kernel function that uses the multi-quadratic function (sqrt(1+||x-c||²/β).")]
     30  [Item("MultiquadraticKernel", "A kernel function that uses the multi-quadratic function sqrt(1+||x-c||²/beta²). Similar to http://crsouza.com/2010/03/17/kernel-functions-for-machine-learning-applications/ with beta as a scaling factor.")]
    3131  public class MultiquadraticKernel : KernelBase {
    3232
     33    private const double C = 1.0;
    3334    #region HLConstructors & Boilerplate
    3435    [StorableConstructor]
     
    4849      var beta = Beta.Value;
    4950      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    50       return Math.Sqrt(1 + norm * norm / beta);
     51      var d = norm / beta;
     52      return Math.Sqrt(C + d * d);
    5153    }
    5254
     55    //-n²/(d³*sqrt(C+n²/d²))
    5356    protected override double GetGradient(double norm) {
    5457      var beta = Beta.Value;
    5558      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
    56       var dividend = 2 * beta * beta * Math.Sqrt((beta + norm * norm) / beta);
    57       return -norm * norm / dividend;
     59      var d = norm / beta;
     60      return -d * d / (beta * Math.Sqrt(C + d * d));
    5861    }
    5962  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/PolysplineKernel.cs

    r14887 r14891  
    2222using System;
    2323using HeuristicLab.Common;
    24 using HeuristicLab.Core;         
     24using HeuristicLab.Core;
     25using HeuristicLab.Data;
     26using HeuristicLab.Parameters;
    2527using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2628
     
    2830  [StorableClass]
    2931  // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
    30   [Item("PolysplineKernel", "A kernel function that uses the poly-spline function ||x-c||^Beta.")]
     32  [Item("PolysplineKernel", "A kernel function that uses the polyharmonic function (||x-c||/Beta)^Degree as given in http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf with beta as a scaling parameters.")]
    3133  public class PolysplineKernel : KernelBase {
     34
     35    #region Parameternames
     36    private const string DegreeParameterName = "Degree";
     37    #endregion
     38    #region Parameterproperties
     39    public IFixedValueParameter<DoubleValue> DegreeParameter
     40    {
     41      get { return Parameters[DegreeParameterName] as IFixedValueParameter<DoubleValue>; }
     42    }
     43    #endregion
     44    #region Properties
     45    public DoubleValue Degree
     46    {
     47      get { return DegreeParameter.Value; }
     48    }
     49    #endregion
    3250
    3351    #region HLConstructors & Boilerplate
     
    3654    [StorableHook(HookType.AfterDeserialization)]
    3755    private void AfterDeserialization() { }
    38     protected PolysplineKernel(PolysplineKernel original, Cloner cloner)
    39                 : base(original, cloner) { }
     56    protected PolysplineKernel(PolysplineKernel original, Cloner cloner) : base(original, cloner) { }
    4057    public PolysplineKernel() {
     58      Parameters.Add(new FixedValueParameter<DoubleValue>(DegreeParameterName, "The degree of the kernel. Needs to be greater than zero.", new DoubleValue(1.0)));
    4159    }
    4260    public override IDeepCloneable Clone(Cloner cloner) {
     
    4664
    4765    protected override double Get(double norm) {
    48       return Math.Pow(norm, Beta.Value);
     66      var beta = Beta.Value;
     67      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     68      var d = norm / beta;
     69      return Math.Pow(d, Degree.Value);
    4970    }
    5071
     72    //-degree/beta * (norm/beta)^degree
    5173    protected override double GetGradient(double norm) {
    52       return Math.Pow(norm, Beta.Value) * Math.Log(norm);
     74      var beta = Beta.Value;
     75      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     76      var d = norm / beta;
     77      return -Degree.Value / beta * Math.Pow(d, Degree.Value);
    5378    }
    5479  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelFunctions/ThinPlatePolysplineKernel.cs

    r14887 r14891  
    2323using HeuristicLab.Common;
    2424using HeuristicLab.Core;
     25using HeuristicLab.Data;
     26using HeuristicLab.Parameters;
    2527using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2628
     
    2830  [StorableClass]
    2931  // conditionally positive definite. (need to add polynomials) see http://num.math.uni-goettingen.de/schaback/teaching/sc.pdf
    30   [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function ||x-c||^(2*Beta)*log(||x-c||^Beta)")]
     32  [Item("ThinPlatePolysplineKernel", "A kernel function that uses the ThinPlatePolyspline function (||x-c||/Beta)^(Degree)*log(||x-c||/Beta) as described in \"Thin-Plate Spline Radial Basis Function Scheme for Advection-Diffusion Problems\" with beta as a scaling parameter.")]
    3133  public class ThinPlatePolysplineKernel : KernelBase {
     34
     35    #region Parameternames
     36    private const string DegreeParameterName = "Degree";
     37    #endregion
     38    #region Parameterproperties
     39    public IFixedValueParameter<DoubleValue> DegreeParameter
     40    {
     41      get { return Parameters[DegreeParameterName] as IFixedValueParameter<DoubleValue>; }
     42    }
     43    #endregion
     44    #region Properties
     45    public DoubleValue Degree
     46    {
     47      get { return DegreeParameter.Value; }
     48    }
     49    #endregion
     50
    3251    #region HLConstructors & Boilerplate
    3352    [StorableConstructor]
     
    3756    protected ThinPlatePolysplineKernel(ThinPlatePolysplineKernel original, Cloner cloner) : base(original, cloner) { }
    3857    public ThinPlatePolysplineKernel() {
     58      Parameters.Add(new FixedValueParameter<DoubleValue>(DegreeParameterName, "The degree of the kernel. Needs to be greater than zero.", new DoubleValue(2.0)));
    3959    }
    4060    public override IDeepCloneable Clone(Cloner cloner) {
     
    4565    protected override double Get(double norm) {
    4666      var beta = Beta.Value;
    47       if (Math.Pow(norm, beta) < 0) return double.NaN;
    48       return Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta));
     67      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     68      var d = norm / beta;
     69      if (Math.Abs(d) < double.Epsilon) return 0;
     70      return Math.Pow(d, Degree.Value) * Math.Log(d);
    4971    }
    5072
     73    // (Degree/beta) * (norm/beta)^Degree * log(norm/beta)
    5174    protected override double GetGradient(double norm) {
    5275      var beta = Beta.Value;
    53       if (Math.Pow(norm, beta) <= 0) return double.NaN;
    54       return 2 * Math.Log(norm) * Math.Pow(norm, 2 * beta) * Math.Log(1 + Math.Pow(norm, beta)) + Math.Pow(norm, 3 * beta) * Math.Log(norm) / (Math.Pow(norm, beta) + 1);
     76      if (Math.Abs(beta) < double.Epsilon) return double.NaN;
     77      var d = norm / beta;
     78      if (Math.Abs(d) < double.Epsilon) return 0;
     79      return Degree.Value / beta * Math.Pow(d, Degree.Value) * Math.Log(d);
    5580    }
    5681  }
  • branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelRidgeRegressionModel.cs

    r14888 r14891  
    3232  [Item("KernelRidgeRegressionModel", "A kernel ridge regression model")]
    3333  public sealed class KernelRidgeRegressionModel : RegressionModel {
    34     public override IEnumerable<string> VariablesUsedForPrediction {
     34    public override IEnumerable<string> VariablesUsedForPrediction
     35    {
    3536      get { return allowedInputVariables; }
    3637    }
     
    3839    [Storable]
    3940    private readonly string[] allowedInputVariables;
    40     public string[] AllowedInputVariables {
     41    public string[] AllowedInputVariables
     42    {
    4143      get { return allowedInputVariables; }
    4244    }
     
    114116        var l = new double[n, n]; Array.Copy(gram, l, l.Length);
    115117
     118        double[,] invG;
    116119        // cholesky decomposition
    117120        var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
    118         if (res == false) throw new ArgumentException("Could not decompose matrix. Is it quadratic symmetric positive definite?");
    119 
    120         alglib.spdmatrixcholeskysolve(l, n, false, y, out info, out denseSolveRep, out alpha);
    121         if (info != 1) throw new ArgumentException("Could not create model.");
    122 
    123 
    124         {
     121        if (res == false) { //throw new ArgumentException("Could not decompose matrix. Is it quadratic symmetric positive definite?");
     122          int[] pivots;
     123          var lua = new double[n, n];
     124          Array.Copy(gram, lua, lua.Length);
     125          alglib.rmatrixlu(ref lua, n, n, out pivots);
     126          alglib.rmatrixlusolve(lua, pivots, n, y, out info, out denseSolveRep, out alpha);
     127          if (info != 1) throw new ArgumentException("Could not create model.");
     128          alglib.matinvreport rep;
     129          invG = lua;  // rename
     130          alglib.rmatrixluinverse(ref invG, pivots, n, out info, out rep);
     131          if (info != 1) throw new ArgumentException("Could not invert Gram matrix.");
     132        } else {
     133          alglib.spdmatrixcholeskysolve(l, n, false, y, out info, out denseSolveRep, out alpha);
     134          if (info != 1) throw new ArgumentException("Could not create model.");
    125135          // for LOO-CV we need to build the inverse of the gram matrix
    126136          alglib.matinvreport rep;
    127           var invG = l;   // rename
    128           alglib.spdmatrixcholeskyinverse(ref invG, n, false, out info, out rep);         
     137          invG = l;   // rename
     138          alglib.spdmatrixcholeskyinverse(ref invG, n, false, out info, out rep);
    129139          if (info != 1) throw new ArgumentException("Could not invert Gram matrix.");
    130           var ssqLooError = 0.0;
    131           for (int i = 0; i < n; i++) {
    132             var pred_i = Util.ScalarProd(Util.GetRow(gram, i).ToArray(), alpha);
    133             var looPred_i = pred_i - alpha[i] / invG[i, i];
    134             var error = (y[i] - looPred_i) / yScale;
    135             ssqLooError += error * error;
    136           }
    137           LooCvRMSE = Math.Sqrt(ssqLooError / n);
    138         }
    139       } catch (alglib.alglibexception ae) {
     140        }
     141
     142        var ssqLooError = 0.0;
     143        for (int i = 0; i < n; i++) {
     144          var pred_i = Util.ScalarProd(Util.GetRow(gram, i).ToArray(), alpha);
     145          var looPred_i = pred_i - alpha[i] / invG[i, i];
     146          var error = (y[i] - looPred_i) / yScale;
     147          ssqLooError += error * error;
     148        }
     149        LooCvRMSE = Math.Sqrt(ssqLooError / n);
     150      }
     151      catch (alglib.alglibexception ae) {
    140152        // wrap exception so that calling code doesn't have to know about alglib implementation
    141153        throw new ArgumentException("There was a problem in the calculation of the kernel ridge regression model", ae);
Note: See TracChangeset for help on using the changeset viewer.