
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/KernelRidgeRegression/KernelRidgeRegressionModel.cs @ 15008

Last change on this file since 15008 was 14936, checked in by gkronber, 8 years ago

#2699: changed namespace for KernelRidgeRegression

File size: 9.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  [Item("KernelRidgeRegressionModel", "A kernel ridge regression model")]
  public sealed class KernelRidgeRegressionModel : RegressionModel {
    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private readonly string[] allowedInputVariables;
    public string[] AllowedInputVariables {
      get { return allowedInputVariables; }
    }


    [Storable]
    public double LooCvRMSE { get; private set; }

    [Storable]
    private readonly double[] alpha;

    [Storable]
    private readonly double[,] trainX; // storing the complete training data is more efficient for persistence

    [Storable]
    private readonly ITransformation<double>[] scaling;

    [Storable]
    private readonly ICovarianceFunction kernel;

    [Storable]
    private readonly double lambda;

    [Storable]
    private readonly double yOffset; // implementation works for zero-mean, unit-variance target variables

    [Storable]
    private readonly double yScale;

    [StorableConstructor]
    private KernelRidgeRegressionModel(bool deserializing) : base(deserializing) { }
    private KernelRidgeRegressionModel(KernelRidgeRegressionModel original, Cloner cloner)
      : base(original, cloner) {
      // shallow copies of the arrays are sufficient because they are never modified
      allowedInputVariables = original.allowedInputVariables;
      alpha = original.alpha;
      trainX = original.trainX;
      scaling = original.scaling;
      lambda = original.lambda;
      LooCvRMSE = original.LooCvRMSE;

      yOffset = original.yOffset;
      yScale = original.yScale;
      if (original.kernel != null)
        kernel = cloner.Clone(original.kernel);
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new KernelRidgeRegressionModel(this, cloner);
    }

    public KernelRidgeRegressionModel(IDataset dataset, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows,
      bool scaleInputs, ICovarianceFunction kernel, double lambda = 0.1) : base(targetVariable) {
      if (kernel.GetNumberOfParameters(allowedInputVariables.Count()) > 0) throw new ArgumentException("All parameters in the kernel function must be specified.");
      name = ItemName;
      description = ItemDescription;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      var trainingRows = rows.ToArray();
      this.kernel = (ICovarianceFunction)kernel.Clone();
      this.lambda = lambda;
      try {
        if (scaleInputs)
          scaling = CreateScaling(dataset, trainingRows);
        trainX = ExtractData(dataset, trainingRows, scaling);
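        // standardize the target values; predictions are transformed back to the original scale in GetEstimatedValues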
        var y = dataset.GetDoubleValues(targetVariable, trainingRows).ToArray();
        yOffset = y.Average();
        yScale = 1.0 / y.StandardDeviation();
        for (int i = 0; i < y.Length; i++) {
          y[i] -= yOffset;
          y[i] *= yScale;
        }
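        // fit: solve (K + lambda * I) * alpha = y for the standardized target vector y;
        // a Cholesky factorization is tried first, with a fallback to LU if the Gram matrix
        // is not numerically symmetric positive definite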
        int info;
        int n = trainX.GetLength(0);
        alglib.densesolverreport denseSolveRep;
        var gram = BuildGramMatrix(trainX, lambda);
        var l = new double[n, n];
        Array.Copy(gram, l, l.Length);

        double[,] invG;
        // Cholesky decomposition
        var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
        if (res == false) {
          // the Gram matrix could not be decomposed (it is not numerically symmetric positive definite): fall back to LU decomposition
          int[] pivots;
          var lua = new double[n, n];
          Array.Copy(gram, lua, lua.Length);
          alglib.rmatrixlu(ref lua, n, n, out pivots);
          alglib.rmatrixlusolve(lua, pivots, n, y, out info, out denseSolveRep, out alpha);
          if (info != 1) throw new ArgumentException("Could not create model.");
          alglib.matinvreport rep;
          invG = lua;  // rename
          alglib.rmatrixluinverse(ref invG, pivots, n, out info, out rep);
          if (info != 1) throw new ArgumentException("Could not invert Gram matrix.");
        } else {
          alglib.spdmatrixcholeskysolve(l, n, false, y, out info, out denseSolveRep, out alpha);
          if (info != 1) throw new ArgumentException("Could not create model.");
          // for LOO-CV we need the inverse of the Gram matrix
          alglib.matinvreport rep;
          invG = l;   // rename
          alglib.spdmatrixcholeskyinverse(ref invG, n, false, out info, out rep);
          if (info != 1) throw new ArgumentException("Could not invert Gram matrix.");
        }

        var ssqLooError = 0.0;
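        // leave-one-out CV residuals have a closed form for ridge regression (in standardized target units):
        // e_i = alpha[i] / (G^-1)[i, i]  with  G = K + lambda * I  and  alpha = G^-1 * y;
        // below this is expressed as looPred_i = pred_i - alpha[i] / invG[i, i], where pred_i = (G * alpha)_i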
        for (int i = 0; i < n; i++) {
          var pred_i = Util.ScalarProd(Util.GetRow(gram, i).ToArray(), alpha);
          var looPred_i = pred_i - alpha[i] / invG[i, i];
          var error = (y[i] - looPred_i) / yScale;
          ssqLooError += error * error;
        }
        LooCvRMSE = Math.Sqrt(ssqLooError / n);
      } catch (alglib.alglibexception ae) {
        // wrap the exception so that calling code doesn't have to know about the alglib implementation
        throw new ArgumentException("There was a problem in the calculation of the kernel ridge regression model", ae);
      }
    }


    #region IRegressionModel Members
    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
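      // prediction for a new row x: f(x) = sum_j alpha[j] * k(x_j, x) over the stored training rows,
      // transformed back to the original target scale via f(x) / yScale + yOffset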
      var newX = ExtractData(dataset, rows, scaling);
      var dim = newX.GetLength(1);
      var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());

      var pred = new double[newX.GetLength(0)];
      for (int i = 0; i < pred.Length; i++) {
        double sum = 0.0;
        for (int j = 0; j < alpha.Length; j++) {
          sum += alpha[j] * cov.CrossCovariance(trainX, newX, j, i);
        }
        pred[i] = sum / yScale + yOffset;
      }
      return pred;
    }
    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, new RegressionProblemData(problemData));
    }
    #endregion

    #region helpers
    private double[,] BuildGramMatrix(double[,] data, double lambda) {
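      // the Gram matrix entries are K[i, j] = k(x_i, x_j); lambda is added on the diagonal for regularization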
      var n = data.GetLength(0);
      var dim = data.GetLength(1);
      var cov = kernel.GetParameterizedCovarianceFunction(new double[0], Enumerable.Range(0, dim).ToArray());
      var gram = new double[n, n];
      // G = (K + λ I)
      for (var i = 0; i < n; i++) {
        for (var j = i; j < n; j++) {
          gram[i, j] = gram[j, i] = cov.Covariance(data, i, j); // symmetric matrix
        }
        gram[i, i] += lambda;
      }
      return gram;
    }

    private ITransformation<double>[] CreateScaling(IDataset dataset, int[] rows) {
      var trans = new ITransformation<double>[allowedInputVariables.Length];
      int i = 0;
      foreach (var variable in allowedInputVariables) {
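        // map the training range of each input variable linearly to [0, 1]: x' = (x - min) / (max - min)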
        var lin = new LinearTransformation(allowedInputVariables);
        var max = dataset.GetDoubleValues(variable, rows).Max();
        var min = dataset.GetDoubleValues(variable, rows).Min();
        lin.Multiplier = 1.0 / (max - min);
        lin.Addend = -min / (max - min);
        trans[i] = lin;
        i++;
      }
      return trans;
    }

    private double[,] ExtractData(IDataset dataset, IEnumerable<int> rows, ITransformation<double>[] scaling = null) {
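      // builds the (rows x variables) data matrix from the dataset, applying the optional per-variable scaling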
      double[][] variables;
      if (scaling != null) {
        variables =
          allowedInputVariables.Select((var, i) => scaling[i].Apply(dataset.GetDoubleValues(var, rows)).ToArray())
            .ToArray();
      } else {
        variables =
          allowedInputVariables.Select(var => dataset.GetDoubleValues(var, rows).ToArray()).ToArray();
      }
      int n = variables.First().Length;
      var res = new double[n, variables.Length];
      for (int r = 0; r < n; r++)
        for (int c = 0; c < variables.Length; c++) {
          res[r, c] = variables[c][r];
        }
      return res;
    }
    #endregion
  }
}