
source: branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs @ 8742

Last change on this file since 8742 was 8742, checked in by mkommend, 12 years ago

#1081: Merged trunk changes and fixed compilation errors due to the merge.

File size: 15.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  internal delegate void Reporter(double quality, double[] coefficients, double[] gradients);
  /// <summary>
  /// Neighborhood Components Analysis
  /// </summary>
  [Item("Neighborhood Components Analysis (NCA)", @"Implementation of Neighborhood Components Analysis
based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005.
Neighbourhood Components Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520
with additional regularizations described in Z. Yang, J. Laaksonen. 2007.
Regularized Neighborhood Component Analysis. Lecture Notes in Computer Science, 4522. pp. 253-262.")]
  [Creatable("Data Analysis")]
  [StorableClass]
  public sealed class NcaAlgorithm : FixedDataAnalysisAlgorithm<IClassificationProblem> {
    #region Parameter Properties
    public IFixedValueParameter<IntValue> KParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["K"]; }
    }
    public IFixedValueParameter<IntValue> DimensionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Dimensions"]; }
    }
    public IConstrainedValueParameter<INCAInitializer> InitializationParameter {
      get { return (IConstrainedValueParameter<INCAInitializer>)Parameters["Initialization"]; }
    }
    public IFixedValueParameter<IntValue> NeighborSamplesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["NeighborSamples"]; }
    }
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
    }
    public IFixedValueParameter<DoubleValue> RegularizationParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["Regularization"]; }
    }
    #endregion

    #region Properties
    public int K {
      get { return KParameter.Value.Value; }
      set { KParameter.Value.Value = value; }
    }
    public int Dimensions {
      get { return DimensionsParameter.Value.Value; }
      set { DimensionsParameter.Value.Value = value; }
    }
    public int NeighborSamples {
      get { return NeighborSamplesParameter.Value.Value; }
      set { NeighborSamplesParameter.Value.Value = value; }
    }
    public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }
    public double Regularization {
      get { return RegularizationParameter.Value.Value; }
      set { RegularizationParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    private NcaAlgorithm(bool deserializing) : base(deserializing) { }
    private NcaAlgorithm(NcaAlgorithm original, Cloner cloner) : base(original, cloner) { }
    public NcaAlgorithm()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>("K", "The number of nearest neighbors (k) used by the model.", new IntValue(3)));
      Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(60)));
      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(50)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));

      INCAInitializer defaultInitializer = null;
      foreach (var initializer in ApplicationManager.Manager.GetInstances<INCAInitializer>().OrderBy(x => x.ItemName)) {
        if (initializer is LDAInitializer) defaultInitializer = initializer;
        InitializationParameter.ValidValues.Add(initializer);
      }
      if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;

      Problem = new ClassificationProblem();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NcaAlgorithm(this, cloner);
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey("Regularization")) {
        Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
      }
    }

    public override void Prepare() {
      if (Problem != null) base.Prepare();
    }

    protected override void Run() {
      var initializer = InitializationParameter.Value;

      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Regularization, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
      var solution = model.CreateClassificationSolution(clonedProblem);
      if (!Results.ContainsKey("ClassificationSolution"))
        Results.Add(new Result("ClassificationSolution", "The classification solution.", solution));
      else Results["ClassificationSolution"].Value = solution;
    }

    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
      var clonedProblem = (IClassificationProblemData)data.Clone();
      var model = Train(clonedProblem, k, dimensions, neighborSamples, regularization, iterations, initializer);
      return model.CreateClassificationSolution(clonedProblem);
    }

    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
      return Train(problemData, k, dimensions, neighborSamples, regularization, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
    }
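
    // Example usage (a minimal sketch; assumes an existing IClassificationProblemData named
    // problemData and the LDAInitializer that ships with this plugin):
    //   INCAInitializer initializer = new LDAInitializer();
    //   INcaModel model = NcaAlgorithm.Train(problemData, 3, 2, 60, 0.0, 50, initializer);
    //   var solution = model.CreateClassificationSolution(problemData);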

    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, double regularization, int iterations, double[,] initialMatrix) {
      var matrix = new double[initialMatrix.Length];
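      // alglib's mincg optimizer works on a flat parameter vector, so the initial
      // transformation matrix is flattened in row-major order here
      // (entry [i, j] maps to index i * columns + j).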
      for (int i = 0; i < initialMatrix.GetLength(0); i++)
        for (int j = 0; j < initialMatrix.GetLength(1); j++)
          matrix[i * initialMatrix.GetLength(1) + j] = initialMatrix[i, j];
      return Train(problemData, k, initialMatrix.GetLength(1), neighborSamples, regularization, iterations, matrix, null, CancellationToken.None);
    }

    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
      var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
      var attributes = scaledData.GetLength(1);

      alglib.mincgstate state;
      alglib.mincgreport rep;
      alglib.mincgcreate(matrix, out state);
      alglib.mincgsetcond(state, 0, 0, 0, iterations);
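      // The tolerance-based stopping criteria (gradient, function value, and step size) are
      // all disabled (set to 0); the iteration limit is the only explicit stopping condition
      // passed to the CG optimizer.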
      alglib.mincgsetxrep(state, true);
      //alglib.mincgsetgradientcheck(state, 0.01);
      int neighborSampleSize = neighborSamples;
      Optimize(state, scaledData, classes, dimensions, neighborSampleSize, regularization, cancellation, reporter);
      alglib.mincgresults(state, out matrix, out rep);
      if (rep.terminationtype == -7) throw new InvalidOperationException("Gradient verification failed.");

      var transformationMatrix = new double[attributes, dimensions];
      var counter = 0;
      for (var i = 0; i < attributes; i++)
        for (var j = 0; j < dimensions; j++)
          transformationMatrix[i, j] = matrix[counter++];

      return new NcaModel(k, transformationMatrix, data.Dataset, data.TrainingIndices, data.TargetVariable, data.AllowedInputVariables, scaling, data.ClassValues.ToArray());
    }

    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda, CancellationToken cancellation, Reporter reporter) {
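      // alglib's mincg uses a reverse-communication interface: each call to mincgiteration
      // hands control back with a request. needfg asks for the objective value and gradient
      // at state.x; xupdated signals that a new iterate is available and is used here only
      // for progress reporting.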
      while (alglib.mincgiteration(state)) {
        if (cancellation.IsCancellationRequested) break;
        if (state.needfg) {
          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize, lambda);
          continue;
        }
        if (state.innerobj.xupdated) {
          if (reporter != null)
            reporter(state.innerobj.f, state.innerobj.x, state.innerobj.g);
          continue;
        }
        throw new InvalidOperationException("Neighborhood Components Analysis: Error in Optimize() (some derivatives were not provided?)");
      }
    }

    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda) {
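      // Following Goldberger et al. (2005), each instance i picks a neighbor j with the
      // stochastic-neighbor probability
      //   p_ij = exp(-||A x_i - A x_j||^2) / sum_{k != i} exp(-||A x_i - A x_k||^2),  p_ii = 0,
      // and p_i = sum_{j in same class as i} p_ij is the probability that i is classified
      // correctly. This method evaluates the regularized objective
      //   f(A) = -sum_i p_i + lambda * ||A||^2
      // (negated because mincg minimizes) together with its gradient with respect to A.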
      var instances = data.GetLength(0);
      var attributes = data.GetLength(1);

      var AMatrix = new Matrix(A, A.Length / dimensions, dimensions);

      alglib.sparsematrix probabilities;
      alglib.sparsecreate(instances, instances, out probabilities);
      var transformedDistances = new Dictionary<int, double>(instances);
      for (int i = 0; i < instances; i++) {
        var iVector = new Matrix(GetRow(data, i), data.GetLength(1));
        for (int k = 0; k < instances; k++) {
          if (k == i) {
            transformedDistances.Remove(k);
            continue;
          }
          var kVector = new Matrix(GetRow(data, k));
          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SumOfSquares());
        }
        var normalization = transformedDistances.Sum(x => x.Value);
        if (normalization <= 0) continue;
        foreach (var s in transformedDistances.Where(x => x.Value > 0).OrderByDescending(x => x.Value).Take(neighborSampleSize)) {
          alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
        }
      }
      alglib.sparseconverttocrs(probabilities); // needed to enumerate in order (top-down and left-right)

      int t0 = 0, t1 = 0, r, c;
      double val;
      var pi = new double[instances];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        if (classes[r].IsAlmost(classes[c])) {
          pi[r] += val;
        }
      }

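      // At this point pi[r] holds p_r: the summed neighbor probabilities of instance r over
      // the (sampled) neighbors that share its class, i.e. the probability that r is
      // classified correctly under the stochastic neighbor rule.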
      var innerSum = new double[attributes, attributes];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
        vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);

        if (classes[r].IsAlmost(classes[c])) {
          vector.OuterProduct(vector).Multiply(-val).AddTo(innerSum);
        }
      }

      func = -pi.Sum() + lambda * AMatrix.SumOfSquares();

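      // Assemble the gradient: innerSum now equals sum_i (p_i sum_k p_ik x_ik x_ik^T
      // - sum_{j in class(i)} p_ij x_ij x_ij^T) with x_ik = x_i - x_k, the bracketed term of
      // the gradient formula in Goldberger et al. (2005); multiplying by -2A and adding the
      // regularizer's derivative 2 * lambda * A yields the gradient of f.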
      r = 0;
      var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
      foreach (var g in newGrad) {
        grad[r] = g + lambda * 2 * A[r];
        r++;
      }
    }

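    // Reporter callback invoked whenever the CG optimizer produces a new iterate: it records
    // the quality -func/instances (for Regularization = 0 this is the expected fraction of
    // correctly classified training instances), logs the gradients, and rebuilds an
    // intermediate NCA model so the classification solution in the results updates live.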
    private void ReportQuality(double func, double[] coefficients, double[] gradients) {
      var instances = Problem.ProblemData.TrainingIndices.Count();
      DataTable qualities;
      if (!Results.ContainsKey("Optimization")) {
        qualities = new DataTable("Optimization");
        qualities.Rows.Add(new DataRow("Quality", string.Empty));
        Results.Add(new Result("Optimization", qualities));
      } else qualities = (DataTable)Results["Optimization"].Value;
      qualities.Rows["Quality"].Values.Add(-func / instances);

      string[] attributeNames = Problem.ProblemData.AllowedInputVariables.ToArray();
      if (gradients != null) {
        DataTable grads;
        if (!Results.ContainsKey("Gradients")) {
          grads = new DataTable("Gradients");
          for (int i = 0; i < gradients.Length; i++)
            grads.Rows.Add(new DataRow(attributeNames[i / Dimensions] + "-" + (i % Dimensions), string.Empty));
          Results.Add(new Result("Gradients", grads));
        } else grads = (DataTable)Results["Gradients"].Value;
        for (int i = 0; i < gradients.Length; i++)
          grads.Rows[attributeNames[i / Dimensions] + "-" + (i % Dimensions)].Values.Add(gradients[i]);
      }

      if (!Results.ContainsKey("Quality")) {
        Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
      } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;

      var attributes = attributeNames.Length;
      var transformationMatrix = new double[attributes, Dimensions];
      var counter = 0;
      for (var i = 0; i < attributes; i++)
        for (var j = 0; j < Dimensions; j++)
          transformationMatrix[i, j] = coefficients[counter++];

      var scaling = new Scaling(Problem.ProblemData.Dataset, attributeNames, Problem.ProblemData.TrainingIndices);
      var model = new NcaModel(K, transformationMatrix, Problem.ProblemData.Dataset, Problem.ProblemData.TrainingIndices, Problem.ProblemData.TargetVariable, attributeNames, scaling, Problem.ProblemData.ClassValues.ToArray());

      IClassificationSolution solution = model.CreateClassificationSolution(Problem.ProblemData);
      if (!Results.ContainsKey("ClassificationSolution")) {
        Results.Add(new Result("ClassificationSolution", solution));
      } else {
        Results["ClassificationSolution"].Value = solution;
      }
    }

    private static IEnumerable<double> GetRow(double[,] data, int row) {
      for (int i = 0; i < data.GetLength(1); i++)
        yield return data[row, i];
    }

  }
}