
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs @ 8471

Last change on this file: r8471, checked in by abeham, 12 years ago

#1913: integrated branch into trunk

#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  internal delegate void Reporter(double quality, double[] coefficients);
  /// <summary>
  /// Neighborhood Components Analysis
  /// </summary>
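  /// <remarks>
  /// NCA learns a linear transformation A of the inputs such that a stochastic
  /// nearest-neighbor classifier performs well in the transformed space: point i
  /// selects neighbor k with probability p_ik proportional to exp(-||A*x_i - A*x_k||^2),
  /// and the expected number of correctly classified points, sum_i p_i with
  /// p_i = sum of p_ik over same-class neighbors k, is maximized (Goldberger et al., 2005).
  /// </remarks>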
  [Item("Neighborhood Components Analysis (NCA)", "Implementation of Neighborhood Components Analysis based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")]
  [Creatable("Data Analysis")]
  [StorableClass]
  public sealed class NcaAlgorithm : FixedDataAnalysisAlgorithm<IClassificationProblem> {
    #region Parameter Properties
    public IFixedValueParameter<IntValue> KParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["K"]; }
    }
    public IFixedValueParameter<IntValue> DimensionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Dimensions"]; }
    }
    public IConstrainedValueParameter<INCAInitializer> InitializationParameter {
      get { return (IConstrainedValueParameter<INCAInitializer>)Parameters["Initialization"]; }
    }
    public IFixedValueParameter<IntValue> NeighborSamplesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["NeighborSamples"]; }
    }
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
    }
    #endregion

    #region Properties
    private int K {
      get { return KParameter.Value.Value; }
      set { KParameter.Value.Value = value; }
    }
    private int Dimensions {
      get { return DimensionsParameter.Value.Value; }
      set { DimensionsParameter.Value.Value = value; }
    }
    private int NeighborSamples {
      get { return NeighborSamplesParameter.Value.Value; }
      set { NeighborSamplesParameter.Value.Value = value; }
    }
    private int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    private NcaAlgorithm(bool deserializing) : base(deserializing) { }
    private NcaAlgorithm(NcaAlgorithm original, Cloner cloner) : base(original, cloner) { }
    public NcaAlgorithm()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>("K", "The number of nearest neighbors (k) used by the resulting nearest neighbor model.", new IntValue(1)));
      Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(50)));
      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method is allowed to perform. The method may terminate earlier if a local optimum has already been reached.", new IntValue(20)));

      INCAInitializer defaultInitializer = null;
      foreach (var initializer in ApplicationManager.Manager.GetInstances<INCAInitializer>().OrderBy(x => x.ItemName)) {
        if (initializer is LDAInitializer) defaultInitializer = initializer;
        InitializationParameter.ValidValues.Add(initializer);
      }
      if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;

      Problem = new ClassificationProblem();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NcaAlgorithm(this, cloner);
    }

    public override void Prepare() {
      if (Problem != null) base.Prepare();
    }

    protected override void Run() {
      var initializer = InitializationParameter.Value;

      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
      Results.Add(new Result("ClassificationSolution", "The classification solution.", model.CreateClassificationSolution(clonedProblem)));
    }

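    /// <example>
    /// A minimal usage sketch (values are illustrative; assumes an existing
    /// IClassificationProblemData instance <c>problemData</c> and that the
    /// LDAInitializer plugin is loaded):
    /// <code>
    /// // k = 1, dimensions = 2, neighborSamples = 50, iterations = 20
    /// var solution = NcaAlgorithm.CreateClassificationSolution(
    ///   problemData, 1, 2, 50, 20, new LDAInitializer());
    /// </code>
    /// </example>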
    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
      var clonedProblem = (IClassificationProblemData)data.Clone();
      var model = Train(clonedProblem, k, dimensions, neighborSamples, iterations, initializer);
      return model.CreateClassificationSolution(clonedProblem);
    }

    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
      return Train(problemData, k, dimensions, neighborSamples, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
    }

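    // This overload accepts the initial transformation as a 2-dimensional array
    // (rows = input attributes, columns = target dimensions) and flattens it
    // row-major into the vector representation the CG optimizer works on.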
    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, int iterations, double[,] initialMatrix) {
      var matrix = new double[initialMatrix.Length];
      for (int i = 0; i < initialMatrix.GetLength(0); i++)
        for (int j = 0; j < initialMatrix.GetLength(1); j++)
          matrix[i * initialMatrix.GetLength(1) + j] = initialMatrix[i, j];
      return Train(problemData, k, initialMatrix.GetLength(1), neighborSamples, iterations, matrix, null, CancellationToken.None);
    }

    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
      var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
      var attributes = scaledData.GetLength(1);

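      // Set up alglib's nonlinear CG minimizer: zero tolerances for gradient,
      // function, and step leave stopping essentially to the iteration limit,
      // and mincgsetxrep(true) requests a report (xupdated) after every iteration.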
      alglib.mincgstate state;
      alglib.mincgreport rep;
      alglib.mincgcreate(matrix, out state);
      alglib.mincgsetcond(state, 0, 0, 0, iterations);
      alglib.mincgsetxrep(state, true);
      int neighborSampleSize = neighborSamples;
      Optimize(state, scaledData, classes, dimensions, neighborSampleSize, cancellation, reporter);
      alglib.mincgresults(state, out matrix, out rep);

      var transformationMatrix = new double[attributes, dimensions];
      var counter = 0;
      for (var i = 0; i < attributes; i++)
        for (var j = 0; j < dimensions; j++)
          transformationMatrix[i, j] = matrix[counter++];

      return new NcaModel(k, transformationMatrix, data.Dataset, data.TrainingIndices, data.TargetVariable, data.AllowedInputVariables, scaling, data.ClassValues.ToArray());
    }

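    // Drives alglib's reverse-communication interface: each mincgiteration call
    // hands control back either with needfg set (the optimizer wants the function
    // value and gradient at state.x) or with xupdated set (an iteration finished
    // and progress can be reported).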
    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
      while (alglib.mincgiteration(state)) {
        if (cancellation.IsCancellationRequested) break;
        if (state.needfg) {
          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize);
          continue;
        }
        if (state.innerobj.xupdated) {
          if (reporter != null)
            reporter(state.innerobj.f, state.innerobj.x);
          continue;
        }
        throw new InvalidOperationException("Neighborhood Components Analysis: Error in Optimize() (some derivatives were not provided?)");
      }
    }

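    // Computes the (negated) NCA objective and its gradient. Following Goldberger
    // et al. (2005): p_ik = exp(-||A*x_i - A*x_k||^2) / sum_l exp(-||A*x_i - A*x_l||^2)
    // is the probability that instance i picks k as its neighbor (restricted here
    // to the neighborSampleSize closest points), p_i = sum_{k: class(k) = class(i)} p_ik
    // is the probability that i is classified correctly, and f(A) = sum_i p_i is
    // maximized. Since alglib's CG minimizes, func is set to -f(A) and the gradient
    // is negated accordingly.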
    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize) {
      var instances = data.GetLength(0);
      var attributes = data.GetLength(1);

      var AMatrix = new Matrix(A, A.Length / dimensions, dimensions);

      alglib.sparsematrix probabilities;
      alglib.sparsecreate(instances, instances, out probabilities);
      var transformedDistances = new Dictionary<int, double>(instances);
      for (int i = 0; i < instances; i++) {
        var iVector = new Matrix(GetRow(data, i), data.GetLength(1));
        for (int k = 0; k < instances; k++) {
          if (k == i) {
            transformedDistances.Remove(k);
            continue;
          }
          var kVector = new Matrix(GetRow(data, k));
          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength());
        }
        var sample = transformedDistances.OrderByDescending(x => x.Value).Take(neighborSampleSize).ToArray();
        var normalization = sample.Sum(x => x.Value);
        if (normalization > 0) {
          foreach (var s in sample) {
            if (s.Value <= 0) break;
            alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
          }
        }
      }
      alglib.sparseconverttocrs(probabilities); // needed to enumerate in order (top-down and left-right)

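      // First sweep over the sparse p_ik matrix: accumulate pi[r], the probability
      // that training instance r is classified correctly under the stochastic
      // neighbor rule.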
      int t0 = 0, t1 = 0, r, c;
      double val;
      var pi = new double[instances];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        if (classes[r].IsAlmost(classes[c])) {
          pi[r] += val;
        }
      }

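      // Second sweep (this assumes sparseenumerate resets t0/t1 once the previous
      // enumeration finished, so it restarts from the top): accumulate the inner
      // sum of the gradient, sum_{i,k} p_ik * (p_i - [class(i) = class(k)]) * x_ik * x_ik^T,
      // where x_ik = x_i - x_k.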
      var innerSum = new double[attributes, attributes];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
        vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);

        if (classes[r].IsAlmost(classes[c])) {
          vector.OuterProduct(vector).Multiply(-val).AddTo(innerSum);
        }
      }

      func = -pi.Sum();

      r = 0;
      var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
      foreach (var g in newGrad) {
        grad[r++] = g;
      }
    }

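    // Reports optimization progress: since func is the negated objective, -func
    // equals sum_i p_i, and -func / instances is the expected fraction of correctly
    // classified training instances, plotted once per CG iteration.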
    private void ReportQuality(double func, double[] coefficients) {
      var instances = Problem.ProblemData.TrainingIndices.Count();
      DataTable qualities;
      if (!Results.ContainsKey("Optimization")) {
        qualities = new DataTable("Optimization");
        qualities.Rows.Add(new DataRow("Quality", string.Empty));
        Results.Add(new Result("Optimization", qualities));
      } else qualities = (DataTable)Results["Optimization"].Value;
      qualities.Rows["Quality"].Values.Add(-func / instances);

      if (!Results.ContainsKey("Quality")) {
        Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
      } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
    }

    private static IEnumerable<double> GetRow(double[,] data, int row) {
      for (int i = 0; i < data.GetLength(1); i++)
        yield return data[row, i];
    }
  }
}