
source: branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs @ 10717

Last change on this file since 10717 was 8523, checked in by abeham, 12 years ago

#1913: added classification penalty into fitness function and gradient

File size: 12.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  internal delegate void Reporter(double quality, double[] coefficients);
  /// <summary>
  /// Neighborhood Components Analysis
  /// </summary>
  [Item("Neighborhood Components Analysis (NCA)", "Implementation of Neighborhood Components Analysis based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")]
  [Creatable("Data Analysis")]
  [StorableClass]
  public sealed class NcaAlgorithm : FixedDataAnalysisAlgorithm<IClassificationProblem> {
    #region Parameter Properties
    public IFixedValueParameter<IntValue> KParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["K"]; }
    }
    public IFixedValueParameter<IntValue> DimensionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Dimensions"]; }
    }
    public IConstrainedValueParameter<INCAInitializer> InitializationParameter {
      get { return (IConstrainedValueParameter<INCAInitializer>)Parameters["Initialization"]; }
    }
    public IFixedValueParameter<IntValue> NeighborSamplesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["NeighborSamples"]; }
    }
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
    }
    #endregion

    #region Properties
    private int K {
      get { return KParameter.Value.Value; }
      set { KParameter.Value.Value = value; }
    }
    private int Dimensions {
      get { return DimensionsParameter.Value.Value; }
      set { DimensionsParameter.Value.Value = value; }
    }
    private int NeighborSamples {
      get { return NeighborSamplesParameter.Value.Value; }
      set { NeighborSamplesParameter.Value.Value = value; }
    }
    private int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    private NcaAlgorithm(bool deserializing) : base(deserializing) { }
    private NcaAlgorithm(NcaAlgorithm original, Cloner cloner) : base(original, cloner) { }
    public NcaAlgorithm()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(1)));
      Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(50)));
      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(20)));

      INCAInitializer defaultInitializer = null;
      foreach (var initializer in ApplicationManager.Manager.GetInstances<INCAInitializer>().OrderBy(x => x.ItemName)) {
        if (initializer is LDAInitializer) defaultInitializer = initializer;
        InitializationParameter.ValidValues.Add(initializer);
      }
      if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;

      Problem = new ClassificationProblem();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NcaAlgorithm(this, cloner);
    }

    public override void Prepare() {
      if (Problem != null) base.Prepare();
    }

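    // Clones the problem data, obtains an initial transformation matrix from the
    // selected initializer, trains the model via conjugate gradient descent and
    // adds the resulting classification solution to the results.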
    protected override void Run() {
      var initializer = InitializationParameter.Value;

      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
      Results.Add(new Result("ClassificationSolution", "The classification solution.", model.CreateClassificationSolution(clonedProblem)));
    }

    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
      var clonedProblem = (IClassificationProblemData)data.Clone();
      var model = Train(clonedProblem, k, dimensions, neighborSamples, iterations, initializer);
      return model.CreateClassificationSolution(clonedProblem);
    }

    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
      return Train(problemData, k, dimensions, neighborSamples, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
    }

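    // Convenience overload: flattens the initial matrix into the row-major vector
    // expected by the optimizer and infers the target dimensionality from the
    // matrix' column count.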
    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, int iterations, double[,] initialMatrix) {
      var matrix = new double[initialMatrix.Length];
      for (int i = 0; i < initialMatrix.GetLength(0); i++)
        for (int j = 0; j < initialMatrix.GetLength(1); j++)
          matrix[i * initialMatrix.GetLength(1) + j] = initialMatrix[i, j];
      return Train(problemData, k, initialMatrix.GetLength(1), neighborSamples, iterations, matrix, null, CancellationToken.None);
    }

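    // Scales the inputs, precomputes the pairwise classification penalties and
    // minimizes the penalized NCA objective with ALGLIB's conjugate gradient (CG)
    // optimizer; the optimized flat vector is reshaped into the attributes x
    // dimensions transformation matrix of the resulting model.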
    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
      var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
      var attributes = scaledData.GetLength(1);

      var penalties = new Dictionary<double, Dictionary<double, double>>();
      foreach (var c in data.ClassValues) {
        penalties[c] = new Dictionary<double, double>();
        foreach (var r in data.ClassValues)
          penalties[c][r] = data.GetClassificationPenalty(c, r);
      }

      alglib.mincgstate state;
      alglib.mincgreport rep;
      alglib.mincgcreate(matrix, out state);
      alglib.mincgsetcond(state, 0, 0, 0, iterations);
      alglib.mincgsetxrep(state, true);
      int neighborSampleSize = neighborSamples;
      Optimize(state, scaledData, classes, penalties, dimensions, neighborSampleSize, cancellation, reporter);
      alglib.mincgresults(state, out matrix, out rep);

      var transformationMatrix = new double[attributes, dimensions];
      var counter = 0;
      for (var i = 0; i < attributes; i++)
        for (var j = 0; j < dimensions; j++)
          transformationMatrix[i, j] = matrix[counter++];

      return new NcaModel(k, transformationMatrix, data.Dataset, data.TrainingIndices, data.TargetVariable, data.AllowedInputVariables, scaling, data.ClassValues.ToArray());
    }

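    // Drives ALGLIB's reverse-communication CG loop: needfg requests the function
    // value and gradient (delegated to Gradient), xupdated signals a new iterate
    // that is forwarded to the reporter callback.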
    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
      while (alglib.mincgiteration(state)) {
        if (cancellation.IsCancellationRequested) break;
        if (state.needfg) {
          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, penalties, dimensions, neighborSampleSize);
          continue;
        }
        if (state.innerobj.xupdated) {
          if (reporter != null)
            reporter(state.innerobj.f, state.innerobj.x);
          continue;
        }
        throw new InvalidOperationException("Neighborhood Components Analysis: Error in Optimize() (some derivatives were not provided?)");
      }
    }

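    // Computes the penalized NCA objective and its gradient for the flattened
    // transformation A: each instance i weights its sampled neighbors k with the
    // softmax probabilities p_ik ~ exp(-||x_i*A - x_k*A||^2); the objective sums
    // p_ik times the penalty of the class pair (c_i, c_k) and the gradient is
    // assembled from outer products of the pairwise difference vectors, following
    // Goldberger et al. (2005).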
    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize) {
      var instances = data.GetLength(0);
      var attributes = data.GetLength(1);

      var AMatrix = new Matrix(A, A.Length / dimensions, dimensions);

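      // Build a sparse row-stochastic matrix of neighbor probabilities: for each
      // instance only the neighborSampleSize nearest neighbors in the transformed
      // space (largest exp(-distance) values) are kept and normalized.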
      alglib.sparsematrix probabilities;
      alglib.sparsecreate(instances, instances, out probabilities);
      var transformedDistances = new Dictionary<int, double>(instances);
      for (int i = 0; i < instances; i++) {
        var iVector = new Matrix(GetRow(data, i), data.GetLength(1));
        for (int k = 0; k < instances; k++) {
          if (k == i) {
            transformedDistances.Remove(k);
            continue;
          }
          var kVector = new Matrix(GetRow(data, k));
          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength());
        }
        var sample = transformedDistances.OrderByDescending(x => x.Value).Take(neighborSampleSize).ToArray();
        var normalization = sample.Sum(x => x.Value);
        if (normalization > 0) {
          foreach (var s in sample) {
            if (s.Value <= 0) break;
            alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
          }
        }
      }
      alglib.sparseconverttocrs(probabilities); // needed to enumerate in order (top-down and left-right)

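      // Objective value: accumulate the penalized probability mass; pi[r] holds
      // the penalty-weighted probability sum of instance r, func their total.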
      int t0 = 0, t1 = 0, r, c;
      double val;
      var pi = new double[instances];
      func = 0;
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        double vp = val * penalties[classes[r]][classes[c]];
        pi[r] += vp;
        func += vp;
      }

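      // Gradient accumulation: every nonzero p_rc contributes the outer product
      // of the difference vector (x_r - x_c), once weighted by val * pi[r] and
      // once by -val * penalty, as obtained by differentiating the objective.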
      t0 = 0; t1 = 0;
      var innerSum = new double[attributes, attributes];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
        vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);
        vector.OuterProduct(vector).Multiply(-val * penalties[classes[r]][classes[c]]).AddTo(innerSum);
      }

      r = 0;
      var newGrad = AMatrix.Multiply(2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
      foreach (var g in newGrad) {
        grad[r++] = g;
      }
    }

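    // Reports the average penalty per training instance after each CG iteration,
    // both as a row in the "Optimization" data table and as a scalar "Penalty" result.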
    private void ReportQuality(double func, double[] coefficients) {
      var instances = Problem.ProblemData.TrainingIndices.Count();
      DataTable qualities;
      if (!Results.ContainsKey("Optimization")) {
        qualities = new DataTable("Optimization");
        qualities.Rows.Add(new DataRow("Penalty", string.Empty));
        Results.Add(new Result("Optimization", qualities));
      } else qualities = (DataTable)Results["Optimization"].Value;
      qualities.Rows["Penalty"].Values.Add(func / instances);

      if (!Results.ContainsKey("Penalty")) {
        Results.Add(new Result("Penalty", new DoubleValue(func / instances)));
      } else ((DoubleValue)Results["Penalty"].Value).Value = func / instances;
    }

    private static IEnumerable<double> GetRow(double[,] data, int row) {
      for (int i = 0; i < data.GetLength(1); i++)
        yield return data[row, i];
    }
  }
}
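
A minimal usage sketch (not part of the file above): assuming the NCA plugin is loaded and an IClassificationProblemData instance problemData is at hand, the static API can be invoked roughly as follows; the LDAInitializer instantiation and the parameter values (k = 1, 2 dimensions, 50 neighbor samples, 20 iterations) are illustrative.

using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis;

// Reduce the inputs to 2 dimensions and build a 1-nearest-neighbor solution;
// LDAInitializer is assumed here to provide the initial transformation matrix.
INCAInitializer initializer = new LDAInitializer();
var solution = NcaAlgorithm.CreateClassificationSolution(problemData, 1, 2, 50, 20, initializer);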