
source: branches/NCA/HeuristicLab.Algorithms.NCA/3.3/NeighborhoodComponentsAnalysis.cs @ 8464

Last change on this file since 8464 was 8454, checked in by abeham, 12 years ago

#1913:

  • Refactored NCAModel and the NeighborhoodComponentsAnalysis algorithm (a usage sketch of the public Train API follows below)
  • The model now includes a NearestNeighborModel
  • The algorithm can now be canceled (the optimization loop of mincgoptimize was essentially recreated by hand)
  • Scaling should now work properly
File size: 10.6 KB
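The changelog above refers to the public static Train overloads and the cancelable optimization loop defined in the file below. A minimal usage sketch, assuming an IClassificationProblemData instance named problemData has already been loaded elsewhere (the variable name is hypothetical; all other types are declared in this file):

    // Hedged sketch, not part of the file: trains an NCA model with k = 1, reducing to 2 dimensions.
    // Assumes the LDAInitializer plugin is available; otherwise pick another INCAInitializer instance.
    var initializer = ApplicationManager.Manager.GetInstances<INCAInitializer>()
      .OfType<LDAInitializer>().First();
    INCAModel model = NeighborhoodComponentsAnalysis.Train(problemData, 1, 2, initializer);
    var solution = new NCAClassificationSolution(problemData, model);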
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.NCA {
  internal delegate void Reporter(double quality, double[] coefficients);
  /// <summary>
  /// Neighborhood Components Analysis
  /// </summary>
  [Item("Neighborhood Components Analysis", "NCA is described in J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")]
  [Creatable("Data Analysis")]
  [StorableClass]
  public sealed class NeighborhoodComponentsAnalysis : FixedDataAnalysisAlgorithm<IClassificationProblem> {
    #region Parameter Properties
    public IValueLookupParameter<IntValue> KParameter {
      get { return (IValueLookupParameter<IntValue>)Parameters["k"]; }
    }
    public IValueLookupParameter<IntValue> ReduceDimensionsParameter {
      get { return (IValueLookupParameter<IntValue>)Parameters["ReduceDimensions"]; }
    }
    private IConstrainedValueParameter<INCAInitializer> InitializationParameter {
      get { return (IConstrainedValueParameter<INCAInitializer>)Parameters["Initialization"]; }
    }
    #endregion

    #region Properties
    public IntValue K {
      get { return KParameter.Value; }
    }
    public IntValue ReduceDimensions {
      get { return ReduceDimensionsParameter.Value; }
    }
    #endregion

    [StorableConstructor]
    private NeighborhoodComponentsAnalysis(bool deserializing) : base(deserializing) { }
    private NeighborhoodComponentsAnalysis(NeighborhoodComponentsAnalysis original, Cloner cloner) : base(original, cloner) { }
    public NeighborhoodComponentsAnalysis()
      : base() {
      Parameters.Add(new ValueLookupParameter<IntValue>("k", "The k for the nearest neighbor.", new IntValue(1)));
      Parameters.Add(new ValueLookupParameter<IntValue>("ReduceDimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));

      INCAInitializer defaultInitializer = null;
      foreach (var initializer in ApplicationManager.Manager.GetInstances<INCAInitializer>().OrderBy(x => x.ItemName)) {
        if (initializer is LDAInitializer) defaultInitializer = initializer;
        InitializationParameter.ValidValues.Add(initializer);
      }
      if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;

      Problem = new ClassificationProblem();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeighborhoodComponentsAnalysis(this, cloner);
    }

    public override void Prepare() {
      if (Problem != null) base.Prepare();
    }

    protected override void Run() {
      var k = K.Value;
      var dimensions = ReduceDimensions.Value;
      var initializer = InitializationParameter.Value;

      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
      var model = Train(clonedProblem, k, dimensions, initializer.Initialize(clonedProblem, dimensions), ReportQuality, CancellationToken.None);
      var classification = new NCAClassificationSolution(clonedProblem, model);
      Results.Add(new Result("ClassificationSolution", "The classification solution.", classification));
    }

    private void ReportQuality(double func, double[] coefficients) {
      var instances = Problem.ProblemData.TrainingIndices.Count();
      DataTable qualities;
      if (!Results.ContainsKey("Optimization")) {
        qualities = new DataTable("Optimization");
        qualities.Rows.Add(new DataRow("Quality", string.Empty));
        Results.Add(new Result("Optimization", qualities));
      } else qualities = (DataTable)Results["Optimization"].Value;
      qualities.Rows["Quality"].Values.Add(-func / instances);

      if (!Results.ContainsKey("Quality")) {
        Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
      } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
    }

    public static INCAModel Train(IClassificationProblemData problemData, int k, int dimensions, INCAInitializer initializer) {
      return Train(problemData, k, dimensions, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
    }

    public static INCAModel Train(IClassificationProblemData problemData, int k, int dimensions, double[,] initialMatrix) {
      // flatten the initial matrix row-wise into the vector representation expected by the optimizer
      var matrix = new double[initialMatrix.Length];
      for (int i = 0; i < initialMatrix.GetLength(0); i++)
        for (int j = 0; j < initialMatrix.GetLength(1); j++)
          matrix[i * initialMatrix.GetLength(1) + j] = initialMatrix[i, j];
      return Train(problemData, k, dimensions, matrix, null, CancellationToken.None);
    }

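    // Core training routine: scales the input variables, optimizes the flattened transformation
    // matrix via conjugate gradient (alglib.mincg), then reshapes the result into an
    // attributes x dimensions matrix for the NCAModel.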
    private static INCAModel Train(IClassificationProblemData data, int k, int dimensions, double[] matrix, Reporter reporter, CancellationToken cancellation) {
      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
      var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
      var instances = scaledData.GetLength(0);
      var attributes = scaledData.GetLength(1);

      alglib.mincgstate state;
      alglib.mincgreport rep;
      alglib.mincgcreate(matrix, out state);
      alglib.mincgsetcond(state, 0, 0, 0, 20);
      alglib.mincgsetxrep(state, true);
      Optimize(state, scaledData, classes, dimensions, cancellation, reporter);
      alglib.mincgresults(state, out matrix, out rep);

      var transformationMatrix = new double[attributes, dimensions];
      var counter = 0;
      for (var i = 0; i < attributes; i++)
        for (var j = 0; j < dimensions; j++)
          transformationMatrix[i, j] = matrix[counter++];

      return new NCAModel(k, scaledData, scaling, transformationMatrix, data.TargetVariable, data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices), data.AllowedInputVariables);
    }

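    // Re-implementation of alglib.mincgoptimize's driver loop so that cancellation can be
    // checked between iterations: needfg requests the objective value and gradient, xupdated
    // signals a new iterate and triggers progress reporting.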
    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, CancellationToken cancellation, Reporter reporter) {
      while (alglib.mincgiteration(state)) {

        if (cancellation.IsCancellationRequested) break;

        if (state.needfg) {
          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions);
          continue;
        }

        if (state.innerobj.xupdated) {
          if (reporter != null)
            reporter(state.innerobj.f, state.innerobj.x);
          continue;
        }

        throw new InvalidOperationException("Neighborhood Components Analysis: Error in Optimize() (some derivatives were not provided?)");
      }
    }

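    // Computes the negated NCA objective and its gradient with respect to the flattened
    // transformation matrix A: p_ij is the softmax over negative squared distances in the
    // transformed space, pi[r] accumulates the probability of classifying instance r correctly,
    // and innerSum collects the weighted outer products of difference vectors that form the gradient.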
    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions) {
      var instances = data.GetLength(0);
      var attributes = data.GetLength(1);

      var AMatrix = new Matrix(A, A.Length / dimensions, dimensions);

      alglib.sparsematrix probabilities;
      alglib.sparsecreate(instances, instances, out probabilities);
      var transformedDistances = new double[instances];
      for (int i = 0; i < instances - 1; i++) {
        var iVector = new Matrix(GetRow(data, i), data.GetLength(1));
        var denom = 0.0;
        for (int k = 0; k < instances; k++) {
          if (k == i) continue;
          var kVector = new Matrix(GetRow(data, k));
          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength());
          denom += transformedDistances[k];
        }
        if (denom > 1e-05) {
          for (int j = i + 1; j < instances; j++) {
            if (i == j) continue;
            var v = transformedDistances[j] / denom;
            alglib.sparseset(probabilities, i, j, v);
          }
        }
      }
      alglib.sparseconverttocrs(probabilities); // needed to enumerate in order (top-down and left-right)

      int t0 = 0, t1 = 0, r, c;
      double val;
      var pi = new double[instances];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        if (classes[r].IsAlmost(classes[c])) {
          pi[r] += val;
        }
      }

      var innerSum = new double[attributes, attributes];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
        vector.OuterProduct(vector).Multiply(2.0 * val * pi[r]).AddTo(innerSum);

        if (classes[r].IsAlmost(classes[c])) {
          vector.OuterProduct(vector).Multiply(-2.0 * val).AddTo(innerSum);
        }
      }

      func = -2.0 * pi.Sum();

      r = 0;
      var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
      foreach (var g in newGrad) {
        grad[r++] = g;
      }
    }

    private static IEnumerable<double> GetRow(double[,] data, int row) {
      for (int i = 0; i < data.GetLength(1); i++)
        yield return data[row, i];
    }
  }
}