
source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/TSNE/TSNEAlgorithm.cs @ 15318

Last change on this file since 15318 was 15249, checked in by gkronber, 7 years ago

#2699,#2700
merged r14862, r14863, r14911, r14936, r15156, r15157, r15158, r15164, r15169, r15207:15209, r15225, r15227, r15234, r15248 from trunk to stable

File size: 22.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Drawing;
25using System.Linq;
26using System.Threading;
27using HeuristicLab.Analysis;
28using HeuristicLab.Common;
29using HeuristicLab.Core;
30using HeuristicLab.Data;
31using HeuristicLab.Optimization;
32using HeuristicLab.Parameters;
33using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
34using HeuristicLab.PluginInfrastructure;
35using HeuristicLab.Problems.DataAnalysis;
36using HeuristicLab.Random;
37
namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// t-distributed stochastic neighbourhood embedding (tSNE) projects the data into a low-dimensional
  /// space to allow visual cluster identification.
  /// </summary>
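  /// <remarks>
  /// Minimal usage sketch (illustrative only; the problem instance and the Start() call are assumptions
  /// based on the standard HeuristicLab algorithm API and are not part of this file):
  /// <code>
  /// var tsne = new TSNEAlgorithm {
  ///   Perplexity = 25,     // roughly comparable to k in a k-nearest neighbour algorithm
  ///   Theta = 0.5,         // Barnes-Hut approximation accuracy (0 = exact gradients)
  ///   NewDimensions = 2,   // project to 2D for the scatter plot
  ///   MaxIterations = 1000
  /// };
  /// tsne.Problem = anyDataAnalysisProblem;  // hypothetical IDataAnalysisProblem instance
  /// tsne.Start();                           // assumed IExecutable API of the algorithm base class
  /// </code>
  /// </remarks>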
  [Item("tSNE", "t-distributed stochastic neighbourhood embedding projects the data into a low-" +
                "dimensional space to allow visual cluster identification. Implemented similarly to: https://lvdmaaten.github.io/tsne/#implementations (Barnes-Hut t-SNE). Described in: https://lvdmaaten.github.io/publications/papers/JMLR_2014.pdf")]
  [Creatable(CreatableAttribute.Categories.DataAnalysis, Priority = 100)]
  [StorableClass]
  public sealed class TSNEAlgorithm : BasicAlgorithm {
    public override bool SupportsPause {
      get { return true; }
    }
    public override Type ProblemType {
      get { return typeof(IDataAnalysisProblem); }
    }
    public new IDataAnalysisProblem Problem {
      get { return (IDataAnalysisProblem)base.Problem; }
      set { base.Problem = value; }
    }

    #region parameter names
    private const string DistanceFunctionParameterName = "DistanceFunction";
    private const string PerplexityParameterName = "Perplexity";
    private const string ThetaParameterName = "Theta";
    private const string NewDimensionsParameterName = "Dimensions";
    private const string MaxIterationsParameterName = "MaxIterations";
    private const string StopLyingIterationParameterName = "StopLyingIteration";
    private const string MomentumSwitchIterationParameterName = "MomentumSwitchIteration";
    private const string InitialMomentumParameterName = "InitialMomentum";
    private const string FinalMomentumParameterName = "FinalMomentum";
    private const string EtaParameterName = "Eta";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string SeedParameterName = "Seed";
    private const string ClassesNameParameterName = "ClassesName";
    private const string NormalizationParameterName = "Normalization";
    private const string UpdateIntervalParameterName = "UpdateInterval";
    #endregion

    #region result names
    private const string IterationResultName = "Iteration";
    private const string ErrorResultName = "Error";
    private const string ErrorPlotResultName = "Error plot";
    private const string ScatterPlotResultName = "Scatterplot";
    private const string DataResultName = "Projected data";
    #endregion

    #region parameter properties
    public IFixedValueParameter<DoubleValue> PerplexityParameter {
      get { return Parameters[PerplexityParameterName] as IFixedValueParameter<DoubleValue>; }
    }
    public IFixedValueParameter<PercentValue> ThetaParameter {
      get { return Parameters[ThetaParameterName] as IFixedValueParameter<PercentValue>; }
    }
    public IFixedValueParameter<IntValue> NewDimensionsParameter {
      get { return Parameters[NewDimensionsParameterName] as IFixedValueParameter<IntValue>; }
    }
    public IConstrainedValueParameter<IDistance<double[]>> DistanceFunctionParameter {
      get { return Parameters[DistanceFunctionParameterName] as IConstrainedValueParameter<IDistance<double[]>>; }
    }
    public IFixedValueParameter<IntValue> MaxIterationsParameter {
      get { return Parameters[MaxIterationsParameterName] as IFixedValueParameter<IntValue>; }
    }
    public IFixedValueParameter<IntValue> StopLyingIterationParameter {
      get { return Parameters[StopLyingIterationParameterName] as IFixedValueParameter<IntValue>; }
    }
    public IFixedValueParameter<IntValue> MomentumSwitchIterationParameter {
      get { return Parameters[MomentumSwitchIterationParameterName] as IFixedValueParameter<IntValue>; }
    }
    public IFixedValueParameter<DoubleValue> InitialMomentumParameter {
      get { return Parameters[InitialMomentumParameterName] as IFixedValueParameter<DoubleValue>; }
    }
    public IFixedValueParameter<DoubleValue> FinalMomentumParameter {
      get { return Parameters[FinalMomentumParameterName] as IFixedValueParameter<DoubleValue>; }
    }
    public IFixedValueParameter<DoubleValue> EtaParameter {
      get { return Parameters[EtaParameterName] as IFixedValueParameter<DoubleValue>; }
    }
    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return Parameters[SetSeedRandomlyParameterName] as IFixedValueParameter<BoolValue>; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return Parameters[SeedParameterName] as IFixedValueParameter<IntValue>; }
    }
    public IConstrainedValueParameter<StringValue> ClassesNameParameter {
      get { return Parameters[ClassesNameParameterName] as IConstrainedValueParameter<StringValue>; }
    }
    public IFixedValueParameter<BoolValue> NormalizationParameter {
      get { return Parameters[NormalizationParameterName] as IFixedValueParameter<BoolValue>; }
    }
    public IFixedValueParameter<IntValue> UpdateIntervalParameter {
      get { return Parameters[UpdateIntervalParameterName] as IFixedValueParameter<IntValue>; }
    }
    #endregion

    #region Properties
    public IDistance<double[]> DistanceFunction {
      get { return DistanceFunctionParameter.Value; }
    }
    public double Perplexity {
      get { return PerplexityParameter.Value.Value; }
      set { PerplexityParameter.Value.Value = value; }
    }
    public double Theta {
      get { return ThetaParameter.Value.Value; }
      set { ThetaParameter.Value.Value = value; }
    }
    public int NewDimensions {
      get { return NewDimensionsParameter.Value.Value; }
      set { NewDimensionsParameter.Value.Value = value; }
    }
    public int MaxIterations {
      get { return MaxIterationsParameter.Value.Value; }
      set { MaxIterationsParameter.Value.Value = value; }
    }
    public int StopLyingIteration {
      get { return StopLyingIterationParameter.Value.Value; }
      set { StopLyingIterationParameter.Value.Value = value; }
    }
    public int MomentumSwitchIteration {
      get { return MomentumSwitchIterationParameter.Value.Value; }
      set { MomentumSwitchIterationParameter.Value.Value = value; }
    }
    public double InitialMomentum {
      get { return InitialMomentumParameter.Value.Value; }
      set { InitialMomentumParameter.Value.Value = value; }
    }
    public double FinalMomentum {
      get { return FinalMomentumParameter.Value.Value; }
      set { FinalMomentumParameter.Value.Value = value; }
    }
    public double Eta {
      get { return EtaParameter.Value.Value; }
      set { EtaParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public string ClassesName {
      get { return ClassesNameParameter.Value != null ? ClassesNameParameter.Value.Value : null; }
      set { ClassesNameParameter.Value.Value = value; }
    }
    public bool Normalization {
      get { return NormalizationParameter.Value.Value; }
      set { NormalizationParameter.Value.Value = value; }
    }

    public int UpdateInterval {
      get { return UpdateIntervalParameter.Value.Value; }
      set { UpdateIntervalParameter.Value.Value = value; }
    }
    #endregion

    #region Constructors & Cloning
    [StorableConstructor]
    private TSNEAlgorithm(bool deserializing) : base(deserializing) { }

    private TSNEAlgorithm(TSNEAlgorithm original, Cloner cloner) : base(original, cloner) {
      if (original.dataRowNames != null)
        this.dataRowNames = new Dictionary<string, List<int>>(original.dataRowNames);
      if (original.dataRows != null)
        this.dataRows = original.dataRows.ToDictionary(kvp => kvp.Key, kvp => cloner.Clone(kvp.Value));
      if (original.state != null)
        this.state = cloner.Clone(original.state);
      this.iter = original.iter;
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new TSNEAlgorithm(this, cloner); }
    public TSNEAlgorithm() {
      var distances = new ItemSet<IDistance<double[]>>(ApplicationManager.Manager.GetInstances<IDistance<double[]>>());
      Parameters.Add(new ConstrainedValueParameter<IDistance<double[]>>(DistanceFunctionParameterName, "The distance function used to differentiate similar from non-similar points", distances, distances.OfType<EuclideanDistance>().FirstOrDefault()));
      Parameters.Add(new FixedValueParameter<DoubleValue>(PerplexityParameterName, "Perplexity-parameter of tSNE. Comparable to k in a k-nearest neighbour algorithm. Recommended value is floor(number of points / 3) or lower", new DoubleValue(25)));
      Parameters.Add(new FixedValueParameter<PercentValue>(ThetaParameterName, "Value describing how much approximated " +
                                                                              "gradients may differ from exact gradients. Set to 0 for exact calculation and in [0,1] otherwise. " +
                                                                              "Appropriate values for theta are between 0.1 and 0.7 (default = 0.5). CAUTION: exact calculation of " +
                                                                              "forces requires building a non-sparse N*N matrix where N is the number of data points. This may " +
                                                                              "exceed memory limitations. The function is designed to run on large (N > 5000) data sets. It may give" +
                                                                              " poor performance on very small data sets (it is better to use a standard t-SNE implementation on such data).", new PercentValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NewDimensionsParameterName, "Dimensionality of the projected space (usually 2 for easy visual analysis)", new IntValue(2)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaxIterationsParameterName, "Maximum number of iterations for gradient descent.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(StopLyingIterationParameterName, "Number of iterations after which p is no longer approximated.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(MomentumSwitchIterationParameterName, "Number of iterations after which the momentum in the gradient descent is switched.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(InitialMomentumParameterName, "The initial momentum in the gradient descent.", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(FinalMomentumParameterName, "The final momentum.", new DoubleValue(0.8)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(EtaParameterName, "Gradient descent learning rate.", new DoubleValue(10)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "If the seed should be random.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The seed used if it should not be random.", new IntValue(0)));
      Parameters.Add(new OptionalConstrainedValueParameter<StringValue>(ClassesNameParameterName, "Name of the column specifying the class labels of each data point. If this is not set, training/test is used as labels."));
      Parameters.Add(new FixedValueParameter<BoolValue>(NormalizationParameterName, "Whether the data should be zero-centered and have variance of 1 for each variable, so different scalings are ignored.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(UpdateIntervalParameterName, "The interval after which the results will be updated.", new IntValue(50)));
      Parameters[UpdateIntervalParameterName].Hidden = true;

      MomentumSwitchIterationParameter.Hidden = true;
      InitialMomentumParameter.Hidden = true;
      FinalMomentumParameter.Hidden = true;
      StopLyingIterationParameter.Hidden = true;
      EtaParameter.Hidden = false;
      Problem = new RegressionProblem();
    }
    #endregion

    [Storable]
    private Dictionary<string, List<int>> dataRowNames;
    [Storable]
    private Dictionary<string, ScatterPlotDataRow> dataRows;
    [Storable]
    private TSNEStatic<double[]>.TSNEState state;
    [Storable]
    private int iter;

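    // Prepare() discards the cached t-SNE state so the next Run() re-initializes everything.
    // Pausing, in contrast, keeps the [Storable] fields above (dataRowNames, dataRows, state, iter),
    // which is what makes SupportsPause work: a resumed Run() simply continues the loop at 'iter'.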
    public override void Prepare() {
      base.Prepare();
      dataRowNames = null;
      dataRows = null;
      state = null;
    }

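    // Run() drives the Barnes-Hut t-SNE gradient descent: on the first call (or after Prepare) it
    // builds the data matrix from the allowed input variables, optionally normalizes it, and creates
    // the TSNEState; afterwards it iterates until MaxIterations or cancellation, analyzing results
    // every UpdateInterval iterations and once more at the end.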
    protected override void Run(CancellationToken cancellationToken) {
      var problemData = Problem.ProblemData;
      // set up and initialize everything if necessary
      if (state == null) {
        if (SetSeedRandomly) Seed = new System.Random().Next();
        var random = new MersenneTwister((uint)Seed);
        var dataset = problemData.Dataset;
        var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
        var data = new double[dataset.Rows][];
        for (var row = 0; row < dataset.Rows; row++)
          data[row] = allowedInputVariables.Select(col => dataset.GetDoubleValue(col, row)).ToArray();

        if (Normalization) data = NormalizeData(data);

        state = TSNEStatic<double[]>.CreateState(data, DistanceFunction, random, NewDimensions, Perplexity, Theta,
          StopLyingIteration, MomentumSwitchIteration, InitialMomentum, FinalMomentum, Eta);

        SetUpResults(data);
        iter = 0;
      }
      for (; iter < MaxIterations && !cancellationToken.IsCancellationRequested; iter++) {
        if (iter % UpdateInterval == 0)
          Analyze(state);
        TSNEStatic<double[]>.Iterate(state);
      }
      Analyze(state);
    }

    #region Events
    protected override void OnProblemChanged() {
      base.OnProblemChanged();
      if (Problem == null) return;
      OnProblemDataChanged(this, null);
    }

    protected override void RegisterProblemEvents() {
      base.RegisterProblemEvents();
      Problem.ProblemDataChanged += OnProblemDataChanged;
    }
    protected override void DeregisterProblemEvents() {
      base.DeregisterProblemEvents();
      Problem.ProblemDataChanged -= OnProblemDataChanged;
    }

    private void OnProblemDataChanged(object sender, EventArgs args) {
      if (Problem == null || Problem.ProblemData == null) return;
      if (!Parameters.ContainsKey(ClassesNameParameterName)) return;
      ClassesNameParameter.ValidValues.Clear();
      foreach (var input in Problem.ProblemData.InputVariables) ClassesNameParameter.ValidValues.Add(input);
    }

    #endregion

    #region Helpers
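    // SetUpResults decides how the projected points will be grouped and colored in the scatter plot:
    // if ClassesName refers to a string variable, one row per distinct class value is created; if it
    // refers to a double variable, the value range is split into 8 contour bins with heat-map colors;
    // otherwise the points are simply split into "Training" and "Test". It also (re)creates the
    // Iteration, Error, Error plot, Scatterplot and Projected data results.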
    private void SetUpResults(IReadOnlyCollection<double[]> data) {
      if (Results == null) return;
      var results = Results;
      dataRowNames = new Dictionary<string, List<int>>();
      dataRows = new Dictionary<string, ScatterPlotDataRow>();
      var problemData = Problem.ProblemData;

      // color data points according to the classes variable (be it double or string)
      if (problemData.Dataset.VariableNames.Contains(ClassesName)) {
        if ((problemData.Dataset as Dataset).VariableHasType<string>(ClassesName)) {
          var classes = problemData.Dataset.GetStringValues(ClassesName).ToArray();
          for (var i = 0; i < classes.Length; i++) {
            if (!dataRowNames.ContainsKey(classes[i])) dataRowNames.Add(classes[i], new List<int>());
            dataRowNames[classes[i]].Add(i);
          }
        } else if ((problemData.Dataset as Dataset).VariableHasType<double>(ClassesName)) {
          var classValues = problemData.Dataset.GetDoubleValues(ClassesName).ToArray();
          var max = classValues.Max() + 0.1;
          var min = classValues.Min() - 0.1;
          const int contours = 8;
          for (var i = 0; i < contours; i++) {
            var contourname = GetContourName(i, min, max, contours);
            dataRowNames.Add(contourname, new List<int>());
            dataRows.Add(contourname, new ScatterPlotDataRow(contourname, "", new List<Point2D<double>>()));
            dataRows[contourname].VisualProperties.Color = GetHeatMapColor(i, contours);
            dataRows[contourname].VisualProperties.PointSize = i + 3;
          }
          for (var i = 0; i < classValues.Length; i++) {
            dataRowNames[GetContourName(classValues[i], min, max, contours)].Add(i);
          }
        }
      } else {
        dataRowNames.Add("Training", problemData.TrainingIndices.ToList());
        dataRowNames.Add("Test", problemData.TestIndices.ToList());
      }

      if (!results.ContainsKey(IterationResultName)) results.Add(new Result(IterationResultName, new IntValue(0)));
      else ((IntValue)results[IterationResultName].Value).Value = 0;

      if (!results.ContainsKey(ErrorResultName)) results.Add(new Result(ErrorResultName, new DoubleValue(0)));
      else ((DoubleValue)results[ErrorResultName].Value).Value = 0;

      if (!results.ContainsKey(ErrorPlotResultName)) results.Add(new Result(ErrorPlotResultName, new DataTable(ErrorPlotResultName, "Development of errors during gradient descent")));
      else results[ErrorPlotResultName].Value = new DataTable(ErrorPlotResultName, "Development of errors during gradient descent");

      var plot = results[ErrorPlotResultName].Value as DataTable;
      if (plot == null) throw new ArgumentException("could not create/access error data table in results collection");

      if (!plot.Rows.ContainsKey("errors")) plot.Rows.Add(new DataRow("errors"));
      plot.Rows["errors"].Values.Clear();
      plot.Rows["errors"].VisualProperties.StartIndexZero = true;

      results.Add(new Result(ScatterPlotResultName, "Plot of the projected data", new ScatterPlot(DataResultName, "")));
      results.Add(new Result(DataResultName, "Projected Data", new DoubleMatrix()));
    }

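    // Analyze records the current iteration and error, appends the error to the error plot, and
    // refreshes both the "Projected data" matrix and the scatter plot with the (rescaled) embedding.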
    private void Analyze(TSNEStatic<double[]>.TSNEState tsneState) {
      if (Results == null) return;
      var results = Results;
      var plot = results[ErrorPlotResultName].Value as DataTable;
      if (plot == null) throw new ArgumentException("Could not create/access error data table in results collection.");
      var errors = plot.Rows["errors"].Values;
      var c = tsneState.EvaluateError();
      errors.Add(c);
      ((IntValue)results[IterationResultName].Value).Value = tsneState.iter;
      ((DoubleValue)results[ErrorResultName].Value).Value = errors.Last();

      var ndata = Normalize(tsneState.newData);
      results[DataResultName].Value = new DoubleMatrix(ndata);
      var splot = results[ScatterPlotResultName].Value as ScatterPlot;
      FillScatterPlot(ndata, splot);
    }

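    // FillScatterPlot replaces the points of each named row (class, contour bin, or Training/Test)
    // with the first two coordinates of the projected data for the indices collected in dataRowNames.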
    private void FillScatterPlot(double[,] lowDimData, ScatterPlot plot) {
      foreach (var rowName in dataRowNames.Keys) {
        if (!plot.Rows.ContainsKey(rowName))
          plot.Rows.Add(dataRows.ContainsKey(rowName) ? dataRows[rowName] : new ScatterPlotDataRow(rowName, "", new List<Point2D<double>>()));
        plot.Rows[rowName].Points.Replace(dataRowNames[rowName].Select(i => new Point2D<double>(lowDimData[i, 0], lowDimData[i, 1])));
      }
    }

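    // Normalize rescales each column of the projected data for plotting: values are shifted to the
    // mid-range (max + min) / 2 and divided by the range (max - min), so every column ends up in
    // [-0.5, 0.5]; columns with zero range are left unchanged. Example: a column spanning [2, 6]
    // maps 6 to (6 - 4) / 4 = 0.5 and 3 to (3 - 4) / 4 = -0.25.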
    private static double[,] Normalize(double[,] data) {
      var max = new double[data.GetLength(1)];
      var min = new double[data.GetLength(1)];
      var res = new double[data.GetLength(0), data.GetLength(1)];
      for (var i = 0; i < max.Length; i++) max[i] = min[i] = data[0, i];
      for (var i = 0; i < data.GetLength(0); i++)
        for (var j = 0; j < data.GetLength(1); j++) {
          var v = data[i, j];
          max[j] = Math.Max(max[j], v);
          min[j] = Math.Min(min[j], v);
        }
      for (var i = 0; i < data.GetLength(0); i++) {
        for (var j = 0; j < data.GetLength(1); j++) {
          var d = max[j] - min[j];
          var s = data[i, j] - (max[j] + min[j]) / 2;  //shift data
          if (d.IsAlmost(0)) res[i, j] = data[i, j];   //no scaling possible
          else res[i, j] = s / d;  //scale data
        }
      }
      return res;
    }

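    // NormalizeData preprocesses the inputs as in van der Maaten's reference implementation: each
    // variable is mean-centered and divided by the maximum absolute value of its original values;
    // variables whose maximum absolute value is (almost) zero are only centered. Note that this
    // scales to a maximum absolute value of 1 rather than to unit variance.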
    private static double[][] NormalizeData(IReadOnlyList<double[]> data) {
      // as in tSNE implementation by van der Maaten
      var n = data[0].Length;
      var mean = new double[n];
      var max = new double[n];
      var nData = new double[data.Count][];
      for (var i = 0; i < n; i++) {
        mean[i] = Enumerable.Range(0, data.Count).Select(x => data[x][i]).Average();
        max[i] = Enumerable.Range(0, data.Count).Max(x => Math.Abs(data[x][i]));
      }
      for (var i = 0; i < data.Count; i++) {
        nData[i] = new double[n];
        for (var j = 0; j < n; j++) nData[i][j] = max[j].IsAlmost(0) ? data[i][j] - mean[j] : (data[i][j] - mean[j]) / max[j];
      }
      return nData;
    }

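    // GetHeatMapColor maps a contour index to a green-yellow-red gradient: q = contourNr / noContours
    // gives pure green at q = 0, yellow at q = 0.5 and red as q approaches 1 (Color.FromArgb is used
    // here with (red, green, blue) arguments).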
    private static Color GetHeatMapColor(int contourNr, int noContours) {
      var q = (double)contourNr / noContours;  // q in [0,1]
      var c = q < 0.5 ? Color.FromArgb((int)(q * 2 * 255), 255, 0) : Color.FromArgb(255, (int)((1 - q) * 2 * 255), 0);
      return c;
    }

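    // The GetContourName overloads bin a double value into one of noContours equally wide intervals
    // between min and max and return the interval as a half-open range label of the form "[lower;upper)".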
    private static string GetContourName(double value, double min, double max, int noContours) {
      var size = (max - min) / noContours;
      var contourNr = (int)((value - min) / size);
      return GetContourName(contourNr, min, max, noContours);
    }

    private static string GetContourName(int i, double min, double max, int noContours) {
      var size = (max - min) / noContours;
      return "[" + (min + i * size) + ";" + (min + (i + 1) * size) + ")";
    }
    #endregion
  }
}