
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/TSNE/TSNEAlgorithm.cs @ 15532

Last change on this file since 15532 was 15532, checked in by bwerth, 6 years ago

#2850 merged Weighted TSNE to trunk

File size: 28.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// t-distributed stochastic neighbour embedding (tSNE) projects the data into a low-dimensional
  /// space to allow visual cluster identification.
  /// </summary>
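  /// <remarks>
  /// The algorithm computes pairwise affinities between the original data points (controlled by the
  /// perplexity parameter) and then moves the projected points by gradient descent until their
  /// pairwise similarities match those affinities. With a theta value greater than 0 the gradients
  /// are approximated with the Barnes-Hut scheme, trading accuracy for speed on larger data sets.
  /// </remarks>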
  [Item("tSNE", "t-distributed stochastic neighbour embedding projects the data into a low-" +
                "dimensional space to allow visual cluster identification. Implemented similarly to: https://lvdmaaten.github.io/tsne/#implementations (Barnes-Hut t-SNE). Described in: https://lvdmaaten.github.io/publications/papers/JMLR_2014.pdf")]
  [Creatable(CreatableAttribute.Categories.DataAnalysis, Priority = 100)]
  [StorableClass]
  public sealed class TSNEAlgorithm : BasicAlgorithm {
    public override bool SupportsPause {
      get { return true; }
    }
    public override Type ProblemType {
      get { return typeof(IDataAnalysisProblem); }
    }
    public new IDataAnalysisProblem Problem {
      get { return (IDataAnalysisProblem) base.Problem; }
      set { base.Problem = value; }
    }

    #region Parameter names
    private const string DistanceFunctionParameterName = "DistanceFunction";
    private const string PerplexityParameterName = "Perplexity";
    private const string ThetaParameterName = "Theta";
    private const string NewDimensionsParameterName = "Dimensions";
    private const string MaxIterationsParameterName = "MaxIterations";
    private const string StopLyingIterationParameterName = "StopLyingIteration";
    private const string MomentumSwitchIterationParameterName = "MomentumSwitchIteration";
    private const string InitialMomentumParameterName = "InitialMomentum";
    private const string FinalMomentumParameterName = "FinalMomentum";
    private const string EtaParameterName = "Eta";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string SeedParameterName = "Seed";
    private const string ClassesNameParameterName = "ClassesName";
    private const string NormalizationParameterName = "Normalization";
    private const string RandomInitializationParameterName = "RandomInitialization";
    private const string UpdateIntervalParameterName = "UpdateInterval";
    #endregion

    #region Result names
    private const string IterationResultName = "Iteration";
    private const string ErrorResultName = "Error";
    private const string ErrorPlotResultName = "Error plot";
    private const string ScatterPlotResultName = "Scatterplot";
    private const string DataResultName = "Projected data";
    #endregion

    #region Parameter properties
    public IFixedValueParameter<DoubleValue> PerplexityParameter {
      get { return (IFixedValueParameter<DoubleValue>) Parameters[PerplexityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ThetaParameter {
      get { return (IFixedValueParameter<PercentValue>) Parameters[ThetaParameterName]; }
    }
    public IFixedValueParameter<IntValue> NewDimensionsParameter {
      get { return (IFixedValueParameter<IntValue>) Parameters[NewDimensionsParameterName]; }
    }
    public IConstrainedValueParameter<IDistance<double[]>> DistanceFunctionParameter {
      get { return (IConstrainedValueParameter<IDistance<double[]>>) Parameters[DistanceFunctionParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaxIterationsParameter {
      get { return (IFixedValueParameter<IntValue>) Parameters[MaxIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> StopLyingIterationParameter {
      get { return (IFixedValueParameter<IntValue>) Parameters[StopLyingIterationParameterName]; }
    }
    public IFixedValueParameter<IntValue> MomentumSwitchIterationParameter {
      get { return (IFixedValueParameter<IntValue>) Parameters[MomentumSwitchIterationParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> InitialMomentumParameter {
      get { return (IFixedValueParameter<DoubleValue>) Parameters[InitialMomentumParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> FinalMomentumParameter {
      get { return (IFixedValueParameter<DoubleValue>) Parameters[FinalMomentumParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> EtaParameter {
      get { return (IFixedValueParameter<DoubleValue>) Parameters[EtaParameterName]; }
    }
    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (IFixedValueParameter<BoolValue>) Parameters[SetSeedRandomlyParameterName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>) Parameters[SeedParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> ClassesNameParameter {
      get { return (IConstrainedValueParameter<StringValue>) Parameters[ClassesNameParameterName]; }
    }
    public IFixedValueParameter<BoolValue> NormalizationParameter {
      get { return (IFixedValueParameter<BoolValue>) Parameters[NormalizationParameterName]; }
    }
    public IFixedValueParameter<BoolValue> RandomInitializationParameter {
      get { return (IFixedValueParameter<BoolValue>) Parameters[RandomInitializationParameterName]; }
    }
    public IFixedValueParameter<IntValue> UpdateIntervalParameter {
      get { return (IFixedValueParameter<IntValue>) Parameters[UpdateIntervalParameterName]; }
    }
    #endregion

    #region Properties
    public IDistance<double[]> DistanceFunction {
      get { return DistanceFunctionParameter.Value; }
    }
    public double Perplexity {
      get { return PerplexityParameter.Value.Value; }
      set { PerplexityParameter.Value.Value = value; }
    }
    public double Theta {
      get { return ThetaParameter.Value.Value; }
      set { ThetaParameter.Value.Value = value; }
    }
    public int NewDimensions {
      get { return NewDimensionsParameter.Value.Value; }
      set { NewDimensionsParameter.Value.Value = value; }
    }
    public int MaxIterations {
      get { return MaxIterationsParameter.Value.Value; }
      set { MaxIterationsParameter.Value.Value = value; }
    }
    public int StopLyingIteration {
      get { return StopLyingIterationParameter.Value.Value; }
      set { StopLyingIterationParameter.Value.Value = value; }
    }
    public int MomentumSwitchIteration {
      get { return MomentumSwitchIterationParameter.Value.Value; }
      set { MomentumSwitchIterationParameter.Value.Value = value; }
    }
    public double InitialMomentum {
      get { return InitialMomentumParameter.Value.Value; }
      set { InitialMomentumParameter.Value.Value = value; }
    }
    public double FinalMomentum {
      get { return FinalMomentumParameter.Value.Value; }
      set { FinalMomentumParameter.Value.Value = value; }
    }
    public double Eta {
      get { return EtaParameter.Value.Value; }
      set { EtaParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public string ClassesName {
      get { return ClassesNameParameter.Value != null ? ClassesNameParameter.Value.Value : null; }
      set { ClassesNameParameter.Value.Value = value; }
    }
    public bool Normalization {
      get { return NormalizationParameter.Value.Value; }
      set { NormalizationParameter.Value.Value = value; }
    }
    public bool RandomInitialization {
      get { return RandomInitializationParameter.Value.Value; }
      set { RandomInitializationParameter.Value.Value = value; }
    }
    public int UpdateInterval {
      get { return UpdateIntervalParameter.Value.Value; }
      set { UpdateIntervalParameter.Value.Value = value; }
    }
    #endregion

    #region Storable properties
    [Storable]
    private Dictionary<string, List<int>> dataRowNames;
    [Storable]
    private Dictionary<string, ScatterPlotDataRow> dataRows;
    [Storable]
    private TSNEStatic<double[]>.TSNEState state;
    [Storable]
    private int iter;
    #endregion

    #region Constructors & Cloning
    [StorableConstructor]
    private TSNEAlgorithm(bool deserializing) : base(deserializing) { }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      // backwards compatibility: add the parameter only if it is not already present
      if (!Parameters.ContainsKey(RandomInitializationParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(RandomInitializationParameterName, "Whether data points should be initialized randomly or according to the first 2 dimensions", new BoolValue(true)));
      RegisterParameterEvents();
    }
    private TSNEAlgorithm(TSNEAlgorithm original, Cloner cloner) : base(original, cloner) {
      if (original.dataRowNames != null)
        dataRowNames = new Dictionary<string, List<int>>(original.dataRowNames);
      if (original.dataRows != null)
        dataRows = original.dataRows.ToDictionary(kvp => kvp.Key, kvp => cloner.Clone(kvp.Value));
      if (original.state != null)
        state = cloner.Clone(original.state);
      iter = original.iter;
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new TSNEAlgorithm(this, cloner);
    }
    public TSNEAlgorithm() {
      var distances = new ItemSet<IDistance<double[]>>(ApplicationManager.Manager.GetInstances<IDistance<double[]>>());
      Parameters.Add(new ConstrainedValueParameter<IDistance<double[]>>(DistanceFunctionParameterName, "The distance function used to differentiate similar from non-similar points", distances, distances.OfType<EuclideanDistance>().FirstOrDefault()));
      Parameters.Add(new FixedValueParameter<DoubleValue>(PerplexityParameterName, "Perplexity-parameter of tSNE. Comparable to k in a k-nearest neighbour algorithm. Recommended value is floor(number of points / 3) or lower", new DoubleValue(25)));
      Parameters.Add(new FixedValueParameter<PercentValue>(ThetaParameterName, "Value describing how much approximated " +
                                                                               "gradients may differ from exact gradients. Set to 0 for exact calculation and in [0,1] otherwise. " +
                                                                               "Appropriate values for theta are between 0.1 and 0.7 (default = 0.5). CAUTION: exact calculation of " +
                                                                               "forces requires building a non-sparse N*N matrix where N is the number of data points. This may " +
                                                                               "exceed memory limitations. The function is designed to run on large (N > 5000) data sets. It may give" +
                                                                               " poor performance on very small data sets (it is better to use a standard t-SNE implementation on such data).", new PercentValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NewDimensionsParameterName, "Dimensionality of projected space (usually 2 for easy visual analysis)", new IntValue(2)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaxIterationsParameterName, "Maximum number of iterations for gradient descent.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(StopLyingIterationParameterName, "Number of iterations after which p is no longer approximated.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(MomentumSwitchIterationParameterName, "Number of iterations after which the momentum in the gradient descent is switched.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(InitialMomentumParameterName, "The initial momentum in the gradient descent.", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(FinalMomentumParameterName, "The final momentum.", new DoubleValue(0.8)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(EtaParameterName, "Gradient descent learning rate.", new DoubleValue(10)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "If the seed should be random.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The seed used if it should not be random.", new IntValue(0)));
      Parameters.Add(new OptionalConstrainedValueParameter<StringValue>(ClassesNameParameterName, "Name of the column specifying the class labels of each data point. If this is not set, training/test is used as labels."));
      Parameters.Add(new FixedValueParameter<BoolValue>(NormalizationParameterName, "Whether the data should be zero centered and have variance of 1 for each variable, so different scalings are ignored.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(UpdateIntervalParameterName, "The interval after which the results will be updated.", new IntValue(50)));
      Parameters.Add(new FixedValueParameter<BoolValue>(RandomInitializationParameterName, "Whether data points should be initialized randomly or according to the first 2 dimensions", new BoolValue(true)));

      Parameters[UpdateIntervalParameterName].Hidden = true;

      MomentumSwitchIterationParameter.Hidden = true;
      InitialMomentumParameter.Hidden = true;
      FinalMomentumParameter.Hidden = true;
      StopLyingIterationParameter.Hidden = true;
      EtaParameter.Hidden = false;
      Problem = new RegressionProblem();
      RegisterParameterEvents();
    }
    #endregion

    public override void Prepare() {
      base.Prepare();
      dataRowNames = null;
      dataRows = null;
      state = null;
    }

    protected override void Run(CancellationToken cancellationToken) {
      var problemData = Problem.ProblemData;
      // set up and initialize everything if necessary
      var wdist = DistanceFunction as WeightedEuclideanDistance;
      if (wdist != null) wdist.Initialize(problemData);
      if (state == null) {
        if (SetSeedRandomly) Seed = new System.Random().Next();
        var random = new MersenneTwister((uint) Seed);
        var dataset = problemData.Dataset;
        var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
        var allindices = Problem.ProblemData.AllIndices.ToArray();

        // jagged array is required to meet the static method declarations of TSNEStatic<T>
        var data = Enumerable.Range(0, dataset.Rows).Select(x => new double[allowedInputVariables.Length]).ToArray();
        var col = 0;
        foreach (var s in allowedInputVariables) {
          var row = 0;
          foreach (var d in dataset.GetDoubleValues(s)) {
            data[row][col] = d;
            row++;
          }
          col++;
        }

        if (Normalization) data = NormalizeInputData(data);
        state = TSNEStatic<double[]>.CreateState(data, DistanceFunction, random, NewDimensions, Perplexity, Theta, StopLyingIteration, MomentumSwitchIteration, InitialMomentum, FinalMomentum, Eta, RandomInitialization);
        SetUpResults(allindices);
        iter = 0;
      }
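      // gradient descent: results are refreshed every UpdateInterval iterations and once more
      // after the loop, so the reported error and scatter plot always reflect the latest state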
      for (; iter < MaxIterations && !cancellationToken.IsCancellationRequested; iter++) {
        if (iter % UpdateInterval == 0) Analyze(state);
        TSNEStatic<double[]>.Iterate(state);
      }
      Analyze(state);
    }

    #region Events
    protected override void OnProblemChanged() {
      base.OnProblemChanged();
      if (Problem == null) return;
      OnProblemDataChanged(this, null);
    }

    protected override void RegisterProblemEvents() {
      base.RegisterProblemEvents();
      Problem.ProblemDataChanged += OnProblemDataChanged;
    }

    protected override void DeregisterProblemEvents() {
      base.DeregisterProblemEvents();
      Problem.ProblemDataChanged -= OnProblemDataChanged;
    }

    protected override void OnStopped() {
      base.OnStopped();
      state = null;
      dataRowNames = null;
      dataRows = null;
    }

    private void OnProblemDataChanged(object sender, EventArgs args) {
      if (Problem == null || Problem.ProblemData == null) return;
      OnPerplexityChanged(this, null);
      OnColumnsChanged(this, null);
      Problem.ProblemData.Changed += OnPerplexityChanged;
      Problem.ProblemData.Changed += OnColumnsChanged;
      Problem.ProblemData.Dataset.RowsChanged += OnPerplexityChanged;
      Problem.ProblemData.Dataset.ColumnsChanged += OnColumnsChanged;
      if (!Parameters.ContainsKey(ClassesNameParameterName)) return;
      ClassesNameParameter.ValidValues.Clear();
      foreach (var input in Problem.ProblemData.InputVariables) ClassesNameParameter.ValidValues.Add(input);
    }

    private void OnColumnsChanged(object sender, EventArgs e) {
      if (Problem == null || Problem.ProblemData == null || Problem.ProblemData.Dataset == null || !Parameters.ContainsKey(DistanceFunctionParameterName)) return;
      DistanceFunctionParameter.ValidValues.OfType<WeightedEuclideanDistance>().Single().AdaptToProblemData(Problem.ProblemData);
    }

    private void RegisterParameterEvents() {
      PerplexityParameter.Value.ValueChanged -= OnPerplexityChanged;
      PerplexityParameter.Value.ValueChanged += OnPerplexityChanged;
    }

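    // the perplexity must not exceed (number of data points - 1) / 3, so the parameter is clamped
    // whenever the data set or the parameter itself changes; the handler is detached while the
    // value is corrected to avoid retriggering itself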
    private void OnPerplexityChanged(object sender, EventArgs e) {
      if (Problem == null || Problem.ProblemData == null || Problem.ProblemData.Dataset == null || !Parameters.ContainsKey(PerplexityParameterName)) return;
      PerplexityParameter.Value.ValueChanged -= OnPerplexityChanged;
      PerplexityParameter.Value.Value = Math.Max(1, Math.Min((Problem.ProblemData.Dataset.Rows - 1) / 3.0, Perplexity));
      PerplexityParameter.Value.ValueChanged += OnPerplexityChanged;
    }
    #endregion

    #region Helpers
    private void SetUpResults(IReadOnlyList<int> allIndices) {
      if (Results == null) return;
      var results = Results;
      dataRowNames = new Dictionary<string, List<int>>();
      dataRows = new Dictionary<string, ScatterPlotDataRow>();
      var problemData = Problem.ProblemData;

      if (!results.ContainsKey(IterationResultName)) results.Add(new Result(IterationResultName, new IntValue(0)));
      if (!results.ContainsKey(ErrorResultName)) results.Add(new Result(ErrorResultName, new DoubleValue(0)));
      if (!results.ContainsKey(ScatterPlotResultName)) results.Add(new Result(ScatterPlotResultName, "Plot of the projected data", new ScatterPlot(DataResultName, "")));
      if (!results.ContainsKey(DataResultName)) results.Add(new Result(DataResultName, "Projected Data", new DoubleMatrix()));
      if (!results.ContainsKey(ErrorPlotResultName)) {
        var errortable = new DataTable(ErrorPlotResultName, "Development of errors during gradient descent") {
          VisualProperties = {
            XAxisTitle = "UpdateInterval",
            YAxisTitle = "Error",
            YAxisLogScale = true
          }
        };
        errortable.Rows.Add(new DataRow("Errors"));
        errortable.Rows["Errors"].VisualProperties.StartIndexZero = true;
        results.Add(new Result(ErrorPlotResultName, errortable));
      }

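      // scatter plot rows: without a class variable the points are split into training and test
      // partitions; otherwise one row is created per class label (string variables or the
      // classification target) or per k-means contour (double and date-time variables)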
      // color data points according to the classes variable (be it double or string)
      if (!problemData.Dataset.VariableNames.Contains(ClassesName)) {
        dataRowNames.Add("Training", problemData.TrainingIndices.ToList());
        dataRowNames.Add("Test", problemData.TestIndices.ToList());
        return;
      }
      var classificationData = problemData as ClassificationProblemData;
      if (classificationData != null && classificationData.TargetVariable.Equals(ClassesName)) {
        var classNames = classificationData.ClassValues.Zip(classificationData.ClassNames, (v, n) => new {v, n}).ToDictionary(x => x.v, x => x.n);
        var classes = classificationData.Dataset.GetDoubleValues(classificationData.TargetVariable, allIndices).Select(v => classNames[v]).ToArray();
        for (var i = 0; i < classes.Length; i++) {
          if (!dataRowNames.ContainsKey(classes[i])) dataRowNames.Add(classes[i], new List<int>());
          dataRowNames[classes[i]].Add(i);
        }
      }
      else if (((Dataset) problemData.Dataset).VariableHasType<string>(ClassesName)) {
        var classes = problemData.Dataset.GetStringValues(ClassesName, allIndices).ToArray();
        for (var i = 0; i < classes.Length; i++) {
          if (!dataRowNames.ContainsKey(classes[i])) dataRowNames.Add(classes[i], new List<int>());
          dataRowNames[classes[i]].Add(i);
        }
      }
      else if (((Dataset) problemData.Dataset).VariableHasType<double>(ClassesName)) {
        var clusterdata = new Dataset(problemData.Dataset.DoubleVariables, problemData.Dataset.DoubleVariables.Select(v => problemData.Dataset.GetDoubleValues(v, allIndices).ToList()));
        const int contours = 8;
        Dictionary<int, string> contourMap;
        IClusteringModel clusterModel;
        double[][] borders;
        CreateClusters(clusterdata, ClassesName, contours, out clusterModel, out contourMap, out borders);
        var contourorder = borders.Select((x, i) => new {x, i}).OrderBy(x => x.x[0]).Select(x => x.i).ToArray();
        for (var i = 0; i < contours; i++) {
          var c = contourorder[i];
          var contourname = contourMap[c];
          dataRowNames.Add(contourname, new List<int>());
          dataRows.Add(contourname, new ScatterPlotDataRow(contourname, "", new List<Point2D<double>>()));
          dataRows[contourname].VisualProperties.Color = GetHeatMapColor(i, contours);
        }
        var allClusters = clusterModel.GetClusterValues(clusterdata, Enumerable.Range(0, clusterdata.Rows)).ToArray();
        for (var i = 0; i < clusterdata.Rows; i++) dataRowNames[contourMap[allClusters[i] - 1]].Add(i);
      }
      else if (((Dataset) problemData.Dataset).VariableHasType<DateTime>(ClassesName)) {
        var clusterdata = new Dataset(problemData.Dataset.DateTimeVariables, problemData.Dataset.DateTimeVariables.Select(v => problemData.Dataset.GetDoubleValues(v, allIndices).ToList()));
        const int contours = 8;
        Dictionary<int, string> contourMap;
        IClusteringModel clusterModel;
        double[][] borders;
        CreateClusters(clusterdata, ClassesName, contours, out clusterModel, out contourMap, out borders);
        var contourorder = borders.Select((x, i) => new {x, i}).OrderBy(x => x.x[0]).Select(x => x.i).ToArray();
        for (var i = 0; i < contours; i++) {
          var c = contourorder[i];
          var contourname = contourMap[c];
          dataRowNames.Add(contourname, new List<int>());
          dataRows.Add(contourname, new ScatterPlotDataRow(contourname, "", new List<Point2D<double>>()));
          dataRows[contourname].VisualProperties.Color = GetHeatMapColor(i, contours);
        }
        var allClusters = clusterModel.GetClusterValues(clusterdata, Enumerable.Range(0, clusterdata.Rows)).ToArray();
        for (var i = 0; i < clusterdata.Rows; i++) dataRowNames[contourMap[allClusters[i] - 1]].Add(i);
      }
      else {
        dataRowNames.Add("Training", problemData.TrainingIndices.ToList());
        dataRowNames.Add("Test", problemData.TestIndices.ToList());
      }
    }

    private void Analyze(TSNEStatic<double[]>.TSNEState tsneState) {
      if (Results == null) return;
      var results = Results;
      var plot = results[ErrorPlotResultName].Value as DataTable;
      if (plot == null) throw new ArgumentException("Could not create/access error data table in results collection.");
      var errors = plot.Rows["Errors"].Values;
      var c = tsneState.EvaluateError();
      errors.Add(c);
      ((IntValue) results[IterationResultName].Value).Value = tsneState.iter;
      ((DoubleValue) results[ErrorResultName].Value).Value = errors.Last();

      var ndata = NormalizeProjectedData(tsneState.newData);
      results[DataResultName].Value = new DoubleMatrix(ndata);
      var splot = results[ScatterPlotResultName].Value as ScatterPlot;
      FillScatterPlot(ndata, splot);
    }

    private void FillScatterPlot(double[,] lowDimData, ScatterPlot plot) {
      foreach (var rowName in dataRowNames.Keys) {
        if (!plot.Rows.ContainsKey(rowName)) {
          plot.Rows.Add(dataRows.ContainsKey(rowName) ? dataRows[rowName] : new ScatterPlotDataRow(rowName, "", new List<Point2D<double>>()));
          plot.Rows[rowName].VisualProperties.PointSize = 8;
        }
        plot.Rows[rowName].Points.Replace(dataRowNames[rowName].Select(i => new Point2D<double>(lowDimData[i, 0], lowDimData[i, 1])));
      }
    }

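    // scales every column of the projected data to roughly [-0.5, 0.5]: values are shifted so the
    // column midpoint lies at 0 and then divided by the column range (columns with zero range are
    // left unscaled); this only affects the presentation, not the embedding itself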
    private static double[,] NormalizeProjectedData(double[,] data) {
      var max = new double[data.GetLength(1)];
      var min = new double[data.GetLength(1)];
      var res = new double[data.GetLength(0), data.GetLength(1)];
      for (var i = 0; i < max.Length; i++) max[i] = min[i] = data[0, i];
      for (var i = 0; i < data.GetLength(0); i++)
        for (var j = 0; j < data.GetLength(1); j++) {
          var v = data[i, j];
          max[j] = Math.Max(max[j], v);
          min[j] = Math.Min(min[j], v);
        }
      for (var i = 0; i < data.GetLength(0); i++) {
        for (var j = 0; j < data.GetLength(1); j++) {
          var d = max[j] - min[j];
          var s = data[i, j] - (max[j] + min[j]) / 2; // shift data
          if (d.IsAlmost(0)) res[i, j] = data[i, j]; // no scaling possible
          else res[i, j] = s / d; // scale data
        }
      }
      return res;
    }

    private static double[][] NormalizeInputData(IReadOnlyList<IReadOnlyList<double>> data) {
      // as in the tSNE implementation by van der Maaten
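      // each input column is centered to zero mean and divided by the maximum absolute value of the
      // original column, so no variable dominates the distance calculation merely because of its scale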
      var n = data[0].Count;
      var mean = new double[n];
      var max = new double[n];
      var nData = new double[data.Count][];
      for (var i = 0; i < n; i++) {
        mean[i] = Enumerable.Range(0, data.Count).Select(x => data[x][i]).Average();
        max[i] = Enumerable.Range(0, data.Count).Max(x => Math.Abs(data[x][i]));
      }
      for (var i = 0; i < data.Count; i++) {
        nData[i] = new double[n];
        for (var j = 0; j < n; j++) nData[i][j] = max[j].IsAlmost(0) ? data[i][j] - mean[j] : (data[i][j] - mean[j]) / max[j];
      }
      return nData;
    }

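    // maps a contour index onto the predefined color gradient: low contour indices get colors near
    // the start of the gradient, high indices colors near the end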
    private static Color GetHeatMapColor(int contourNr, int noContours) {
      return ConvertTotalToRgb(0, noContours, contourNr);
    }

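    // bins a numeric class variable into 'contours' groups via k-means clustering; for every cluster
    // the range [min;max] of the target values it contains is recorded and used as the legend entry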
    private static void CreateClusters(IDataset data, string target, int contours, out IClusteringModel contourCluster, out Dictionary<int, string> contourNames, out double[][] borders) {
      var cpd = new ClusteringProblemData((Dataset) data, new[] {target});
      contourCluster = KMeansClustering.CreateKMeansSolution(cpd, contours, 3).Model;

      borders = Enumerable.Range(0, contours).Select(x => new[] {double.MaxValue, double.MinValue}).ToArray();
      var clusters = contourCluster.GetClusterValues(cpd.Dataset, cpd.AllIndices).ToArray();
      var targetvalues = cpd.Dataset.GetDoubleValues(target).ToArray();
      foreach (var i in cpd.AllIndices) {
        var cl = clusters[i] - 1;
        var clv = targetvalues[i];
        if (borders[cl][0] > clv) borders[cl][0] = clv;
        if (borders[cl][1] < clv) borders[cl][1] = clv;
      }

      contourNames = new Dictionary<int, string>();
      for (var i = 0; i < contours; i++)
        contourNames.Add(i, "[" + borders[i][0] + ";" + borders[i][1] + "]");
    }

    private static Color ConvertTotalToRgb(double low, double high, double cell) {
      var colorGradient = ColorGradient.Colors;
      var range = high - low;
      var h = Math.Min(cell / range * colorGradient.Count, colorGradient.Count - 1);
      return colorGradient[(int) h];
    }
    #endregion
  }
}