Free cookie consent management tool by TermsFeed Policy Generator

Changeset 14788


Ignore:
Timestamp:
03/27/17 17:27:03 (7 years ago)
Author:
gkronber
Message:

#2700: refactoring

Location:
branches/TSNE/HeuristicLab.Algorithms.DataAnalysis/3.4
Files:
4 edited

Legend:

Unmodified
Added
Removed
  • branches/TSNE/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/TSNEInterfaces/ISpacePartitioningTree.cs

    r14785 r14788  
    3333    void GetAllIndices(int[] indices);
    3434    int GetAllIndices(int[] indices, int loc);
    35     void ComputeNonEdgeForces(int pointIndex, double theta, double[] negF, double[] sumQ);
     35    void ComputeNonEdgeForces(int pointIndex, double theta, double[] negF, ref double sumQ);
    3636    void ComputeEdgeForces(int[] rowP, int[] colP, double[] valP, int n, double[,] posF);
    3737
  • branches/TSNE/HeuristicLab.Algorithms.DataAnalysis/3.4/TSNE/SpacePartitioningTree.cs

    r14787 r14788  
    218218    }
    219219
    220     public void ComputeNonEdgeForces(int pointIndex, double theta, double[] negF, double[] sumQ) {
     220    public void ComputeNonEdgeForces(int pointIndex, double theta, double[] negF, ref double sumQ)
     221    {
    221222      // Make sure that we spend no time on empty nodes or self-interactions
    222223      if (cumulativeSize == 0 || (isLeaf && size == 1 && index[0] == pointIndex)) return;
     
    238239        D = 1.0 / (1.0 + D);
    239240        var mult = cumulativeSize * D;
    240         sumQ[0] += mult;
     241        sumQ += mult;
    241242        mult *= D;
    242243        for (var d = 0; d < dimension; d++) negF[d] += mult * buff[d];
     
    244245
    245246        // Recursively apply Barnes-Hut to children
    246         for (var i = 0; i < noChildren; i++) children[i].ComputeNonEdgeForces(pointIndex, theta, negF, sumQ);
     247        for (var i = 0; i < noChildren; i++) children[i].ComputeNonEdgeForces(pointIndex, theta, negF, ref sumQ);
    247248      }
    248249    }
  • branches/TSNE/HeuristicLab.Algorithms.DataAnalysis/3.4/TSNE/TSNEAlgorithm.cs

    r14785 r14788  
    2929using HeuristicLab.Core;
    3030using HeuristicLab.Data;
    31 using HeuristicLab.Encodings.RealVectorEncoding;
    3231using HeuristicLab.Optimization;
    3332using HeuristicLab.Parameters;
     
    7271    private const string ClassesParameterName = "ClassNames";
    7372    private const string NormalizationParameterName = "Normalization";
     73    #endregion
     74
     75    #region result names
     76    private const string IterationResultName = "Iteration";
     77    private const string ErrorResultName = "Error";
     78    private const string ErrorPlotResultName = "Error plot";
     79    private const string ScatterPlotResultName = "Scatterplot";
     80    private const string DataResultName = "Projected data";
    7481    #endregion
    7582
     
    209216    #endregion
    210217
    211     public override void Stop() {
    212       base.Stop();
    213       if (tsne != null) tsne.Running = false;
    214     }
     218    [Storable]
     219    private Dictionary<string, List<int>> dataRowNames;    // TODO
     220    [Storable]
     221    private Dictionary<string, ScatterPlotDataRow> dataRows; // TODO
     222
    215223
    216224    protected override void Run(CancellationToken cancellationToken) {
    217       var dataRowNames = new Dictionary<string, List<int>>();
    218       var rows = new Dictionary<string, ScatterPlotDataRow>();
     225      var problemData = Problem.ProblemData;
     226
     227      // set up and run tSNE
     228      if (SetSeedRandomly) Seed = new System.Random().Next();
     229      var random = new MersenneTwister((uint)Seed);
     230      var dataset = problemData.Dataset;
     231      var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
     232      var data = new double[dataset.Rows][];
     233      for (var row = 0; row < dataset.Rows; row++) data[row] = allowedInputVariables.Select(col => dataset.GetDoubleValue(col, row)).ToArray();
     234      if (Normalization) data = NormalizeData(data);
     235
     236      var tsneState = TSNE<double[]>.CreateState(data, Distance, random, NewDimensions, Perplexity, Theta, StopLyingIteration, MomentumSwitchIteration, InitialMomentum, FinalMomentum, Eta);
     237
     238      SetUpResults(data);
     239      for (int iter = 0; iter < MaxIterations && !cancellationToken.IsCancellationRequested; iter++)
     240      {
     241        TSNE<double[]>.Iterate(tsneState);
     242        Analyze(tsneState);
     243      }
     244    }
     245
     246    private void SetUpResults(IReadOnlyCollection<double[]> data) {
     247      if (Results == null) return;
     248      var results = Results;
     249      dataRowNames = new Dictionary<string, List<int>>();
     250      dataRows = new Dictionary<string, ScatterPlotDataRow>();
    219251      var problemData = Problem.ProblemData;
    220252
     
    235267            var contourname = GetContourName(i, min, max, contours);
    236268            dataRowNames.Add(contourname, new List<int>());
    237             rows.Add(contourname, new ScatterPlotDataRow(contourname, "", new List<Point2D<double>>()));
    238             rows[contourname].VisualProperties.Color = GetHeatMapColor(i, contours);
    239             rows[contourname].VisualProperties.PointSize = i + 3;
     269            dataRows.Add(contourname, new ScatterPlotDataRow(contourname, "", new List<Point2D<double>>()));
     270            dataRows[contourname].VisualProperties.Color = GetHeatMapColor(i, contours);
     271            dataRows[contourname].VisualProperties.PointSize = i + 3;
    240272          }
    241273          for (var i = 0; i < classValues.Length; i++) {
     
    248280      }
    249281
    250       // set up and run tSNE
    251       if (SetSeedRandomly) Seed = new System.Random().Next();
    252       var random = new MersenneTwister((uint)Seed);
    253       tsne = new TSNE<double[]>(Distance, random, Results, MaxIterations, StopLyingIteration, MomentumSwitchIteration, InitialMomentum, FinalMomentum, Eta, dataRowNames, rows);
    254       var dataset = problemData.Dataset;
    255       var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
    256       var data = new double[dataset.Rows][];
    257       for (var row = 0; row < dataset.Rows; row++) data[row] = allowedInputVariables.Select(col => dataset.GetDoubleValue(col, row)).ToArray();
    258       if (Normalization) data = NormalizeData(data);
    259       tsne.Run(data, NewDimensions, Perplexity, Theta);
     282      if (!results.ContainsKey(IterationResultName)) results.Add(new Result(IterationResultName, new IntValue(0)));
     283      else ((IntValue)results[IterationResultName].Value).Value = 0;
     284
     285      if (!results.ContainsKey(ErrorResultName)) results.Add(new Result(ErrorResultName, new DoubleValue(0)));
     286      else ((DoubleValue)results[ErrorResultName].Value).Value = 0;
     287
     288      if (!results.ContainsKey(ErrorPlotResultName)) results.Add(new Result(ErrorPlotResultName, new DataTable(ErrorPlotResultName, "Development of errors during gradient descent")));
     289      else results[ErrorPlotResultName].Value = new DataTable(ErrorPlotResultName, "Development of errors during gradient descent");
     290
     291      var plot = results[ErrorPlotResultName].Value as DataTable;
     292      if (plot == null) throw new ArgumentException("could not create/access error data table in results collection");
     293
     294      if (!plot.Rows.ContainsKey("errors")) plot.Rows.Add(new DataRow("errors"));
     295      plot.Rows["errors"].Values.Clear();
     296
     297      results.Add(new Result(ScatterPlotResultName, "Plot of the projected data", new ScatterPlot(DataResultName, "")));
     298      results.Add(new Result(DataResultName, "Projected Data", new DoubleMatrix()));
     299    }
     300
     301    private void Analyze(TSNE<double[]>.TSNEState tsneState) {
     302      if (Results == null) return;
     303      var results = Results;
     304      var plot = results[ErrorPlotResultName].Value as DataTable;
     305      if (plot == null) throw new ArgumentException("Could not create/access error data table in results collection.");
     306      var errors = plot.Rows["errors"].Values;
     307      var c = tsneState.EvaluateError();
     308      errors.Add(c);
     309      ((IntValue)results[IterationResultName].Value).Value = tsneState.iter + 1;
     310      ((DoubleValue)results[ErrorResultName].Value).Value = errors.Last();
     311
     312      var ndata = Normalize(tsneState.newData);
     313      results[DataResultName].Value = new DoubleMatrix(ndata);
     314      var splot = results[ScatterPlotResultName].Value as ScatterPlot;
     315      FillScatterPlot(ndata, splot);
     316    }
     317
     318    private void FillScatterPlot(double[,] lowDimData, ScatterPlot plot) {
     319      foreach (var rowName in dataRowNames.Keys) {
     320        if (!plot.Rows.ContainsKey(rowName))
     321          plot.Rows.Add(dataRows.ContainsKey(rowName) ? dataRows[rowName] : new ScatterPlotDataRow(rowName, "", new List<Point2D<double>>()));
     322        plot.Rows[rowName].Points.Replace(dataRowNames[rowName].Select(i => new Point2D<double>(lowDimData[i, 0], lowDimData[i, 1])));
     323      }
     324    }
     325
     326    private static double[,] Normalize(double[,] data) {
     327      var max = new double[data.GetLength(1)];
     328      var min = new double[data.GetLength(1)];
     329      var res = new double[data.GetLength(0), data.GetLength(1)];
     330      for (var i = 0; i < max.Length; i++) max[i] = min[i] = data[0, i];
     331      for (var i = 0; i < data.GetLength(0); i++)
     332        for (var j = 0; j < data.GetLength(1); j++) {
     333          var v = data[i, j];
     334          max[j] = Math.Max(max[j], v);
     335          min[j] = Math.Min(min[j], v);
     336        }
     337      for (var i = 0; i < data.GetLength(0); i++) {
     338        for (var j = 0; j < data.GetLength(1); j++) {
     339          res[i, j] = (data[i, j] - (max[j] + min[j]) / 2) / (max[j] - min[j]);
     340        }
     341      }
     342      return res;
    260343    }
    261344
     
    276359      return nData;
    277360    }
     361
    278362    private static Color GetHeatMapColor(int contourNr, int noContours) {
    279363      var q = (double)contourNr / noContours;  // q in [0,1]
     
    281365      return c;
    282366    }
     367
    283368    private static string GetContourName(double value, double min, double max, int noContours) {
    284369      var size = (max - min) / noContours;
     
    286371      return GetContourName(contourNr, min, max, noContours);
    287372    }
     373
    288374    private static string GetContourName(int i, double min, double max, int noContours) {
    289375      var size = (max - min) / noContours;
  • branches/TSNE/HeuristicLab.Algorithms.DataAnalysis/3.4/TSNE/TSNEStatic.cs

    r14785 r14788  
    5757using System.Collections.Generic;
    5858using System.Linq;
    59 using HeuristicLab.Analysis;
    6059using HeuristicLab.Collections;
    6160using HeuristicLab.Common;
    6261using HeuristicLab.Core;
    63 using HeuristicLab.Data;
    64 using HeuristicLab.Optimization;
    6562using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    6663using HeuristicLab.Random;
     
    6865namespace HeuristicLab.Algorithms.DataAnalysis {
    6966  [StorableClass]
    70   public class TSNE<T> : DeepCloneable /*where T : class, IDeepCloneable*/ {
    71 
    72     private const string IterationResultName = "Iteration";
    73     private const string ErrorResultName = "Error";
    74     private const string ErrorPlotResultName = "ErrorPlot";
    75     private const string ScatterPlotResultName = "Scatterplot";
    76     private const string DataResultName = "Projected Data";
    77 
    78     #region Properties
    79     [Storable]
    80     private IDistance<T> distance;
    81     [Storable]
    82     private int maxIter;
    83     [Storable]
    84     private int stopLyingIter;
    85     [Storable]
    86     private int momSwitchIter;
    87     [Storable]
    88     double momentum;
    89     [Storable]
    90     private double finalMomentum;
    91     [Storable]
    92     private double eta;
    93     [Storable]
    94     private IRandom random;
    95     [Storable]
    96     private ResultCollection results;
    97     [Storable]
    98     private Dictionary<string, List<int>> dataRowLookup;
    99     [Storable]
    100     private Dictionary<string, ScatterPlotDataRow> dataRows;
    101     #endregion
    102 
    103     #region Stopping
    104     public volatile bool Running;      // TODO
    105     #endregion
    106 
    107     #region HLConstructors & Cloning
    108     [StorableConstructor]
    109     protected TSNE(bool deserializing) { }
    110     protected TSNE(TSNE<T> original, Cloner cloner) : base(original, cloner) {
    111       distance = cloner.Clone(original.distance);
    112       maxIter = original.maxIter;
    113       stopLyingIter = original.stopLyingIter;
    114       momSwitchIter = original.momSwitchIter;
    115       momentum = original.momentum;
    116       finalMomentum = original.finalMomentum;
    117       eta = original.eta;
    118       random = cloner.Clone(random);
    119       results = cloner.Clone(results);
    120       dataRowLookup = original.dataRowLookup.ToDictionary(entry => entry.Key, entry => entry.Value.Select(x => x).ToList());
    121       dataRows = original.dataRows.ToDictionary(entry => entry.Key, entry => cloner.Clone(entry.Value));
     67  public class TSNE<T> {
     68
     69    [StorableClass]
     70    public sealed class TSNEState : DeepCloneable {
     71      // initialized once
     72      public IDistance<T> distance;
     73      public IRandom random;
     74      public double perplexity;
     75      public bool exact;
     76      public int noDatapoints;
     77      public double finalMomentum;
     78      public int momSwitchIter;
     79      public int stopLyingIter;
     80      public double theta;
     81      public double eta;
     82      public int newDimensions;
     83
     84      // for approximate version: sparse representation of similarity/distance matrix
     85      public double[] valP; // similarity/distance
     86      public int[] rowP; // row index
     87      public int[] colP; // col index
     88
     89      // for exact version: dense representation of distance/similarity matrix
     90      public double[,] p;
     91
     92      // mapped data
     93      public double[,] newData;
     94
     95      public int iter;
     96      public double currentMomentum;
     97
     98      // helper variables (updated in each iteration)
     99      public double[,] gains;
     100      public double[,] uY;
     101      public double[,] dY;
     102
     103      private TSNEState(TSNEState original, Cloner cloner) : base(original, cloner) {
     104      }
     105      public override IDeepCloneable Clone(Cloner cloner) {
     106        return new TSNEState(this, cloner);
     107      }
     108
     109      public TSNEState(T[] data, IDistance<T> distance, IRandom random, int newDimensions, double perplexity, double theta, int stopLyingIter, int momSwitchIter, double momentum, double finalMomentum, double eta) {
     110        this.distance = distance;
     111        this.random = random;
     112        this.newDimensions = newDimensions;
     113        this.perplexity = perplexity;
     114        this.theta = theta;
     115        this.stopLyingIter = stopLyingIter;
     116        this.momSwitchIter = momSwitchIter;
     117        this.currentMomentum = momentum;
     118        this.finalMomentum = finalMomentum;
     119        this.eta = eta;
     120
     121
     122        // initialize
     123        noDatapoints = data.Length;
     124        if (noDatapoints - 1 < 3 * perplexity) throw new ArgumentException("Perplexity too large for the number of data points!");
     125
     126        exact = Math.Abs(theta) < double.Epsilon;
     127        newData = new double[noDatapoints, newDimensions];
     128        dY = new double[noDatapoints, newDimensions];
     129        uY = new double[noDatapoints, newDimensions];
     130        gains = new double[noDatapoints, newDimensions];
     131        for (var i = 0; i < noDatapoints; i++)
     132          for (var j = 0; j < newDimensions; j++)
     133            gains[i, j] = 1.0;
     134
     135        p = null;
     136        rowP = null;
     137        colP = null;
     138        valP = null;
     139
     140        //Calculate Similarities
     141        if (exact) p = CalculateExactSimilarites(data, distance, perplexity);
     142        else CalculateApproximateSimilarities(data, distance, perplexity, out rowP, out colP, out valP);
     143
     144        // Lie about the P-values
     145        if (exact) for (var i = 0; i < noDatapoints; i++) for (var j = 0; j < noDatapoints; j++) p[i, j] *= 12.0;
     146        else for (var i = 0; i < rowP[noDatapoints]; i++) valP[i] *= 12.0;
     147
     148        // Initialize solution (randomly)
     149        var rand = new NormalDistributedRandom(random, 0, 1);
     150        for (var i = 0; i < noDatapoints; i++)
     151          for (var j = 0; j < newDimensions; j++)
     152            newData[i, j] = rand.NextDouble() * .0001;  // TODO const
     153      }
     154
     155      public double EvaluateError() {
     156        return exact ? EvaluateErrorExact(p, newData, noDatapoints, newDimensions) : EvaluateErrorApproximate(rowP, colP, valP, newData, theta);
     157      }
     158
     159      private static void CalculateApproximateSimilarities(T[] data, IDistance<T> distance, double perplexity, out int[] rowP, out int[] colP, out double[] valP) {
     160        // Compute asymmetric pairwise input similarities
     161        ComputeGaussianPerplexity(data, distance, out rowP, out colP, out valP, perplexity, (int)(3 * perplexity));        // TODO: why 3?
     162        // Symmetrize input similarities
     163        int[] sRowP, symColP;
     164        double[] sValP;
     165        SymmetrizeMatrix(rowP, colP, valP, out sRowP, out symColP, out sValP);
     166        rowP = sRowP;
     167        colP = symColP;
     168        valP = sValP;
     169        var sumP = .0;
     170        for (var i = 0; i < rowP[data.Length]; i++) sumP += valP[i];
     171        for (var i = 0; i < rowP[data.Length]; i++) valP[i] /= sumP;
     172      }
     173      private static double[,] CalculateExactSimilarites(T[] data, IDistance<T> distance, double perplexity) {
     174        // Compute similarities
     175        var p = new double[data.Length, data.Length];
     176        ComputeGaussianPerplexity(data, distance, p, perplexity);
     177        // Symmetrize input similarities
     178        for (var n = 0; n < data.Length; n++) {
     179          for (var m = n + 1; m < data.Length; m++) {
     180            p[n, m] += p[m, n];
     181            p[m, n] = p[n, m];
     182          }
     183        }
     184        var sumP = .0;
     185        for (var i = 0; i < data.Length; i++) for (var j = 0; j < data.Length; j++) sumP += p[i, j];
     186        for (var i = 0; i < data.Length; i++) for (var j = 0; j < data.Length; j++) p[i, j] /= sumP;
     187        return p;
     188      }
     189
     190      private static void ComputeGaussianPerplexity(IReadOnlyList<T> x, IDistance<T> distance, out int[] rowP, out int[] colP, out double[] valP, double perplexity, int k) {
     191        if (perplexity > k) throw new ArgumentException("Perplexity should be lower than k!");
     192
     193        int n = x.Count;
     194        // Allocate the memory we need
     195        rowP = new int[n + 1];
     196        colP = new int[n * k];
     197        valP = new double[n * k];
     198        var curP = new double[n - 1];
     199        rowP[0] = 0;
     200        for (var i = 0; i < n; i++) rowP[i + 1] = rowP[i] + k;
     201
     202        var objX = new List<IndexedItem<T>>();
     203        for (var i = 0; i < n; i++) objX.Add(new IndexedItem<T>(i, x[i]));
     204
     205        // Build ball tree on data set
     206        var tree = new VantagePointTree<IndexedItem<T>>(new IndexedItemDistance<T>(distance), objX);           // do we really want to re-create the tree on each call?
     207
     208        // Loop over all points to find nearest neighbors
     209        for (var i = 0; i < n; i++) {
     210          IList<IndexedItem<T>> indices;
     211          IList<double> distances;
     212
     213          // Find nearest neighbors
     214          tree.Search(objX[i], k + 1, out indices, out distances);
     215
     216          // Initialize some variables for binary search
     217          var found = false;
     218          var beta = 1.0;
     219          var minBeta = double.MinValue;
     220          var maxBeta = double.MaxValue;
     221          const double tol = 1e-5;  // TODO: why 1e-5?
     222
     223          // Iterate until we found a good perplexity
     224          var iter = 0; double sumP = 0;
     225          while (!found && iter < 200) {
     226
     227            // Compute Gaussian kernel row
     228            for (var m = 0; m < k; m++) curP[m] = Math.Exp(-beta * distances[m + 1]);
     229
     230            // Compute entropy of current row
     231            sumP = double.Epsilon;
     232            for (var m = 0; m < k; m++) sumP += curP[m];
     233            var h = .0;
     234            for (var m = 0; m < k; m++) h += beta * (distances[m + 1] * curP[m]);
     235            h = h / sumP + Math.Log(sumP);
     236
     237            // Evaluate whether the entropy is within the tolerance level
     238            var hdiff = h - Math.Log(perplexity);
     239            if (hdiff < tol && -hdiff < tol) {
     240              found = true;
     241            } else {
     242              if (hdiff > 0) {
     243                minBeta = beta;
     244                if (maxBeta.IsAlmost(double.MaxValue) || maxBeta.IsAlmost(double.MinValue))
     245                  beta *= 2.0;
     246                else
     247                  beta = (beta + maxBeta) / 2.0;
     248              } else {
     249                maxBeta = beta;
     250                if (minBeta.IsAlmost(double.MinValue) || minBeta.IsAlmost(double.MaxValue))
     251                  beta /= 2.0;
     252                else
     253                  beta = (beta + minBeta) / 2.0;
     254              }
     255            }
     256
     257            // Update iteration counter
     258            iter++;
     259          }
     260
     261          // Row-normalize current row of P and store in matrix
     262          for (var m = 0; m < k; m++) curP[m] /= sumP;
     263          for (var m = 0; m < k; m++) {
     264            colP[rowP[i] + m] = indices[m + 1].Index;
     265            valP[rowP[i] + m] = curP[m];
     266          }
     267        }
     268      }
     269      private static void ComputeGaussianPerplexity(T[] x, IDistance<T> distance, double[,] p, double perplexity) {
     270        // Compute the distance matrix
     271        var dd = ComputeDistances(x, distance);
     272
     273        int n = x.Length;
     274        // Compute the Gaussian kernel row by row
     275        for (var i = 0; i < n; i++) {
     276          // Initialize some variables
     277          var found = false;
     278          var beta = 1.0;
     279          var minBeta = -double.MaxValue;
     280          var maxBeta = double.MaxValue;
     281          const double tol = 1e-5;
     282          double sumP = 0;
     283
     284          // Iterate until we found a good perplexity
     285          var iter = 0;
     286          while (!found && iter < 200) {       // TODO constant
     287
     288            // Compute Gaussian kernel row
     289            for (var m = 0; m < n; m++) p[i, m] = Math.Exp(-beta * dd[i][m]);
     290            p[i, i] = double.Epsilon;
     291
     292            // Compute entropy of current row
     293            sumP = double.Epsilon;
     294            for (var m = 0; m < n; m++) sumP += p[i, m];
     295            var h = 0.0;
     296            for (var m = 0; m < n; m++) h += beta * (dd[i][m] * p[i, m]);
     297            h = h / sumP + Math.Log(sumP);
     298
     299            // Evaluate whether the entropy is within the tolerance level
     300            var hdiff = h - Math.Log(perplexity);
     301            if (hdiff < tol && -hdiff < tol) {
     302              found = true;
     303            } else {
     304              if (hdiff > 0) {
     305                minBeta = beta;
     306                if (maxBeta.IsAlmost(double.MaxValue) || maxBeta.IsAlmost(double.MinValue))
     307                  beta *= 2.0;
     308                else
     309                  beta = (beta + maxBeta) / 2.0;
     310              } else {
     311                maxBeta = beta;
     312                if (minBeta.IsAlmost(double.MinValue) || minBeta.IsAlmost(double.MaxValue))
     313                  beta /= 2.0;
     314                else
     315                  beta = (beta + minBeta) / 2.0;
     316              }
     317            }
     318
     319            // Update iteration counter
     320            iter++;
     321          }
     322
     323          // Row normalize P
     324          for (var m = 0; m < n; m++) p[i, m] /= sumP;
     325        }
     326      }
     327
     328      private static double[][] ComputeDistances(T[] x, IDistance<T> distance) {
     329        return x.Select(m => x.Select(n => distance.Get(m, n)).ToArray()).ToArray();
     330      }
     331
     332
     333
     334      private static double EvaluateErrorExact(double[,] p, double[,] y, int n, int d) {
     335        // Compute the squared Euclidean distance matrix
     336        var dd = new double[n, n];
     337        var q = new double[n, n];
     338        ComputeSquaredEuclideanDistance(y, n, d, dd);
     339
     340        // Compute Q-matrix and normalization sum
     341        var sumQ = double.Epsilon;
     342        for (var n1 = 0; n1 < n; n1++) {
     343          for (var m = 0; m < n; m++) {
     344            if (n1 != m) {
     345              q[n1, m] = 1 / (1 + dd[n1, m]);
     346              sumQ += q[n1, m];
     347            } else q[n1, m] = double.Epsilon;
     348          }
     349        }
     350        for (var i = 0; i < n; i++) for (var j = 0; j < n; j++) q[i, j] /= sumQ;
     351
     352        // Sum t-SNE error
     353        var c = .0;
     354        for (var i = 0; i < n; i++)
     355          for (var j = 0; j < n; j++) {
     356            c += p[i, j] * Math.Log((p[i, j] + float.Epsilon) / (q[i, j] + float.Epsilon));
     357          }
     358        return c;
     359      }
     360      private static double EvaluateErrorApproximate(IReadOnlyList<int> rowP, IReadOnlyList<int> colP, IReadOnlyList<double> valP, double[,] y, double theta) {
     361        // Get estimate of normalization term
     362        var n = y.GetLength(0);
     363        var d = y.GetLength(1);
     364        var tree = new SpacePartitioningTree(y);
     365        var buff = new double[d];
     366        double sumQ = 0.0;
     367        for (var i = 0; i < n; i++) tree.ComputeNonEdgeForces(i, theta, buff, ref sumQ);
     368
     369        // Loop over all edges to compute t-SNE error
     370        var c = .0;
     371        for (var k = 0; k < n; k++) {
     372          for (var i = rowP[k]; i < rowP[k + 1]; i++) {
     373            var q = .0;
     374            for (var j = 0; j < d; j++) buff[j] = y[k, j];
     375            for (var j = 0; j < d; j++) buff[j] -= y[colP[i], j];
     376            for (var j = 0; j < d; j++) q += buff[j] * buff[j];
     377            q = 1.0 / (1.0 + q) / sumQ;
     378            c += valP[i] * Math.Log((valP[i] + float.Epsilon) / (q + float.Epsilon));
     379          }
     380        }
     381        return c;
     382      }
     383      private static void SymmetrizeMatrix(IReadOnlyList<int> rowP, IReadOnlyList<int> colP, IReadOnlyList<double> valP, out int[] symRowP, out int[] symColP, out double[] symValP) {
     384
     385        // Count number of elements and row counts of symmetric matrix
     386        var n = rowP.Count - 1;
     387        var rowCounts = new int[n];
     388        for (var j = 0; j < n; j++) {
     389          for (var i = rowP[j]; i < rowP[j + 1]; i++) {
     390
     391            // Check whether element (col_P[i], n) is present
     392            var present = false;
     393            for (var m = rowP[colP[i]]; m < rowP[colP[i] + 1]; m++) {
     394              if (colP[m] == j) present = true;
     395            }
     396            if (present) rowCounts[j]++;
     397            else {
     398              rowCounts[j]++;
     399              rowCounts[colP[i]]++;
     400            }
     401          }
     402        }
     403        var noElem = 0;
     404        for (var i = 0; i < n; i++) noElem += rowCounts[i];
     405
     406        // Allocate memory for symmetrized matrix
     407        symRowP = new int[n + 1];
     408        symColP = new int[noElem];
     409        symValP = new double[noElem];
     410
     411        // Construct new row indices for symmetric matrix
     412        symRowP[0] = 0;
     413        for (var i = 0; i < n; i++) symRowP[i + 1] = symRowP[i] + rowCounts[i];
     414
     415        // Fill the result matrix
     416        var offset = new int[n];
     417        for (var j = 0; j < n; j++) {
     418          for (var i = rowP[j]; i < rowP[j + 1]; i++) {                                  // considering element(n, colP[i])
     419
     420            // Check whether element (col_P[i], n) is present
     421            var present = false;
     422            for (var m = rowP[colP[i]]; m < rowP[colP[i] + 1]; m++) {
     423              if (colP[m] != j) continue;
     424              present = true;
     425              if (j > colP[i]) continue; // make sure we do not add elements twice
     426              symColP[symRowP[j] + offset[j]] = colP[i];
     427              symColP[symRowP[colP[i]] + offset[colP[i]]] = j;
     428              symValP[symRowP[j] + offset[j]] = valP[i] + valP[m];
     429              symValP[symRowP[colP[i]] + offset[colP[i]]] = valP[i] + valP[m];
     430            }
     431
     432            // If (colP[i], n) is not present, there is no addition involved
     433            if (!present) {
     434              symColP[symRowP[j] + offset[j]] = colP[i];
     435              symColP[symRowP[colP[i]] + offset[colP[i]]] = j;
     436              symValP[symRowP[j] + offset[j]] = valP[i];
     437              symValP[symRowP[colP[i]] + offset[colP[i]]] = valP[i];
     438            }
     439
     440            // Update offsets
     441            if (present && (j > colP[i])) continue;
     442            offset[j]++;
     443            if (colP[i] != j) offset[colP[i]]++;
     444          }
     445        }
     446
     447        // Divide the result by two
     448        for (var i = 0; i < noElem; i++) symValP[i] /= 2.0;
     449      }
     450
    122451    }
    123     public override IDeepCloneable Clone(Cloner cloner) { return new TSNE<T>(this, cloner); }
    /// <summary>
    /// Creates a t-SNE runner for items of type T.
    /// </summary>
    /// <param name="distance">Distance measure between two items (drives the similarity computation).</param>
    /// <param name="random">Source of randomness for the initial embedding.</param>
    /// <param name="results">Optional result collection that is updated while the algorithm runs.</param>
    /// <param name="maxIter">Maximum number of gradient-descent iterations.</param>
    /// <param name="stopLyingIter">Iteration at which the early-exaggeration of P-values ends.</param>
    /// <param name="momSwitchIter">Iteration at which the momentum switches to <paramref name="finalMomentum"/>.</param>
    /// <param name="momentum">Momentum used during the first phase of the descent.</param>
    /// <param name="finalMomentum">Momentum used after <paramref name="momSwitchIter"/>.</param>
    /// <param name="eta">Learning rate.</param>
    /// <param name="dataRowLookup">Optional mapping from scatter-plot row name to the data indices it shows.</param>
    /// <param name="dataRows">Optional preconfigured scatter-plot row styles, keyed by row name.</param>
    public TSNE(IDistance<T> distance, IRandom random, ResultCollection results = null, int maxIter = 1000, int stopLyingIter = 250, int momSwitchIter = 250, double momentum = .5, double finalMomentum = .8, double eta = 200.0, Dictionary<string, List<int>> dataRowLookup = null, Dictionary<string, ScatterPlotDataRow> dataRows = null) {
      this.distance = distance;
      this.random = random;
      this.results = results;
      this.maxIter = maxIter;
      this.stopLyingIter = stopLyingIter;
      this.momSwitchIter = momSwitchIter;
      this.momentum = momentum;
      this.finalMomentum = finalMomentum;
      this.eta = eta;
      this.dataRowLookup = dataRowLookup;
      // Fall back to an empty style dictionary so FillScatterPlot can create default rows later.
      this.dataRows = dataRows ?? new Dictionary<string, ScatterPlotDataRow>();
    }

    /// <summary>
    /// Creates the iteration state for the step-wise API (see Iterate).
    /// </summary>
    public static TSNEState CreateState(T[] data, IDistance<T> distance, IRandom random, int newDimensions = 2, double perplexity = 25, double theta = 0,
      int stopLyingIter = 250, int momSwitchIter = 250, double momentum = .5, double finalMomentum = .8, double eta = 200.0) {
      return new TSNEState(data, distance, random, newDimensions, perplexity, theta, stopLyingIter, momSwitchIter, momentum, finalMomentum, eta);
    }
    138     #endregion
    139 
    /// <summary>
    /// Projects <paramref name="data"/> into <paramref name="newDimensions"/> dimensions by running
    /// the complete t-SNE gradient descent. theta == 0 selects the exact O(n^2) algorithm,
    /// theta &gt; 0 the Barnes-Hut approximation.
    /// </summary>
    /// <returns>The final embedding (data.Length x newDimensions).</returns>
    public double[,] Run(T[] data, int newDimensions, double perplexity, double theta) {
      var currentMomentum = momentum;
      var noDatapoints = data.Length;
      // Each point needs roughly 3 * perplexity neighbors.
      if (noDatapoints - 1 < 3 * perplexity) throw new ArgumentException("Perplexity too large for the number of data points!");
      SetUpResults(data);
      Running = true;
      var exact = Math.Abs(theta) < double.Epsilon; // theta == 0 -> exact variant
      var newData = new double[noDatapoints, newDimensions];
      var dY = new double[noDatapoints, newDimensions];    // gradient
      var uY = new double[noDatapoints, newDimensions];    // momentum-smoothed update
      var gains = new double[noDatapoints, newDimensions]; // per-component adaptive gains
      for (var r = 0; r < noDatapoints; r++)
        for (var c = 0; c < newDimensions; c++)
          gains[r, c] = 1.0;
      double[,] p = null;
      int[] rowP = null;
      int[] colP = null;
      double[] valP = null;
      var rand = new NormalDistributedRandom(random, 0, 1);

      // Pairwise input similarities: dense matrix (exact) or sparse CSR triple (approximate).
      if (exact) p = CalculateExactSimilarites(data, perplexity);
      else CalculateApproximateSimilarities(data, perplexity, out rowP, out colP, out valP);

      // "Lie" about the P-values (early exaggeration) to improve cluster separation.
      if (exact) {
        for (var r = 0; r < noDatapoints; r++)
          for (var c = 0; c < noDatapoints; c++)
            p[r, c] *= 12.0;
      } else {
        for (var e = 0; e < rowP[noDatapoints]; e++) valP[e] *= 12.0;
      }

      // Random near-zero initialization of the embedding.
      for (var r = 0; r < noDatapoints; r++)
        for (var c = 0; c < newDimensions; c++)
          newData[r, c] = rand.NextDouble() * .0001;  // TODO const

      // Main gradient-descent loop.
      for (var iter = 0; iter < maxIter && Running; iter++) {
        if (exact) ComputeExactGradient(p, newData, noDatapoints, newDimensions, dY);
        else ComputeApproximateGradient(rowP, colP, valP, newData, noDatapoints, newDimensions, dY, theta);

        // Adapt gains: grow while the gradient keeps its sign, shrink otherwise; clamp from below.
        for (var r = 0; r < noDatapoints; r++) {
          for (var c = 0; c < newDimensions; c++) {
            gains[r, c] = Math.Sign(dY[r, c]) != Math.Sign(uY[r, c]) ? gains[r, c] + .2 : gains[r, c] * .8;
            if (gains[r, c] < .01) gains[r, c] = .01;
          }
        }

        // Gradient step with momentum and gains.
        for (var r = 0; r < noDatapoints; r++) {
          for (var c = 0; c < newDimensions; c++) {
            uY[r, c] = currentMomentum * uY[r, c] - eta * gains[r, c] * dY[r, c];
            newData[r, c] = newData[r, c] + uY[r, c];
          }
        }

        // Keep the embedding centered.
        ZeroMean(newData);

        // End the early-exaggeration phase and switch momentum at the configured iterations.
        if (iter == stopLyingIter) {
          if (exact) {
            for (var r = 0; r < noDatapoints; r++)
              for (var c = 0; c < noDatapoints; c++)
                p[r, c] /= 12.0;
          } else {
            for (var e = 0; e < rowP[noDatapoints]; e++) valP[e] /= 12.0;
          }
        }
        if (iter == momSwitchIter) currentMomentum = finalMomentum;

        Analyze(exact, iter, p, rowP, colP, valP, newData, noDatapoints, newDimensions, theta);
      }
      return newData;
    }

    /// <summary>
    /// Performs a single t-SNE gradient-descent step on <paramref name="state"/> and returns
    /// the (mutated) embedding. Step-wise counterpart of <see cref="Run"/>.
    /// </summary>
    public static double[,] Iterate(TSNEState state) {
      if (state.exact)
        ComputeExactGradient(state.p, state.newData, state.noDatapoints, state.newDimensions, state.dY);
      else
        ComputeApproximateGradient(state.rowP, state.colP, state.valP, state.newData, state.noDatapoints, state.newDimensions, state.dY, state.theta);

      // Adapt gains (20% up while the gradient keeps its sign, 20% down otherwise) and clamp.
      for (var r = 0; r < state.noDatapoints; r++) {
        for (var c = 0; c < state.newDimensions; c++) {
          state.gains[r, c] = Math.Sign(state.dY[r, c]) != Math.Sign(state.uY[r, c])
            ? state.gains[r, c] + .2
            : state.gains[r, c] * .8;
          if (state.gains[r, c] < .01) state.gains[r, c] = .01;
        }
      }

      // Momentum- and gain-scaled gradient step.
      for (var r = 0; r < state.noDatapoints; r++) {
        for (var c = 0; c < state.newDimensions; c++) {
          state.uY[r, c] = state.currentMomentum * state.uY[r, c] - state.eta * state.gains[r, c] * state.dY[r, c];
          state.newData[r, c] = state.newData[r, c] + state.uY[r, c];
        }
      }

      // Keep the embedding centered.
      ZeroMean(state.newData);

      // End the early-exaggeration phase (the factor 12 mirrors the lying applied at start-up).
      if (state.iter == state.stopLyingIter) {
        if (state.exact) {
          for (var r = 0; r < state.noDatapoints; r++)
            for (var c = 0; c < state.noDatapoints; c++)
              state.p[r, c] /= 12.0;
        } else {
          for (var e = 0; e < state.rowP[state.noDatapoints]; e++) state.valP[e] /= 12.0;
        }
      }

      if (state.iter == state.momSwitchIter)
        state.currentMomentum = state.finalMomentum;

      state.iter++;
      return state.newData;
    }
    192504
    193     #region helpers
    194 
    /// <summary>
    /// Prepares (or resets) all entries of the result collection that are updated during a run.
    /// Safe to call before every run, including repeated runs on the same collection.
    /// </summary>
    private void SetUpResults(IReadOnlyCollection<T> data) {
      // Default: one scatter-plot row containing all data points.
      if (dataRowLookup == null) dataRowLookup = new Dictionary<string, List<int>> { { "Data", Enumerable.Range(0, data.Count).ToList() } };
      if (results == null) return;

      if (!results.ContainsKey(IterationResultName)) results.Add(new Result(IterationResultName, new IntValue(0)));
      else ((IntValue)results[IterationResultName].Value).Value = 0;

      if (!results.ContainsKey(ErrorResultName)) results.Add(new Result(ErrorResultName, new DoubleValue(0)));
      else ((DoubleValue)results[ErrorResultName].Value).Value = 0;

      if (!results.ContainsKey(ErrorPlotResultName)) results.Add(new Result(ErrorPlotResultName, new DataTable(ErrorPlotResultName, "Development of errors during gradient descent")));
      else results[ErrorPlotResultName].Value = new DataTable(ErrorPlotResultName, "Development of errors during gradient descent");

      var plot = results[ErrorPlotResultName].Value as DataTable;
      if (plot == null) throw new ArgumentException("could not create/access error data table in results collection");

      if (!plot.Rows.ContainsKey("errors")) plot.Rows.Add(new DataRow("errors"));
      plot.Rows["errors"].Values.Clear();

      // BUGFIX: these two results were added unconditionally before, which throws on a
      // second run (duplicate key); guard them like all other results above.
      if (!results.ContainsKey(ScatterPlotResultName)) results.Add(new Result(ScatterPlotResultName, "Plot of the projected data", new ScatterPlot(DataResultName, "")));
      if (!results.ContainsKey(DataResultName)) results.Add(new Result(DataResultName, "Projected Data", new DoubleMatrix()));
    }
    /// <summary>
    /// Records the current error and the (normalized) embedding in the result collection
    /// after one gradient-descent iteration. No-op when no result collection is attached.
    /// </summary>
    private void Analyze(bool exact, int iter, double[,] p, int[] rowP, int[] colP, double[] valP, double[,] newData, int noDatapoints, int newDimensions, double theta) {
      if (results == null) return;

      var plot = results[ErrorPlotResultName].Value as DataTable;
      if (plot == null) throw new ArgumentException("Could not create/access error data table in results collection. Was it removed by some effect?");

      // Current KL divergence, exact or Barnes-Hut approximated.
      var error = exact
        ? EvaluateErrorExact(p, newData, noDatapoints, newDimensions)
        : EvaluateErrorApproximate(rowP, colP, valP, newData, theta);
      plot.Rows["errors"].Values.Add(error);

      ((IntValue)results[IterationResultName].Value).Value = iter + 1;
      ((DoubleValue)results[ErrorResultName].Value).Value = error;

      // Publish the normalized embedding both as a matrix and as scatter-plot points.
      var ndata = Normalize(newData);
      results[DataResultName].Value = new DoubleMatrix(ndata);
      var splot = results[ScatterPlotResultName].Value as ScatterPlot;
      FillScatterPlot(ndata, splot);
    }
    /// <summary>
    /// Replaces the points of every configured scatter-plot row with the projected
    /// 2D coordinates of the data indices mapped to that row.
    /// </summary>
    private void FillScatterPlot(double[,] lowDimData, ScatterPlot plot) {
      foreach (var entry in dataRowLookup) {
        var rowName = entry.Key;
        if (!plot.Rows.ContainsKey(rowName)) {
          // Reuse a preconfigured row style if one was supplied, otherwise create a plain row.
          var row = dataRows.ContainsKey(rowName)
            ? dataRows[rowName]
            : new ScatterPlotDataRow(rowName, "", new List<Point2D<double>>());
          plot.Rows.Add(row);
        }
        plot.Rows[rowName].Points.Replace(entry.Value.Select(i => new Point2D<double>(lowDimData[i, 0], lowDimData[i, 1])));
      }
    }
    /// <summary>
    /// Rescales every column of <paramref name="data"/> linearly into [-0.5, 0.5]
    /// (column midpoint maps to 0). A constant column yields NaN, as before.
    /// </summary>
    private static double[,] Normalize(double[,] data) {
      var rows = data.GetLength(0);
      var cols = data.GetLength(1);

      // Per-column extrema, seeded from the first row.
      var max = new double[cols];
      var min = new double[cols];
      for (var j = 0; j < cols; j++) max[j] = min[j] = data[0, j];
      for (var i = 0; i < rows; i++) {
        for (var j = 0; j < cols; j++) {
          var v = data[i, j];
          if (v > max[j]) max[j] = v;
          if (v < min[j]) min[j] = v;
        }
      }

      // Shift to the column midpoint, then scale by the column range.
      var res = new double[rows, cols];
      for (var i = 0; i < rows; i++)
        for (var j = 0; j < cols; j++)
          res[i, j] = (data[i, j] - (max[j] + min[j]) / 2) / (max[j] - min[j]);
      return res;
    }
    /// <summary>
    /// Builds the sparse (CSR), symmetrized and globally normalized input similarities
    /// used by the Barnes-Hut variant.
    /// </summary>
    private void CalculateApproximateSimilarities(T[] data, double perplexity, out int[] rowP, out int[] colP, out double[] valP) {
      // Asymmetric similarities restricted to the 3 * perplexity nearest neighbors.
      ComputeGaussianPerplexity(data, data.Length, out rowP, out colP, out valP, perplexity, (int)(3 * perplexity));

      // Symmetrize: P := (P + P^T) / 2, still in CSR form.
      int[] sRowP, sColP;
      double[] sValP;
      SymmetrizeMatrix(rowP, colP, valP, out sRowP, out sColP, out sValP);
      rowP = sRowP;
      colP = sColP;
      valP = sValP;

      // Normalize so all stored values sum to one.
      var nnz = rowP[data.Length];
      var sumP = .0;
      for (var i = 0; i < nnz; i++) sumP += valP[i];
      for (var i = 0; i < nnz; i++) valP[i] /= sumP;
    }
    /// <summary>
    /// Builds the dense, symmetrized and globally normalized similarity matrix used by
    /// the exact variant. (Method name kept as-is, typo included, for API compatibility.)
    /// </summary>
    private double[,] CalculateExactSimilarites(T[] data, double perplexity) {
      var n = data.Length;
      var p = new double[n, n];
      ComputeGaussianPerplexity(data, n, p, perplexity);

      // Symmetrize: p[a,b] = p[b,a] = p[a,b] + p[b,a].
      for (var a = 0; a < n; a++) {
        for (var b = a + 1; b < n; b++) {
          p[a, b] += p[b, a];
          p[b, a] = p[a, b];
        }
      }

      // Normalize so the whole matrix sums to one.
      var sumP = .0;
      for (var a = 0; a < n; a++) for (var b = 0; b < n; b++) sumP += p[a, b];
      for (var a = 0; a < n; a++) for (var b = 0; b < n; b++) p[a, b] /= sumP;
      return p;
    }
    292 
    /// <summary>
    /// Computes row-normalized Gaussian input similarities restricted to the k nearest
    /// neighbors of each point (found via a vantage-point tree). The per-point bandwidth
    /// beta is binary-searched so the row entropy matches log(perplexity).
    /// The result is stored in CSR form: every row holds exactly k entries.
    /// </summary>
    private void ComputeGaussianPerplexity(IReadOnlyList<T> x, int n, out int[] rowP, out int[] colP, out double[] valP, double perplexity, int k) {
      if (perplexity > k) throw new ArgumentException("Perplexity should be lower than K!");

      // CSR buffers: fixed fan-out of k entries per row.
      rowP = new int[n + 1];
      colP = new int[n * k];
      valP = new double[n * k];
      rowP[0] = 0;
      for (var i = 0; i < n; i++) rowP[i + 1] = rowP[i] + k;

      var items = new List<IndexedItem<T>>();
      for (var i = 0; i < n; i++) items.Add(new IndexedItem<T>(i, x[i]));

      // Build ball tree on the data set.
      // NOTE(review): the tree is re-created on every call; confirm whether caching pays off.
      var tree = new VantagePointTree<IndexedItem<T>>(new IndexedItemDistance<T>(distance), items);

      var kernelRow = new double[n - 1];
      for (var i = 0; i < n; i++) {
        IList<IndexedItem<T>> neighbors;
        IList<double> distances;
        // k + 1 because the query point itself is returned as its own nearest neighbor.
        tree.Search(items[i], k + 1, out neighbors, out distances);

        // Binary search for the bandwidth beta that yields the desired entropy.
        var converged = false;
        var beta = 1.0;
        var betaMin = double.MinValue;
        var betaMax = double.MaxValue;
        const double tol = 1e-5;  // TODO: why 1e-5?

        var tries = 0;
        double sumP = 0;
        while (!converged && tries < 200) {
          // Gaussian kernel for the current beta (index 0 is the self-neighbor, skipped).
          for (var m = 0; m < k; m++) kernelRow[m] = Math.Exp(-beta * distances[m + 1]);

          // Shannon entropy of the current row.
          sumP = double.Epsilon;
          for (var m = 0; m < k; m++) sumP += kernelRow[m];
          var h = .0;
          for (var m = 0; m < k; m++) h += beta * (distances[m + 1] * kernelRow[m]);
          h = h / sumP + Math.Log(sumP);

          var hDiff = h - Math.Log(perplexity);
          if (hDiff < tol && -hDiff < tol) {
            converged = true;
          } else if (hDiff > 0) {
            // Entropy too high -> increase beta (narrower kernel).
            betaMin = beta;
            beta = betaMax.IsAlmost(double.MaxValue) || betaMax.IsAlmost(double.MinValue)
              ? beta * 2.0
              : (beta + betaMax) / 2.0;
          } else {
            // Entropy too low -> decrease beta (wider kernel).
            betaMax = beta;
            beta = betaMin.IsAlmost(double.MinValue) || betaMin.IsAlmost(double.MaxValue)
              ? beta / 2.0
              : (beta + betaMin) / 2.0;
          }
          tries++;
        }

        // Row-normalize the kernel values and write them into the CSR structure.
        for (var m = 0; m < k; m++) kernelRow[m] /= sumP;
        for (var m = 0; m < k; m++) {
          colP[rowP[i] + m] = neighbors[m + 1].Index;
          valP[rowP[i] + m] = kernelRow[m];
        }
      }
    }
    /// <summary>
    /// Computes the dense Gaussian similarity matrix p row by row. The per-point bandwidth
    /// beta is binary-searched so each row's entropy matches log(perplexity); finally every
    /// row is normalized to sum to one.
    /// </summary>
    private void ComputeGaussianPerplexity(T[] x, int n, double[,] p, double perplexity) {
      var dd = ComputeDistances(x);  // full pairwise distance matrix

      for (var i = 0; i < n; i++) {
        // Binary-search state for this row's bandwidth.
        var converged = false;
        var beta = 1.0;
        var betaMin = -double.MaxValue;
        var betaMax = double.MaxValue;
        const double tol = 1e-5;
        double sumP = 0;

        var tries = 0;
        while (!converged && tries < 200) {       // TODO constant
          // Gaussian kernel for the current beta; self-similarity is (almost) zero.
          for (var m = 0; m < n; m++) p[i, m] = Math.Exp(-beta * dd[i][m]);
          p[i, i] = double.Epsilon;

          // Shannon entropy of the current row.
          sumP = double.Epsilon;
          for (var m = 0; m < n; m++) sumP += p[i, m];
          var h = 0.0;
          for (var m = 0; m < n; m++) h += beta * (dd[i][m] * p[i, m]);
          h = h / sumP + Math.Log(sumP);

          var hDiff = h - Math.Log(perplexity);
          if (hDiff < tol && -hDiff < tol) {
            converged = true;
          } else if (hDiff > 0) {
            // Entropy too high -> increase beta (narrower kernel).
            betaMin = beta;
            beta = betaMax.IsAlmost(double.MaxValue) || betaMax.IsAlmost(double.MinValue)
              ? beta * 2.0
              : (beta + betaMax) / 2.0;
          } else {
            // Entropy too low -> decrease beta (wider kernel).
            betaMax = beta;
            beta = betaMin.IsAlmost(double.MinValue) || betaMin.IsAlmost(double.MaxValue)
              ? beta / 2.0
              : (beta + betaMin) / 2.0;
          }
          tries++;
        }

        // Row-normalize p[i, *].
        for (var m = 0; m < n; m++) p[i, m] /= sumP;
      }
    }
    428 
    /// <summary>
    /// Computes the full pairwise distance matrix for <paramref name="x"/> using the
    /// configured distance measure.
    /// </summary>
    private double[][] ComputeDistances(T[] x) {
      return x.Select(m => x.Select(n => distance.Get(m, n)).ToArray()).ToArray();
    }

    /// <summary>
    /// Computes the Barnes-Hut approximated t-SNE gradient dC for the embedding y:
    /// dC = posF - negF / sumQ (cf. van der Maaten's bhtsne reference implementation).
    /// </summary>
    private static void ComputeApproximateGradient(int[] rowP, int[] colP, double[] valP, double[,] y, int n, int d, double[,] dC, double theta) {
      var tree = new SpacePartitioningTree(y);
      double sumQ = 0.0;
      var posF = new double[n, d];   // attractive forces (sparse edges)
      var negF = new double[n, d];   // repulsive forces (tree approximation)
      tree.ComputeEdgeForces(rowP, colP, valP, n, posF);
      var row = new double[d];
      for (var i = 0; i < n; i++) {
        // BUGFIX: Buffer.BlockCopy counts BYTES, not elements (the old count of `d`
        // copied only d bytes), and the forces accumulated into the temporary row
        // must be copied back, otherwise negF stays all-zero.
        Buffer.BlockCopy(negF, sizeof(double) * i * d, row, 0, sizeof(double) * d);
        tree.ComputeNonEdgeForces(i, theta, row, ref sumQ);
        Buffer.BlockCopy(row, 0, negF, sizeof(double) * i * d, sizeof(double) * d);
      }

      // Final t-SNE gradient: only the repulsive part is normalized by sumQ.
      for (var i = 0; i < n; i++)
        for (var j = 0; j < d; j++)
          dC[i, j] = posF[i, j] - negF[i, j] / sumQ;
    }
    432524
     
    462554      }
    463555    }
     556
    464557    private static void ComputeSquaredEuclideanDistance(double[,] x, int n, int d, double[,] dd) {
    465558      var dataSums = new double[n];
     
    485578      }
    486579    }
    /// <summary>
    /// Computes the Barnes-Hut approximated t-SNE gradient dC for the embedding y:
    /// dC = posF - negF / sumQ (cf. van der Maaten's bhtsne reference implementation).
    /// </summary>
    private static void ComputeApproximateGradient(int[] rowP, int[] colP, double[] valP, double[,] y, int n, int d, double[,] dC, double theta) {
      var tree = new SpacePartitioningTree(y);
      double[] sumQ = { 0 };
      var posF = new double[n, d];   // attractive forces (sparse edges)
      var negF = new double[n, d];   // repulsive forces (tree approximation)
      tree.ComputeEdgeForces(rowP, colP, valP, n, posF);
      var row = new double[d];
      for (var n1 = 0; n1 < n; n1++) {
        // BUGFIX: Buffer.BlockCopy counts BYTES, not elements (the old count of `d`
        // copied only d bytes), and the forces accumulated into the temporary row
        // must be copied back, otherwise negF stays all-zero.
        Buffer.BlockCopy(negF, sizeof(double) * n1 * d, row, 0, sizeof(double) * d);
        tree.ComputeNonEdgeForces(n1, theta, row, sumQ);
        Buffer.BlockCopy(row, 0, negF, sizeof(double) * n1 * d, sizeof(double) * d);
      }

      // BUGFIX (resolves the "check parenthesis" TODO): only the repulsive part is
      // normalized by sumQ, matching the reference implementation.
      for (var i = 0; i < n; i++)
        for (var j = 0; j < d; j++)
          dC[i, j] = posF[i, j] - negF[i, j] / sumQ[0];
    }
    505 
    /// <summary>
    /// Exact t-SNE error: the Kullback-Leibler divergence KL(P||Q) between the input
    /// similarities p and the embedding similarities q (Student-t kernel), in O(n^2).
    /// </summary>
    private static double EvaluateErrorExact(double[,] p, double[,] y, int n, int d) {
      // Squared Euclidean distances in the embedding.
      var dd = new double[n, n];
      ComputeSquaredEuclideanDistance(y, n, d, dd);

      // Student-t kernel matrix and its normalization constant.
      var q = new double[n, n];
      var sumQ = double.Epsilon;
      for (var a = 0; a < n; a++) {
        for (var b = 0; b < n; b++) {
          if (a == b) {
            q[a, b] = double.Epsilon;
          } else {
            q[a, b] = 1 / (1 + dd[a, b]);
            sumQ += q[a, b];
          }
        }
      }
      for (var a = 0; a < n; a++) for (var b = 0; b < n; b++) q[a, b] /= sumQ;

      // Sum of p * log(p / q); the epsilons avoid log(0) and division by zero.
      var c = .0;
      for (var a = 0; a < n; a++)
        for (var b = 0; b < n; b++)
          c += p[a, b] * Math.Log((p[a, b] + float.Epsilon) / (q[a, b] + float.Epsilon));
      return c;
    }
    /// <summary>
    /// Barnes-Hut approximation of the t-SNE error (KL divergence). The normalization
    /// constant sum(Q) is estimated via the space-partitioning tree; the divergence is
    /// then accumulated over the stored sparse edges only.
    /// </summary>
    private static double EvaluateErrorApproximate(IReadOnlyList<int> rowP, IReadOnlyList<int> colP, IReadOnlyList<double> valP, double[,] y, double theta) {
      var n = y.GetLength(0);
      var d = y.GetLength(1);

      // Estimate sum(Q) over all pairs using the tree.
      var tree = new SpacePartitioningTree(y);
      var buff = new double[d];
      double[] sumQ = { 0 };
      for (var i = 0; i < n; i++) tree.ComputeNonEdgeForces(i, theta, buff, sumQ);

      // KL divergence over the stored edges.
      var c = .0;
      for (var k = 0; k < n; k++) {
        for (var i = rowP[k]; i < rowP[k + 1]; i++) {
          // Squared embedding distance between point k and its neighbor colP[i].
          var q = .0;
          for (var j = 0; j < d; j++) {
            var diff = y[k, j] - y[colP[i], j];
            buff[j] = diff;
            q += diff * diff;
          }
          q = 1.0 / (1.0 + q) / sumQ[0];
          c += valP[i] * Math.Log((valP[i] + float.Epsilon) / (q + float.Epsilon));
        }
      }
      return c;
    }
    /// <summary>
    /// Symmetrizes a sparse similarity matrix given in CSR form:
    /// sym = (P + P^T) / 2, again returned in CSR form.
    /// </summary>
    private static void SymmetrizeMatrix(IReadOnlyList<int> rowP, IReadOnlyList<int> colP, IReadOnlyList<double> valP, out int[] symRowP, out int[] symColP, out double[] symValP) {
      var n = rowP.Count - 1;

      // First pass: count the entries of each row of the symmetrized matrix. An entry
      // (j, c) yields one output entry in row j; if its mirror (c, j) is absent from the
      // input it additionally yields one in row c.
      var rowCounts = new int[n];
      for (var j = 0; j < n; j++) {
        for (var i = rowP[j]; i < rowP[j + 1]; i++) {
          var c = colP[i];
          var mirrored = false;
          for (var m = rowP[c]; m < rowP[c + 1]; m++) {
            if (colP[m] == j) { mirrored = true; break; }
          }
          rowCounts[j]++;
          if (!mirrored) rowCounts[c]++;
        }
      }
      var noElem = rowCounts.Sum();

      // Allocate the symmetrized matrix and build its row pointers.
      symRowP = new int[n + 1];
      symColP = new int[noElem];
      symValP = new double[noElem];
      symRowP[0] = 0;
      for (var i = 0; i < n; i++) symRowP[i + 1] = symRowP[i] + rowCounts[i];

      // Second pass: fill in column indices and values.
      var offset = new int[n];
      for (var j = 0; j < n; j++) {
        for (var i = rowP[j]; i < rowP[j + 1]; i++) {   // considering element (j, colP[i])
          var c = colP[i];

          // Locate the mirrored element (c, j), if any; like the original implementation,
          // the last occurrence wins if the input contains duplicates.
          var mirror = -1;
          for (var m = rowP[c]; m < rowP[c + 1]; m++) {
            if (colP[m] == j) mirror = m;
          }

          if (mirror >= 0) {
            // Both (j, c) and (c, j) exist: write their sum once, from the smaller
            // row index, so the pair is not added twice.
            if (j <= c) {
              symColP[symRowP[j] + offset[j]] = c;
              symColP[symRowP[c] + offset[c]] = j;
              symValP[symRowP[j] + offset[j]] = valP[i] + valP[mirror];
              symValP[symRowP[c] + offset[c]] = valP[i] + valP[mirror];
            }
          } else {
            // Only (j, c) exists: its value goes to both mirror positions.
            symColP[symRowP[j] + offset[j]] = c;
            symColP[symRowP[c] + offset[c]] = j;
            symValP[symRowP[j] + offset[j]] = valP[i];
            symValP[symRowP[c] + offset[c]] = valP[i];
          }

          // Advance the write offsets unless this pair was already handled from the
          // other side (mirrored and j > c).
          if (mirror >= 0 && j > c) continue;
          offset[j]++;
          if (c != j) offset[c]++;
        }
      }

      // Halve everything: (P + P^T) / 2.
      for (var i = 0; i < noElem; i++) symValP[i] /= 2.0;
    }
     580
    622581    private static void ZeroMean(double[,] x) {
    623582      // Compute data mean
     
    640599      }
    641600    }
    642     #endregion
    643601  }
    644602}
Note: See TracChangeset for help on using the changeset viewer.