
Timestamp: 03/01/13 18:32:26 (11 years ago)
Author: abeham
Message: #1913: Changed NCA to use LM-BFGS optimization algorithm, added model/solution creators, added operator for gradient calculation
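
For context: LM-BFGS (limited-memory BFGS) replaces the conjugate-gradient (alglib.mincg) loop that this changeset removes from Train()/Optimize() below. The following is a minimal sketch of the driver pattern, assuming ALGLIB's C# minlbfgs API, which the HeuristicLab.Algorithms.GradientDescent operators appear to wrap; the toy objective merely stands in for the NCA gradient:

    using System;

    public static class LbfgsSketch {
      public static void Main() {
        double[] x = { 0.0, 0.0 };                  // the flattened NCA matrix would go here
        alglib.minlbfgsstate state;
        alglib.minlbfgsreport rep;
        alglib.minlbfgscreate(1, x, out state);     // keep m = 1 correction pair
        alglib.minlbfgssetcond(state, 0, 0, 0, 50); // at most 50 iterations, matching the Iterations default
        alglib.minlbfgsoptimize(state,
          (double[] p, ref double f, double[] g, object o) => {
            // illustrative objective f(p) = sum_i (p_i - 1)^2; NCA supplies its own f and gradient
            f = 0;
            for (int i = 0; i < p.Length; i++) {
              f += (p[i] - 1) * (p[i] - 1);
              g[i] = 2 * (p[i] - 1);
            }
          }, null, null);
        alglib.minlbfgsresults(state, out x, out rep);
        Console.WriteLine("minimum at ({0}, {1})", x[0], x[1]);
      }
    }
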

File: 1 edited
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs

Legend: unchanged lines are shown with a leading space; added lines are prefixed with +, removed lines with -, and … marks elided unchanged lines.

--- r8681
+++ r9270
 
 using System;
-using System.Collections.Generic;
 using System.Linq;
-using System.Threading;
-using HeuristicLab.Analysis;
+using HeuristicLab.Algorithms.GradientDescent;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Operators;
 using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
…
 using HeuristicLab.PluginInfrastructure;
 using HeuristicLab.Problems.DataAnalysis;
+using HeuristicLab.Random;
 
 namespace HeuristicLab.Algorithms.DataAnalysis {
-  internal delegate void Reporter(double quality, double[] coefficients, double[] gradients);
   /// <summary>
   /// Neighborhood Components Analysis
…
   [Creatable("Data Analysis")]
   [StorableClass]
-  public sealed class NcaAlgorithm : FixedDataAnalysisAlgorithm<IClassificationProblem> {
+  public sealed class NcaAlgorithm : EngineAlgorithm {
+    #region Parameter Names
+    private const string SeedParameterName = "Seed";
+    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
+    private const string KParameterName = "K";
+    private const string DimensionsParameterName = "Dimensions";
+    private const string InitializationParameterName = "Initialization";
+    private const string NeighborSamplesParameterName = "NeighborSamples";
+    private const string IterationsParameterName = "Iterations";
+    private const string RegularizationParameterName = "Regularization";
+    private const string NcaModelCreatorParameterName = "NcaModelCreator";
+    private const string NcaSolutionCreatorParameterName = "NcaSolutionCreator";
+    private const string ApproximateGradientsParameterName = "ApproximateGradients";
+    private const string NcaMatrixParameterName = "NcaMatrix";
+    private const string NcaMatrixGradientsParameterName = "NcaMatrixGradients";
+    private const string QualityParameterName = "Quality";
+    #endregion
+
+    public override Type ProblemType { get { return typeof(IClassificationProblem); } }
+    public new IClassificationProblem Problem {
+      get { return (IClassificationProblem)base.Problem; }
+      set { base.Problem = value; }
+    }
+
     #region Parameter Properties
+    public IValueParameter<IntValue> SeedParameter {
+      get { return (IValueParameter<IntValue>)Parameters[SeedParameterName]; }
+    }
+    public IValueParameter<BoolValue> SetSeedRandomlyParameter {
+      get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
+    }
     public IFixedValueParameter<IntValue> KParameter {
-      get { return (IFixedValueParameter<IntValue>)Parameters["K"]; }
+      get { return (IFixedValueParameter<IntValue>)Parameters[KParameterName]; }
     }
     public IFixedValueParameter<IntValue> DimensionsParameter {
-      get { return (IFixedValueParameter<IntValue>)Parameters["Dimensions"]; }
-    }
-    public IConstrainedValueParameter<INCAInitializer> InitializationParameter {
-      get { return (IConstrainedValueParameter<INCAInitializer>)Parameters["Initialization"]; }
+      get { return (IFixedValueParameter<IntValue>)Parameters[DimensionsParameterName]; }
+    }
+    public IConstrainedValueParameter<INcaInitializer> InitializationParameter {
+      get { return (IConstrainedValueParameter<INcaInitializer>)Parameters[InitializationParameterName]; }
     }
     public IFixedValueParameter<IntValue> NeighborSamplesParameter {
-      get { return (IFixedValueParameter<IntValue>)Parameters["NeighborSamples"]; }
+      get { return (IFixedValueParameter<IntValue>)Parameters[NeighborSamplesParameterName]; }
     }
     public IFixedValueParameter<IntValue> IterationsParameter {
-      get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
+      get { return (IFixedValueParameter<IntValue>)Parameters[IterationsParameterName]; }
     }
     public IFixedValueParameter<DoubleValue> RegularizationParameter {
-      get { return (IFixedValueParameter<DoubleValue>)Parameters["Regularization"]; }
+      get { return (IFixedValueParameter<DoubleValue>)Parameters[RegularizationParameterName]; }
+    }
+    public IValueParameter<BoolValue> ApproximateGradientsParameter {
+      get { return (IValueParameter<BoolValue>)Parameters[ApproximateGradientsParameterName]; }
+    }
+    public IValueParameter<INcaModelCreator> NcaModelCreatorParameter {
+      get { return (IValueParameter<INcaModelCreator>)Parameters[NcaModelCreatorParameterName]; }
+    }
+    public IValueParameter<INcaSolutionCreator> NcaSolutionCreatorParameter {
+      get { return (IValueParameter<INcaSolutionCreator>)Parameters[NcaSolutionCreatorParameterName]; }
     }
     #endregion
 
     #region Properties
+    public int Seed {
+      get { return SeedParameter.Value.Value; }
+      set { SeedParameter.Value.Value = value; }
+    }
+    public bool SetSeedRandomly {
+      get { return SetSeedRandomlyParameter.Value.Value; }
+      set { SetSeedRandomlyParameter.Value.Value = value; }
+    }
     public int K {
       get { return KParameter.Value.Value; }
…
       get { return RegularizationParameter.Value.Value; }
       set { RegularizationParameter.Value.Value = value; }
+    }
+    public INcaModelCreator NcaModelCreator {
+      get { return NcaModelCreatorParameter.Value; }
+      set { NcaModelCreatorParameter.Value = value; }
+    }
+    public INcaSolutionCreator NcaSolutionCreator {
+      get { return NcaSolutionCreatorParameter.Value; }
+      set { NcaSolutionCreatorParameter.Value = value; }
     }
     #endregion
…
     public NcaAlgorithm()
       : base() {
-      Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(3)));
-      Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
-      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
-      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(60)));
-      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optima has already been reached.", new IntValue(50)));
-      Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative paramter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
-
-      INCAInitializer defaultInitializer = null;
-      foreach (var initializer in ApplicationManager.Manager.GetInstances<INCAInitializer>().OrderBy(x => x.ItemName)) {
-        if (initializer is LDAInitializer) defaultInitializer = initializer;
+      Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The seed of the random number generator.", new IntValue(0)));
+      Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "A boolean flag that indicates whether the seed should be randomly reset each time the algorithm is run.", new BoolValue(true)));
+      Parameters.Add(new FixedValueParameter<IntValue>(KParameterName, "The K for the nearest neighbor.", new IntValue(3)));
+      Parameters.Add(new FixedValueParameter<IntValue>(DimensionsParameterName, "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
+      Parameters.Add(new ConstrainedValueParameter<INcaInitializer>(InitializationParameterName, "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
+      Parameters.Add(new FixedValueParameter<IntValue>(NeighborSamplesParameterName, "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one will be used.", new IntValue(60)));
+      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optima has already been reached.", new IntValue(50)));
+      Parameters.Add(new FixedValueParameter<DoubleValue>(RegularizationParameterName, "A non-negative paramter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
+      Parameters.Add(new ValueParameter<INcaModelCreator>(NcaModelCreatorParameterName, "Creates an NCA model out of the matrix.", new NcaModelCreator()));
+      Parameters.Add(new ValueParameter<INcaSolutionCreator>(NcaSolutionCreatorParameterName, "Creates an NCA solution given a model and some data.", new NcaSolutionCreator()));
+      Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "True if the gradient should be approximated otherwise they are computed exactly.", new BoolValue()));
+
+      NcaSolutionCreatorParameter.Hidden = true;
+      ApproximateGradientsParameter.Hidden = true;
+
+      INcaInitializer defaultInitializer = null;
+      foreach (var initializer in ApplicationManager.Manager.GetInstances<INcaInitializer>().OrderBy(x => x.ItemName)) {
+        if (initializer is LdaInitializer) defaultInitializer = initializer;
         InitializationParameter.ValidValues.Add(initializer);
       }
       if (defaultInitializer != null) InitializationParameter.Value = defaultInitializer;
 
+      var randomCreator = new RandomCreator();
+      var ncaInitializer = new Placeholder();
+      var bfgsInitializer = new LbfgsInitializer();
+      var makeStep = new LbfgsMakeStep();
+      var branch = new ConditionalBranch();
+      var gradientCalculator = new NcaGradientCalculator();
+      var modelCreator = new Placeholder();
+      var updateResults = new LbfgsUpdateResults();
+      var analyzer = new LbfgsAnalyzer();
+      var finalModelCreator = new Placeholder();
+      var finalAnalyzer = new LbfgsAnalyzer();
+      var solutionCreator = new Placeholder();
+
+      OperatorGraph.InitialOperator = randomCreator;
+      randomCreator.SeedParameter.ActualName = SeedParameterName;
+      randomCreator.SeedParameter.Value = null;
+      randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
+      randomCreator.SetSeedRandomlyParameter.Value = null;
+      randomCreator.Successor = ncaInitializer;
+
+      ncaInitializer.Name = "(NcaInitializer)";
+      ncaInitializer.OperatorParameter.ActualName = InitializationParameterName;
+      ncaInitializer.Successor = bfgsInitializer;
+
+      bfgsInitializer.IterationsParameter.ActualName = IterationsParameterName;
+      bfgsInitializer.PointParameter.ActualName = NcaMatrixParameterName;
+      bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
+      bfgsInitializer.Successor = makeStep;
+
+      makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
+      makeStep.PointParameter.ActualName = NcaMatrixParameterName;
+      makeStep.Successor = branch;
+
+      branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
+      branch.FalseBranch = gradientCalculator;
+      branch.TrueBranch = finalModelCreator;
+
+      gradientCalculator.Successor = modelCreator;
+
+      modelCreator.OperatorParameter.ActualName = NcaModelCreatorParameterName;
+      modelCreator.Successor = updateResults;
+
+      updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
+      updateResults.QualityParameter.ActualName = QualityParameterName;
+      updateResults.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
+      updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
+      updateResults.Successor = analyzer;
+
+      analyzer.QualityParameter.ActualName = QualityParameterName;
+      analyzer.PointParameter.ActualName = NcaMatrixParameterName;
+      analyzer.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
+      analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
+      analyzer.PointsTableParameter.ActualName = "Matrix table";
+      analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
+      analyzer.QualitiesTableParameter.ActualName = "Qualities";
+      analyzer.Successor = makeStep;
+
+      finalModelCreator.OperatorParameter.ActualName = NcaModelCreatorParameterName;
+      finalModelCreator.Successor = finalAnalyzer;
+
+      finalAnalyzer.QualityParameter.ActualName = QualityParameterName;
+      finalAnalyzer.PointParameter.ActualName = NcaMatrixParameterName;
+      finalAnalyzer.QualityGradientsParameter.ActualName = NcaMatrixGradientsParameterName;
+      finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
+      finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
+      finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
+      finalAnalyzer.Successor = solutionCreator;
+
+      solutionCreator.OperatorParameter.ActualName = NcaSolutionCreatorParameterName;
+
       Problem = new ClassificationProblem();
     }
…
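
The wiring added above encodes, as an operator graph, the reverse-communication loop that the removed Optimize() method (further below) ran in-process: make an L-BFGS step, branch on the termination criterion, compute the NCA gradient, feed it back, analyze, and repeat. A self-contained sketch of that control flow; every helper name here is an illustrative stand-in, not a HeuristicLab type:

    using System;

    public static class OperatorGraphFlow {
      public static void Main() {
        const int iterations = 50;
        int step = 0;
        Func<bool> makeStep = () => ++step > iterations;    // LbfgsMakeStep sets the termination criterion
        Action computeGradient = () => { /* NcaGradientCalculator: quality + matrix gradients */ };
        Action updateAndAnalyze = () => { /* LbfgsUpdateResults feeds them back; LbfgsAnalyzer logs tables */ };

        while (!makeStep()) {      // branch.FalseBranch: run another iteration
          computeGradient();       //   gradientCalculator -> modelCreator
          updateAndAnalyze();      //   updateResults -> analyzer -> back to makeStep
        }
        // branch.TrueBranch: finalModelCreator -> finalAnalyzer -> solutionCreator
        Console.WriteLine("terminated after {0} steps", step - 1);
      }
    }
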
     }
 
-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      if (!Parameters.ContainsKey("Regularization")) {
-        Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative paramter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
-      }
-    }
-
     public override void Prepare() {
       if (Problem != null) base.Prepare();
     }
-
-    protected override void Run() {
-      var initializer = InitializationParameter.Value;
-
-      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
-      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Regularization, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
-      var solution = model.CreateClassificationSolution(clonedProblem);
-      if (!Results.ContainsKey("ClassificationSolution"))
-        Results.Add(new Result("ClassificationSolution", "The classification solution.", solution));
-      else Results["ClassificationSolution"].Value = solution;
-    }
-
-    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
-      var clonedProblem = (IClassificationProblemData)data.Clone();
-      var model = Train(clonedProblem, k, dimensions, neighborSamples, regularization, iterations, initializer);
-      return model.CreateClassificationSolution(clonedProblem);
-    }
-
-    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
-      return Train(problemData, k, dimensions, neighborSamples, regularization, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
-    }
-
-    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, double regularization, int iterations, double[,] initalMatrix) {
-      var matrix = new double[initalMatrix.Length];
-      for (int i = 0; i < initalMatrix.GetLength(0); i++)
-        for (int j = 0; j < initalMatrix.GetLength(1); j++)
-          matrix[i * initalMatrix.GetLength(1) + j] = initalMatrix[i, j];
-      return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, regularization, iterations, matrix, null, CancellationToken.None);
-    }
-
-    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
-      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
-      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
-      var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
-      var attributes = scaledData.GetLength(1);
-
-      alglib.mincgstate state;
-      alglib.mincgreport rep;
-      alglib.mincgcreate(matrix, out state);
-      alglib.mincgsetcond(state, 0, 0, 0, iterations);
-      alglib.mincgsetxrep(state, true);
-      //alglib.mincgsetgradientcheck(state, 0.01);
-      int neighborSampleSize = neighborSamples;
-      Optimize(state, scaledData, classes, dimensions, neighborSampleSize, regularization, cancellation, reporter);
-      alglib.mincgresults(state, out matrix, out rep);
-      if (rep.terminationtype == -7) throw new InvalidOperationException("Gradient verification failed.");
-
-      var transformationMatrix = new double[attributes, dimensions];
-      var counter = 0;
-      for (var i = 0; i < attributes; i++)
-        for (var j = 0; j < dimensions; j++)
-          transformationMatrix[i, j] = matrix[counter++];
-
-      return new NcaModel(k, transformationMatrix, data.Dataset, data.TrainingIndices, data.TargetVariable, data.AllowedInputVariables, scaling, data.ClassValues.ToArray());
-    }
-
-    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda, CancellationToken cancellation, Reporter reporter) {
-      while (alglib.mincgiteration(state)) {
-        if (cancellation.IsCancellationRequested) break;
-        if (state.needfg) {
-          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize, lambda);
-          continue;
-        }
-        if (state.innerobj.xupdated) {
-          if (reporter != null)
-            reporter(state.innerobj.f, state.innerobj.x, state.innerobj.g);
-          continue;
-        }
-        throw new InvalidOperationException("Neighborhood Components Analysis: Error in Optimize() (some derivatives were not provided?)");
-      }
-    }
-
-    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda) {
-      var instances = data.GetLength(0);
-      var attributes = data.GetLength(1);
-
-      var AMatrix = new Matrix(A, A.Length / dimensions, dimensions);
-
-      alglib.sparsematrix probabilities;
-      alglib.sparsecreate(instances, instances, out probabilities);
-      var transformedDistances = new Dictionary<int, double>(instances);
-      for (int i = 0; i < instances; i++) {
-        var iVector = new Matrix(GetRow(data, i), data.GetLength(1));
-        for (int k = 0; k < instances; k++) {
-          if (k == i) {
-            transformedDistances.Remove(k);
-            continue;
-          }
-          var kVector = new Matrix(GetRow(data, k));
-          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SumOfSquares());
-        }
-        var normalization = transformedDistances.Sum(x => x.Value);
-        if (normalization <= 0) continue;
-        foreach (var s in transformedDistances.Where(x => x.Value > 0).OrderByDescending(x => x.Value).Take(neighborSampleSize)) {
-          alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
-        }
-      }
-      alglib.sparseconverttocrs(probabilities); // needed to enumerate in order (top-down and left-right)
-
-      int t0 = 0, t1 = 0, r, c;
-      double val;
-      var pi = new double[instances];
-      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
-        if (classes[r].IsAlmost(classes[c])) {
-          pi[r] += val;
-        }
-      }
-
-      var innerSum = new double[attributes, attributes];
-      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
-        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
-        vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);
-
-        if (classes[r].IsAlmost(classes[c])) {
-          vector.OuterProduct(vector).Multiply(-val).AddTo(innerSum);
-        }
-      }
-
-      func = -pi.Sum() + lambda * AMatrix.SumOfSquares();
-
-      r = 0;
-      var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
-      foreach (var g in newGrad) {
-        grad[r] = g + lambda * 2 * A[r];
-        r++;
-      }
-    }
-
-    private void ReportQuality(double func, double[] coefficients, double[] gradients) {
-      var instances = Problem.ProblemData.TrainingIndices.Count();
-      DataTable qualities;
-      if (!Results.ContainsKey("Optimization")) {
-        qualities = new DataTable("Optimization");
-        qualities.Rows.Add(new DataRow("Quality", string.Empty));
-        Results.Add(new Result("Optimization", qualities));
-      } else qualities = (DataTable)Results["Optimization"].Value;
-      qualities.Rows["Quality"].Values.Add(-func / instances);
-
-      string[] attributNames = Problem.ProblemData.AllowedInputVariables.ToArray();
-      if (gradients != null) {
-        DataTable grads;
-        if (!Results.ContainsKey("Gradients")) {
-          grads = new DataTable("Gradients");
-          for (int i = 0; i < gradients.Length; i++)
-            grads.Rows.Add(new DataRow(attributNames[i / Dimensions] + "-" + (i % Dimensions), string.Empty));
-          Results.Add(new Result("Gradients", grads));
-        } else grads = (DataTable)Results["Gradients"].Value;
-        for (int i = 0; i < gradients.Length; i++)
-          grads.Rows[attributNames[i / Dimensions] + "-" + (i % Dimensions)].Values.Add(gradients[i]);
-      }
-
-      if (!Results.ContainsKey("Quality")) {
-        Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
-      } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
-
-      var attributes = attributNames.Length;
-      var transformationMatrix = new double[attributes, Dimensions];
-      var counter = 0;
-      for (var i = 0; i < attributes; i++)
-        for (var j = 0; j < Dimensions; j++)
-          transformationMatrix[i, j] = coefficients[counter++];
-
-      var scaling = new Scaling(Problem.ProblemData.Dataset, attributNames, Problem.ProblemData.TrainingIndices);
-      var model = new NcaModel(K, transformationMatrix, Problem.ProblemData.Dataset, Problem.ProblemData.TrainingIndices, Problem.ProblemData.TargetVariable, attributNames, scaling, Problem.ProblemData.ClassValues.ToArray());
-
-      IClassificationSolution solution = model.CreateClassificationSolution(Problem.ProblemData);
-      if (!Results.ContainsKey("ClassificationSolution")) {
-        Results.Add(new Result("ClassificationSolution", solution));
-      } else {
-        Results["ClassificationSolution"].Value = solution;
-      }
-    }
-
-    private static IEnumerable<double> GetRow(double[,] data, int row) {
-      for (int i = 0; i < data.GetLength(1); i++)
-        yield return data[row, i];
-    }
-
   }
 }
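
For reference, the quantity that the removed Gradient() method computes is the regularized NCA objective of Goldberger et al.; the new NcaGradientCalculator operator presumably takes over the same computation. In the code's row-vector convention (x_i are the scaled input rows, c_i their class labels, A the attributes-by-dimensions transformation matrix, \lambda the Regularization parameter):

    p_{ij} = \frac{\exp\left(-\lVert x_i A - x_j A \rVert^2\right)}{\sum_{k \neq i} \exp\left(-\lVert x_i A - x_k A \rVert^2\right)}, \qquad p_{ii} = 0,

    p_i = \sum_{j : c_j = c_i} p_{ij}, \qquad f(A) = -\sum_i p_i + \lambda \lVert A \rVert_F^2,

    \frac{\partial f}{\partial A} = -2 \left( \sum_{i,j} \left( p_i - [c_i = c_j] \right) p_{ij} \, (x_i - x_j)^{\top} (x_i - x_j) \right) A + 2 \lambda A.

With \lambda = 0, minimizing f maximizes the expected number of correctly classified training points, which is NCA exactly as proposed by Goldberger et al.; the sparse probabilities matrix keeps only the NeighborSamples largest p_{ij} per row, which is the sampling speed-up that the parameter description refers to.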