
Changeset 8681


Timestamp:
09/23/12 00:31:49
Author:
abeham
Message:

#1913: Added the regularization term introduced by Yang and Laaksonen (2007)

Location:
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca
Files:
2 edited
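
For orientation, this changeset replaces the penalty-based objective with the regularized NCA objective of Yang and Laaksonen (2007). The sketch below is reconstructed from the new Gradient code and is not part of the changeset itself; A is the learned linear transformation, the x_i are the scaled input rows, c_i their class values, and \lambda the new Regularization parameter (the probabilities are restricted to the sampled neighbors in the code):

    f(A) = -\sum_i p_i + \lambda \lVert A \rVert_F^2, \qquad p_i = \sum_{k:\, c_k = c_i} p_{ik},

    p_{ik} = \frac{\exp(-\lVert (x_i - x_k) A \rVert^2)}{\sum_{l \neq i} \exp(-\lVert (x_i - x_l) A \rVert^2)}

The penalty term contributes the extra gradient 2\lambda A (the `lambda * 2 * A[r]` in the diff). With \lambda = 0, the default, the algorithm reduces to plain NCA as described by Goldberger et al. (2005), which is what the new parameter description states.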

Legend:

  (unprefixed)  unmodified
  -             removed
  +             added
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Matrix.cs

    r8471 → r8681

…
     public Matrix Transpose() {
-      var result = new Matrix(Transpose(values, Columns, Rows), Columns, Rows);
-      return result;
+      return new Matrix(Transpose(values, Columns, Rows), Columns, Rows);
     }
…
     }

-    public double VectorLength() {
-      return Math.Sqrt(SquaredVectorLength());
-    }
-
-    public double SquaredVectorLength() {
-      if (Rows != 1) throw new ArgumentException("Length only works on vectors.");
+    public double EuclideanNorm() {
+      return Math.Sqrt(SumOfSquares());
+    }
+
+    public double SumOfSquares() {
       return values.Sum(x => x * x);
     }
…
       if (Rows != 1 || other.Rows != 1) throw new ArgumentException("OuterProduct can only be applied to vectors.");
       return Transpose().Multiply(other);
+    }
+
+    public IEnumerable<double> ColumnSums() {
+      return Transpose().RowSums();
+    }
+
+    public IEnumerable<double> RowSums() {
+      var sum = 0.0;
+      int counter = 0;
+      foreach (var v in values) {
+        sum += v;
+        counter++;
+        if (counter == Rows) {
+          yield return sum;
+          sum = 0.0;
+          counter = 0;
+        }
+      }
     }
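
A small usage sketch of the renamed and added helpers (hypothetical, not part of the changeset; it assumes the internal Matrix class above, whose enumerable constructor builds a 1×n row vector as used in NcaAlgorithm.Gradient):

    // Hypothetical snippet; Matrix is the internal helper from Nca/Matrix.cs.
    var v = new Matrix(new double[] { 3.0, 4.0 });  // 1x2 row vector

    // Renamed: SquaredVectorLength -> SumOfSquares, VectorLength -> EuclideanNorm.
    // Note the old "Rows != 1" guard is gone, so SumOfSquares now also
    // accepts non-vector matrices.
    double sumSq = v.SumOfSquares();  // 3*3 + 4*4 = 25.0
    double norm = v.EuclideanNorm();  // sqrt(25.0) = 5.0

    // New in r8681: RowSums sums the backing value array in chunks of
    // Rows elements; ColumnSums is defined as Transpose().RowSums().
    var outer = v.OuterProduct(v);         // 2x2 matrix v^T * v
    double total = outer.RowSums().Sum();  // 49.0, the sum of all entries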
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs

    r8523 → r8681

…
 namespace HeuristicLab.Algorithms.DataAnalysis {
-  internal delegate void Reporter(double quality, double[] coefficients);
+  internal delegate void Reporter(double quality, double[] coefficients, double[] gradients);
   /// <summary>
   /// Neighborhood Components Analysis
   /// </summary>
-  [Item("Neighborhood Components Analysis (NCA)", "Implementation of Neighborhood Components Analysis based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")]
+  [Item("Neighborhood Components Analysis (NCA)", @"Implementation of Neighborhood Components Analysis
+based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005.
+Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520
+with additional regularizations described in Z. Yang, J. Laaksonen. 2007.
+Regularized Neighborhood Component Analysis. Lecture Notes in Computer Science, 4522. pp. 253-262.")]
   [Creatable("Data Analysis")]
   [StorableClass]
…
       get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
     }
+    public IFixedValueParameter<DoubleValue> RegularizationParameter {
+      get { return (IFixedValueParameter<DoubleValue>)Parameters["Regularization"]; }
+    }
     #endregion

     #region Properties
-    private int K {
+    public int K {
       get { return KParameter.Value.Value; }
       set { KParameter.Value.Value = value; }
     }
-    private int Dimensions {
+    public int Dimensions {
       get { return DimensionsParameter.Value.Value; }
       set { DimensionsParameter.Value.Value = value; }
     }
-    private int NeighborSamples {
+    public int NeighborSamples {
       get { return NeighborSamplesParameter.Value.Value; }
       set { NeighborSamplesParameter.Value.Value = value; }
     }
-    private int Iterations {
+    public int Iterations {
       get { return IterationsParameter.Value.Value; }
       set { IterationsParameter.Value.Value = value; }
+    }
+    public double Regularization {
+      get { return RegularizationParameter.Value.Value; }
+      set { RegularizationParameter.Value.Value = value; }
     }
     #endregion
…
     public NcaAlgorithm()
       : base() {
-      Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(1)));
+      Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(3)));
       Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
       Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
-      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(50)));
-      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(20)));
+      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(60)));
+      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(50)));
+      Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));

       INCAInitializer defaultInitializer = null;
…
     }

+    [StorableHook(HookType.AfterDeserialization)]
+    private void AfterDeserialization() {
+      if (!Parameters.ContainsKey("Regularization")) {
+        Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
+      }
+    }
+
     public override void Prepare() {
       if (Problem != null) base.Prepare();
…

       var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
-      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
-      Results.Add(new Result("ClassificationSolution", "The classification solution.", model.CreateClassificationSolution(clonedProblem)));
-    }
-
-    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
+      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Regularization, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
+      var solution = model.CreateClassificationSolution(clonedProblem);
+      if (!Results.ContainsKey("ClassificationSolution"))
+        Results.Add(new Result("ClassificationSolution", "The classification solution.", solution));
+      else Results["ClassificationSolution"].Value = solution;
+    }
+
+    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
       var clonedProblem = (IClassificationProblemData)data.Clone();
-      var model = Train(clonedProblem, k, dimensions, neighborSamples, iterations, initializer);
+      var model = Train(clonedProblem, k, dimensions, neighborSamples, regularization, iterations, initializer);
       return model.CreateClassificationSolution(clonedProblem);
     }

-    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
-      return Train(problemData, k, dimensions, neighborSamples, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
-    }
-
-    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, int iterations, double[,] initalMatrix) {
+    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
+      return Train(problemData, k, dimensions, neighborSamples, regularization, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
+    }
+
+    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, double regularization, int iterations, double[,] initalMatrix) {
       var matrix = new double[initalMatrix.Length];
       for (int i = 0; i < initalMatrix.GetLength(0); i++)
         for (int j = 0; j < initalMatrix.GetLength(1); j++)
           matrix[i * initalMatrix.GetLength(1) + j] = initalMatrix[i, j];
-      return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, iterations, matrix, null, CancellationToken.None);
-    }
-
-    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
+      return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, regularization, iterations, matrix, null, CancellationToken.None);
+    }
+
+    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
       var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
       var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
       var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
       var attributes = scaledData.GetLength(1);
-
-      var penalties = new Dictionary<double, Dictionary<double, double>>();
-      foreach (var c in data.ClassValues) {
-        penalties[c] = new Dictionary<double, double>();
-        foreach (var r in data.ClassValues)
-          penalties[c][r] = data.GetClassificationPenalty(c, r);
-      }

       alglib.mincgstate state;
…
       alglib.mincgsetcond(state, 0, 0, 0, iterations);
       alglib.mincgsetxrep(state, true);
+      //alglib.mincgsetgradientcheck(state, 0.01);
       int neighborSampleSize = neighborSamples;
-      Optimize(state, scaledData, classes, penalties, dimensions, neighborSampleSize, cancellation, reporter);
+      Optimize(state, scaledData, classes, dimensions, neighborSampleSize, regularization, cancellation, reporter);
       alglib.mincgresults(state, out matrix, out rep);
+      if (rep.terminationtype == -7) throw new InvalidOperationException("Gradient verification failed.");

       var transformationMatrix = new double[attributes, dimensions];
…
     }

-    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
+    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda, CancellationToken cancellation, Reporter reporter) {
       while (alglib.mincgiteration(state)) {
         if (cancellation.IsCancellationRequested) break;
         if (state.needfg) {
-          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, penalties, dimensions, neighborSampleSize);
+          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize, lambda);
           continue;
         }
         if (state.innerobj.xupdated) {
           if (reporter != null)
-            reporter(state.innerobj.f, state.innerobj.x);
+            reporter(state.innerobj.f, state.innerobj.x, state.innerobj.g);
           continue;
         }
…
     }

-    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize) {
+    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda) {
       var instances = data.GetLength(0);
       var attributes = data.GetLength(1);
…
           }
           var kVector = new Matrix(GetRow(data, k));
-          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength());
-        }
-        var sample = transformedDistances.OrderByDescending(x => x.Value).Take(neighborSampleSize).ToArray();
-        var normalization = sample.Sum(x => x.Value);
-        if (normalization > 0) {
-          foreach (var s in sample) {
-            if (s.Value <= 0) break;
-            alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
-          }
+          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SumOfSquares());
+        }
+        var normalization = transformedDistances.Sum(x => x.Value);
+        if (normalization <= 0) continue;
+        foreach (var s in transformedDistances.Where(x => x.Value > 0).OrderByDescending(x => x.Value).Take(neighborSampleSize)) {
+          alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
         }
       }
…
       double val;
       var pi = new double[instances];
-      func = 0;
       while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
-        double vp = val * penalties[classes[r]][classes[c]];
-        pi[r] += vp;
-        func += vp;
-      }
-
-      t0 = 0; t1 = 0;
+        if (classes[r].IsAlmost(classes[c])) {
+          pi[r] += val;
+        }
+      }
+
       var innerSum = new double[attributes, attributes];
       while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
         var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
         vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);
-        vector.OuterProduct(vector).Multiply(-val * penalties[classes[r]][classes[c]]).AddTo(innerSum);
-      }
+
+        if (classes[r].IsAlmost(classes[c])) {
+          vector.OuterProduct(vector).Multiply(-val).AddTo(innerSum);
+        }
+      }
+
+      func = -pi.Sum() + lambda * AMatrix.SumOfSquares();

       r = 0;
-      var newGrad = AMatrix.Multiply(2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
+      var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
       foreach (var g in newGrad) {
-        grad[r++] = g;
-      }
-    }
-
-    private void ReportQuality(double func, double[] coefficients) {
+        grad[r] = g + lambda * 2 * A[r];
+        r++;
+      }
+    }
+
+    private void ReportQuality(double func, double[] coefficients, double[] gradients) {
       var instances = Problem.ProblemData.TrainingIndices.Count();
       DataTable qualities;
       if (!Results.ContainsKey("Optimization")) {
         qualities = new DataTable("Optimization");
-        qualities.Rows.Add(new DataRow("Penalty", string.Empty));
+        qualities.Rows.Add(new DataRow("Quality", string.Empty));
         Results.Add(new Result("Optimization", qualities));
       } else qualities = (DataTable)Results["Optimization"].Value;
-      qualities.Rows["Penalty"].Values.Add(func / instances);
-
-      if (!Results.ContainsKey("Penalty")) {
-        Results.Add(new Result("Penalty", new DoubleValue(func / instances)));
-      } else ((DoubleValue)Results["Penalty"].Value).Value = func / instances;
+      qualities.Rows["Quality"].Values.Add(-func / instances);
+
+      string[] attributNames = Problem.ProblemData.AllowedInputVariables.ToArray();
+      if (gradients != null) {
+        DataTable grads;
+        if (!Results.ContainsKey("Gradients")) {
+          grads = new DataTable("Gradients");
+          for (int i = 0; i < gradients.Length; i++)
+            grads.Rows.Add(new DataRow(attributNames[i / Dimensions] + "-" + (i % Dimensions), string.Empty));
+          Results.Add(new Result("Gradients", grads));
+        } else grads = (DataTable)Results["Gradients"].Value;
+        for (int i = 0; i < gradients.Length; i++)
+          grads.Rows[attributNames[i / Dimensions] + "-" + (i % Dimensions)].Values.Add(gradients[i]);
+      }
+
+      if (!Results.ContainsKey("Quality")) {
+        Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
+      } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
+
+      var attributes = attributNames.Length;
+      var transformationMatrix = new double[attributes, Dimensions];
+      var counter = 0;
+      for (var i = 0; i < attributes; i++)
+        for (var j = 0; j < Dimensions; j++)
+          transformationMatrix[i, j] = coefficients[counter++];
+
+      var scaling = new Scaling(Problem.ProblemData.Dataset, attributNames, Problem.ProblemData.TrainingIndices);
+      var model = new NcaModel(K, transformationMatrix, Problem.ProblemData.Dataset, Problem.ProblemData.TrainingIndices, Problem.ProblemData.TargetVariable, attributNames, scaling, Problem.ProblemData.ClassValues.ToArray());
+
+      IClassificationSolution solution = model.CreateClassificationSolution(Problem.ProblemData);
+      if (!Results.ContainsKey("ClassificationSolution")) {
+        Results.Add(new Result("ClassificationSolution", solution));
+      } else {
+        Results["ClassificationSolution"].Value = solution;
+      }
     }
     
    256304        yield return data[row, i];
    257305    }
     306
    258307  }
    259308}
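
And a hypothetical caller of the changed public API, for illustration only (problemData and initializer are assumed to exist; the integer values match the new defaults set in the constructor, while the regularization value is an arbitrary example):

    // Sketch: train NCA with the Yang/Laaksonen regularization enabled.
    // problemData is an IClassificationProblemData, initializer an INCAInitializer
    // (e.g. the LDA-based one suggested by the "Initialization" parameter).
    INcaClassificationSolution solution = NcaAlgorithm.CreateClassificationSolution(
        problemData,
        k: 3,                 // new default of the "K" parameter
        dimensions: 2,
        neighborSamples: 60,  // new default
        regularization: 0.1,  // example lambda; 0 falls back to plain NCA
        iterations: 50,       // new default
        initializer: initializer);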