Changeset 8523


Ignore:
Timestamp:
08/23/12 17:55:05 (7 years ago)
Author:
abeham
Message:

#1913: added a classification penalty to the fitness function and the gradient

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs

    r8471 r8523  
    141141      var attributes = scaledData.GetLength(1);
    142142
     143      var penalties = new Dictionary<double, Dictionary<double, double>>();
     144      foreach (var c in data.ClassValues) {
     145        penalties[c] = new Dictionary<double, double>();
     146        foreach (var r in data.ClassValues)
     147          penalties[c][r] = data.GetClassificationPenalty(c, r);
     148      }
     149
    143150      alglib.mincgstate state;
    144151      alglib.mincgreport rep;
     
    147154      alglib.mincgsetxrep(state, true);
    148155      int neighborSampleSize = neighborSamples;
    149       Optimize(state, scaledData, classes, dimensions, neighborSampleSize, cancellation, reporter);
     156      Optimize(state, scaledData, classes, penalties, dimensions, neighborSampleSize, cancellation, reporter);
    150157      alglib.mincgresults(state, out matrix, out rep);
    151158
     
    159166    }
    160167
    161     private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
     168    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
    162169      while (alglib.mincgiteration(state)) {
    163170        if (cancellation.IsCancellationRequested) break;
    164171        if (state.needfg) {
    165           Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize);
     172          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, penalties, dimensions, neighborSampleSize);
    166173          continue;
    167174        }
     
    175182    }
    176183
    177     private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize) {
     184    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize) {
    178185      var instances = data.GetLength(0);
    179186      var attributes = data.GetLength(1);
     
    208215      double val;
    209216      var pi = new double[instances];
     217      func = 0;
    210218      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
    211         if (classes[r].IsAlmost(classes[c])) {
    212           pi[r] += val;
    213         }
    214       }
    215 
     219        double vp = val * penalties[classes[r]][classes[c]];
     220        pi[r] += vp;
     221        func += vp;
     222      }
     223
     224      t0 = 0; t1 = 0;
    216225      var innerSum = new double[attributes, attributes];
    217226      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
    218227        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
    219228        vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);
    220 
    221         if (classes[r].IsAlmost(classes[c])) {
    222           vector.OuterProduct(vector).Multiply(-val).AddTo(innerSum);
    223         }
    224       }
    225 
    226       func = -pi.Sum();
     229        vector.OuterProduct(vector).Multiply(-val * penalties[classes[r]][classes[c]]).AddTo(innerSum);
     230      }
    227231
    228232      r = 0;
    229       var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
     233      var newGrad = AMatrix.Multiply(2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
    230234      foreach (var g in newGrad) {
    231235        grad[r++] = g;
     
    238242      if (!Results.ContainsKey("Optimization")) {
    239243        qualities = new DataTable("Optimization");
    240         qualities.Rows.Add(new DataRow("Quality", string.Empty));
     244        qualities.Rows.Add(new DataRow("Penalty", string.Empty));
    241245        Results.Add(new Result("Optimization", qualities));
    242246      } else qualities = (DataTable)Results["Optimization"].Value;
    243       qualities.Rows["Quality"].Values.Add(-func / instances);
    244 
    245       if (!Results.ContainsKey("Quality")) {
    246         Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
    247       } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
     247      qualities.Rows["Penalty"].Values.Add(func / instances);
     248
     249      if (!Results.ContainsKey("Penalty")) {
     250        Results.Add(new Result("Penalty", new DoubleValue(func / instances)));
     251      } else ((DoubleValue)Results["Penalty"].Value).Value = func / instances;
    248252    }
    249253
Note: See TracChangeset for help on using the changeset viewer.