
source: trunk/sources/HeuristicLab.GP.StructureIdentification.Classification/3.3/MulticlassOneVsOneAnalyzer.cs @ 2415

Last change on this file since 2415 was 2328, checked in by gkronber, 15 years ago

This is the remaining part of changeset r2327.
Applied changes in modeling plugins that are necessary for the new model analyzer (#722):

  • Predictors have properties for the lower and upper limit of the predicted value
  • Added views for predictors that show these limits (also added a new view for GeneticProgrammingModel that shows the size and height of the model)
  • Reintroduced TreeEvaluatorInjectors that read a PunishmentFactor and calculate the lower and upper limits for estimated values (limits are set in the tree evaluators; see the sketch after this list)
  • Added operators to create predictors; changed the modeling algorithms to use these predictors for the calculation of final model qualities and variable impacts (to be compatible with the new model analyzer, the predictors use a very large PunishmentFactor)
  • Replaced all private implementations of double.IsAlmost with the implementation in HL.Common (see #733, r2324)
  • Implemented the SolutionExtractor operator and moved BestSolutionStorer from HL.Logging to HL.Modeling (fixes #734)
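The TreeEvaluatorInjector item above only states that a PunishmentFactor is translated into lower and upper limits for estimated values. As a rough illustration, here is a minimal C# sketch of one plausible mapping; the class name EstimationLimitsSketch, its signature, and the formula are illustrative assumptions and are not taken from the changeset or the repository.

using System.Linq;

// Hypothetical sketch: derive estimation limits from a punishment factor and the training targets.
public static class EstimationLimitsSketch {
  public static void Calculate(double[] trainingTargets, double punishmentFactor,
                               out double lowerLimit, out double upperLimit) {
    double min = trainingTargets.Min();
    double max = trainingTargets.Max();
    double range = max - min;
    // Widen the admissible interval proportionally to the punishment factor;
    // a tree evaluator would then clip its estimates to [lowerLimit, upperLimit].
    lowerLimit = min - punishmentFactor * range;
    upperLimit = max + punishmentFactor * range;
  }
}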
File size: 6.7 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2008 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.DataAnalysis;
using HeuristicLab.GP.Interfaces;
using HeuristicLab.Common;

namespace HeuristicLab.GP.StructureIdentification.Classification {
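  // Combines the pairwise (one-vs-one) classifiers contained in the sub-scopes into a
  // multi-class prediction by majority voting and stores the vote matrix and the
  // resulting accuracy in the scope.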
  public class MulticlassOneVsOneAnalyzer : OperatorBase {

    private const string DATASET = "Dataset";
    private const string TARGETVARIABLE = "TargetVariable";
    private const string TARGETCLASSVALUES = "TargetClassValues";
    private const string TRAININGSAMPLESSTART = "TrainingSamplesStart";
    private const string TRAININGSAMPLESEND = "TrainingSamplesEnd";
    private const string SAMPLESSTART = "SamplesStart";
    private const string SAMPLESEND = "SamplesEnd";
    private const string CLASSAVALUE = "ClassAValue";
    private const string CLASSBVALUE = "ClassBValue";
    private const string BESTMODELLSCOPE = "BestValidationSolution";
    private const string BESTMODELL = "FunctionTree";
    private const string VOTES = "Votes";
    private const string ACCURACY = "Accuracy";
    private const string TREEEVALUATOR = "TreeEvaluator";

    public override string Description {
      get { return @"TASK"; }
    }

    public MulticlassOneVsOneAnalyzer()
      : base() {
      AddVariableInfo(new VariableInfo(DATASET, "The dataset to use", typeof(Dataset), VariableKind.In));
      AddVariableInfo(new VariableInfo(TARGETVARIABLE, "Target variable", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TARGETCLASSVALUES, "Class values of the target variable in the original dataset", typeof(ItemList<DoubleData>), VariableKind.In));
      AddVariableInfo(new VariableInfo(CLASSAVALUE, "The original class value of class A in the subscope", typeof(DoubleData), VariableKind.In));
      AddVariableInfo(new VariableInfo(CLASSBVALUE, "The original class value of class B in the subscope", typeof(DoubleData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TRAININGSAMPLESSTART, "The start of training samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TRAININGSAMPLESEND, "The end of training samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(SAMPLESSTART, "The start of samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(SAMPLESEND, "The end of samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(BESTMODELLSCOPE, "The variable containing the scope of the model (incl. meta data)", typeof(IScope), VariableKind.In));
      AddVariableInfo(new VariableInfo(BESTMODELL, "The variable in the scope of the model that contains the actual model", typeof(IGeneticProgrammingModel), VariableKind.In));
      AddVariableInfo(new VariableInfo(TREEEVALUATOR, "The evaluator to apply to the function tree", typeof(ITreeEvaluator), VariableKind.In));
      AddVariableInfo(new VariableInfo(VOTES, "Array with the votes for each instance", typeof(IntMatrixData), VariableKind.New));
      AddVariableInfo(new VariableInfo(ACCURACY, "Accuracy of the one-vs-one multi-class classifier", typeof(DoubleData), VariableKind.New));
    }

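    // Collects the votes of all pairwise classifiers (one sub-scope per pair of classes) and
    // computes the resulting multi-class accuracy on the sample range [SamplesStart, SamplesEnd).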
    public override IOperation Apply(IScope scope) {
      Dataset dataset = GetVariableValue<Dataset>(DATASET, scope, true);
      int targetVariable = GetVariableValue<IntData>(TARGETVARIABLE, scope, true).Data;
      int trainingSamplesStart = GetVariableValue<IntData>(TRAININGSAMPLESSTART, scope, true).Data;
      int trainingSamplesEnd = GetVariableValue<IntData>(TRAININGSAMPLESEND, scope, true).Data;
      int samplesStart = GetVariableValue<IntData>(SAMPLESSTART, scope, true).Data;
      int samplesEnd = GetVariableValue<IntData>(SAMPLESEND, scope, true).Data;
      ItemList<DoubleData> classValues = GetVariableValue<ItemList<DoubleData>>(TARGETCLASSVALUES, scope, true);
      int[,] votes = new int[samplesEnd - samplesStart, classValues.Count];

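      // Each sub-scope holds the best model for one pair of classes; an estimated value below 0.5
      // counts as a vote for class A, otherwise as a vote for class B.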
      foreach(IScope childScope in scope.SubScopes) {
        double classAValue = GetVariableValue<DoubleData>(CLASSAVALUE, childScope, true).Data;
        double classBValue = GetVariableValue<DoubleData>(CLASSBVALUE, childScope, true).Data;
        IScope bestScope = GetVariableValue<IScope>(BESTMODELLSCOPE, childScope, true);
        IGeneticProgrammingModel gpModel = GetVariableValue<IGeneticProgrammingModel>(BESTMODELL, bestScope, true);

        ITreeEvaluator evaluator = GetVariableValue<ITreeEvaluator>(TREEEVALUATOR, bestScope, true);
        evaluator.PrepareForEvaluation(dataset, gpModel.FunctionTree);
        for(int i = 0; i < (samplesEnd - samplesStart); i++) {
          double est = evaluator.Evaluate(i + samplesStart);
          if(est < 0.5) {
            CastVote(votes, i, classAValue, classValues);
          } else {
            CastVote(votes, i, classBValue, classValues);
          }
        }
      }

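      // Determine the majority class for each sample; samples whose maximum vote count is tied
      // (sameVotes > 0) are counted as misclassified.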
      int correctlyClassified = 0;
      for(int i = 0; i < (samplesEnd - samplesStart); i++) {
        double originalClassValue = dataset.GetValue(i + samplesStart, targetVariable);
        double estimatedClassValue = classValues[0].Data;
        int maxVotes = votes[i, 0];
        int sameVotes = 0;
        for(int j = 1; j < classValues.Count; j++) {
          if(votes[i, j] > maxVotes) {
            maxVotes = votes[i, j];
            estimatedClassValue = classValues[j].Data;
            sameVotes = 0;
          } else if(votes[i, j] == maxVotes) {
            sameVotes++;
          }
        }
        if(originalClassValue.IsAlmost(estimatedClassValue) && sameVotes == 0) correctlyClassified++;
      }

      double accuracy = correctlyClassified / (double)(samplesEnd - samplesStart);

      scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName(VOTES), new IntMatrixData(votes)));
      scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName(ACCURACY), new DoubleData(accuracy)));
      return null;
    }

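    // Increments the vote counter of the class whose value matches the voted class value.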
    private void CastVote(int[,] votes, int sample, double votedClass, ItemList<DoubleData> classValues) {
      for(int i = 0; i < classValues.Count; i++) {
        if(classValues[i].Data.IsAlmost(votedClass)) votes[sample, i]++;
      }
    }
  }
}