
source: trunk/sources/HeuristicLab.GP.StructureIdentification.Classification/3.3/MulticlassOneVsOneAnalyzer.cs @ 1875

Last change on this file since 1875 was 1796, checked in by gkronber, 16 years ago

Refactored GP evaluation to make it possible to use different evaluators to interpret function trees. #615 (Evaluation of HL3 function trees should be equivalent to evaluation in HL2)

File size: 7.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2008 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.DataAnalysis;
using HeuristicLab.GP.StructureIdentification;

namespace HeuristicLab.GP.StructureIdentification.Classification {
  public class MulticlassOneVsOneAnalyzer : OperatorBase {

    private const string DATASET = "Dataset";
    private const string TARGETVARIABLE = "TargetVariable";
    private const string TARGETCLASSVALUES = "TargetClassValues";
    private const string TRAININGSAMPLESSTART = "TrainingSamplesStart";
    private const string TRAININGSAMPLESEND = "TrainingSamplesEnd";
    private const string SAMPLESSTART = "SamplesStart";
    private const string SAMPLESEND = "SamplesEnd";
    private const string CLASSAVALUE = "ClassAValue";
    private const string CLASSBVALUE = "ClassBValue";
    private const string BESTMODELLSCOPE = "BestValidationSolution";
    private const string BESTMODELL = "FunctionTree";
    private const string VOTES = "Votes";
    private const string ACCURACY = "Accuracy";

    private const double EPSILON = 1E-6;
    public override string Description {
      get { return @"Collects the pairwise (one-vs-one) classification results from all sub-scopes, determines the predicted class for each sample by majority voting and calculates the accuracy of the combined multi-class classifier."; }
    }

    public MulticlassOneVsOneAnalyzer()
      : base() {
      AddVariableInfo(new VariableInfo(DATASET, "The dataset to use", typeof(Dataset), VariableKind.In));
      AddVariableInfo(new VariableInfo(TARGETVARIABLE, "Target variable", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TARGETCLASSVALUES, "Class values of the target variable in the original dataset", typeof(ItemList<DoubleData>), VariableKind.In));
      AddVariableInfo(new VariableInfo(CLASSAVALUE, "The original class value of class A in the subscope", typeof(DoubleData), VariableKind.In));
      AddVariableInfo(new VariableInfo(CLASSBVALUE, "The original class value of class B in the subscope", typeof(DoubleData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TRAININGSAMPLESSTART, "The start of training samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TRAININGSAMPLESEND, "The end of training samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(SAMPLESSTART, "The start of samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(SAMPLESEND, "The end of samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(BESTMODELLSCOPE, "The variable containing the scope of the model (incl. meta data)", typeof(IScope), VariableKind.In));
      AddVariableInfo(new VariableInfo(BESTMODELL, "The variable in the scope of the model that contains the actual model", typeof(BakedFunctionTree), VariableKind.In));
      AddVariableInfo(new VariableInfo(VOTES, "Array with the votes for each instance", typeof(IntMatrixData), VariableKind.New));
      AddVariableInfo(new VariableInfo(ACCURACY, "Accuracy of the one-vs-one multi-class classifier", typeof(DoubleData), VariableKind.New));
    }

    public override IOperation Apply(IScope scope) {
      Dataset dataset = GetVariableValue<Dataset>(DATASET, scope, true);
      int targetVariable = GetVariableValue<IntData>(TARGETVARIABLE, scope, true).Data;
      int trainingSamplesStart = GetVariableValue<IntData>(TRAININGSAMPLESSTART, scope, true).Data;
      int trainingSamplesEnd = GetVariableValue<IntData>(TRAININGSAMPLESEND, scope, true).Data;
      int samplesStart = GetVariableValue<IntData>(SAMPLESSTART, scope, true).Data;
      int samplesEnd = GetVariableValue<IntData>(SAMPLESEND, scope, true).Data;
      ItemList<DoubleData> classValues = GetVariableValue<ItemList<DoubleData>>(TARGETCLASSVALUES, scope, true);
      int[,] votes = new int[samplesEnd - samplesStart, classValues.Count];

      // each sub-scope holds the best model of one pairwise (class A vs. class B) problem;
      // let each of these models cast a vote for every sample in the evaluation range
      foreach(IScope childScope in scope.SubScopes) {
        double classAValue = GetVariableValue<DoubleData>(CLASSAVALUE, childScope, true).Data;
        double classBValue = GetVariableValue<DoubleData>(CLASSBVALUE, childScope, true).Data;
        IScope bestScope = GetVariableValue<IScope>(BESTMODELLSCOPE, childScope, true);
        BakedFunctionTree functionTree = GetVariableValue<BakedFunctionTree>(BESTMODELL, bestScope, true);

        BakedTreeEvaluator evaluator = new BakedTreeEvaluator();
        evaluator.ResetEvaluator(dataset, targetVariable, trainingSamplesStart, trainingSamplesEnd, 1.0);

        for(int i = 0; i < (samplesEnd - samplesStart); i++) {
          double est = evaluator.Evaluate(functionTree, i + samplesStart);
          // estimates below 0.5 are interpreted as votes for class A, all others as votes for class B
          if(est < 0.5) {
            CastVote(votes, i, classAValue, classValues);
          } else {
            CastVote(votes, i, classBValue, classValues);
          }
        }
      }

      // determine the majority class for each sample; samples with a tie for the
      // maximal number of votes are counted as misclassified
      int correctlyClassified = 0;
      for(int i = 0; i < (samplesEnd - samplesStart); i++) {
        double originalClassValue = dataset.GetValue(i + samplesStart, targetVariable);
        double estimatedClassValue = classValues[0].Data;
        int maxVotes = votes[i, 0];
        int sameVotes = 0;
        for(int j = 1; j < classValues.Count; j++) {
          if(votes[i, j] > maxVotes) {
            maxVotes = votes[i, j];
            estimatedClassValue = classValues[j].Data;
            sameVotes = 0;
          } else if(votes[i, j] == maxVotes) {
            sameVotes++;
          }
        }
        if(IsEqual(originalClassValue, estimatedClassValue) && sameVotes == 0) correctlyClassified++;
      }

      double accuracy = correctlyClassified / (double)(samplesEnd - samplesStart);

      scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName(VOTES), new IntMatrixData(votes)));
      scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName(ACCURACY), new DoubleData(accuracy)));
      return null;
    }

    private void CastVote(int[,] votes, int sample, double votedClass, ItemList<DoubleData> classValues) {
      for(int i = 0; i < classValues.Count; i++) {
        if(IsEqual(classValues[i].Data, votedClass)) votes[sample, i]++;
      }
    }

    private bool IsEqual(double x, double y) {
      return Math.Abs(x - y) < EPSILON;
    }
  }
}
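
For reference, the voting scheme used in Apply can be illustrated with a small stand-alone sketch. The names below (OneVsOneVotingSketch, PredictClass) are hypothetical and not part of HeuristicLab; the sketch only reproduces the pairwise-vote and majority-decision logic for a single sample, under the assumption made above that an estimate below 0.5 counts as a vote for class A and that a tied maximal vote count leaves the sample unclassified.

using System;

// Hypothetical stand-alone sketch; mirrors the voting logic of MulticlassOneVsOneAnalyzer.Apply.
public static class OneVsOneVotingSketch {
  // Returns the class value with the most votes for a single sample,
  // or null if the maximum number of votes is tied between classes.
  public static double? PredictClass(double[] classValues, (double classA, double classB, double estimate)[] pairwiseResults) {
    int[] votes = new int[classValues.Length];
    foreach (var r in pairwiseResults) {
      // an estimate below 0.5 is read as a vote for class A, otherwise for class B
      double votedClass = r.estimate < 0.5 ? r.classA : r.classB;
      for (int i = 0; i < classValues.Length; i++)
        if (Math.Abs(classValues[i] - votedClass) < 1E-6) votes[i]++;
    }
    int best = 0;
    bool tie = false;
    for (int i = 1; i < votes.Length; i++) {
      if (votes[i] > votes[best]) { best = i; tie = false; }
      else if (votes[i] == votes[best]) tie = true;
    }
    return tie ? (double?)null : classValues[best];
  }

  public static void Main() {
    double[] classValues = { 0.0, 1.0, 2.0 };
    // three pairwise models: (0 vs 1), (0 vs 2) and (1 vs 2) with their raw estimates
    var pairwiseResults = new[] { (0.0, 1.0, 0.2), (0.0, 2.0, 0.1), (1.0, 2.0, 0.9) };
    Console.WriteLine(PredictClass(classValues, pairwiseResults)); // prints 0 (two of three votes)
  }
}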