
source: branches/CEDMA-Exporter-715/sources/HeuristicLab.GP.StructureIdentification.Classification/3.3/MulticlassOneVsOneAnalyzer.cs @ 3199

Last change on this file since 3199 was 2034, checked in by gkronber, 15 years ago

Implemented a first version of an operator to calculate variable impacts of models (generated by GP or SVM). #644 (Variable impact of CEDMA models should be calculated and stored in the result DB)

File size: 7.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2008 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.DataAnalysis;
using HeuristicLab.GP.StructureIdentification;

namespace HeuristicLab.GP.StructureIdentification.Classification {
  public class MulticlassOneVsOneAnalyzer : OperatorBase {

    private const string DATASET = "Dataset";
    private const string TARGETVARIABLE = "TargetVariable";
    private const string TARGETCLASSVALUES = "TargetClassValues";
    private const string TRAININGSAMPLESSTART = "TrainingSamplesStart";
    private const string TRAININGSAMPLESEND = "TrainingSamplesEnd";
    private const string SAMPLESSTART = "SamplesStart";
    private const string SAMPLESEND = "SamplesEnd";
    private const string CLASSAVALUE = "ClassAValue";
    private const string CLASSBVALUE = "ClassBValue";
    private const string BESTMODELLSCOPE = "BestValidationSolution";
    private const string BESTMODELL = "FunctionTree";
    private const string VOTES = "Votes";
    private const string ACCURACY = "Accuracy";

    private const double EPSILON = 1E-6;
    public override string Description {
      get { return @"Combines the pairwise classification models of all sub-scopes into a one-vs-one multi-class classifier, determines the predicted class of each sample by majority vote and calculates the accuracy of the combined classifier."; }
    }

    public MulticlassOneVsOneAnalyzer()
      : base() {
      AddVariableInfo(new VariableInfo(DATASET, "The dataset to use", typeof(Dataset), VariableKind.In));
      AddVariableInfo(new VariableInfo(TARGETVARIABLE, "Target variable", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TARGETCLASSVALUES, "Class values of the target variable in the original dataset", typeof(ItemList<DoubleData>), VariableKind.In));
      AddVariableInfo(new VariableInfo(CLASSAVALUE, "The original class value of the class A in the subscope", typeof(DoubleData), VariableKind.In));
      AddVariableInfo(new VariableInfo(CLASSBVALUE, "The original class value of the class B in the subscope", typeof(DoubleData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TRAININGSAMPLESSTART, "The start of training samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(TRAININGSAMPLESEND, "The end of training samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(SAMPLESSTART, "The start of samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(SAMPLESEND, "The end of samples in the original dataset", typeof(IntData), VariableKind.In));
      AddVariableInfo(new VariableInfo(BESTMODELLSCOPE, "The variable containing the scope of the model (incl. meta data)", typeof(IScope), VariableKind.In));
      AddVariableInfo(new VariableInfo(BESTMODELL, "The variable in the scope of the model that contains the actual model", typeof(BakedFunctionTree), VariableKind.In));
      AddVariableInfo(new VariableInfo(VOTES, "Array with the votes for each instance", typeof(IntMatrixData), VariableKind.New));
      AddVariableInfo(new VariableInfo(ACCURACY, "Accuracy of the one-vs-one multi-class classifier", typeof(DoubleData), VariableKind.New));
    }

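    // Combines the pairwise classifiers produced for every pair of classes (one sub-scope per pair)
    // into a single one-vs-one multi-class classifier: each pairwise model casts one vote per sample,
    // the class with the most votes wins, and the votes and resulting accuracy are stored in the scope.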
    public override IOperation Apply(IScope scope) {
      Dataset dataset = GetVariableValue<Dataset>(DATASET, scope, true);
      int targetVariable = GetVariableValue<IntData>(TARGETVARIABLE, scope, true).Data;
      int trainingSamplesStart = GetVariableValue<IntData>(TRAININGSAMPLESSTART, scope, true).Data;
      int trainingSamplesEnd = GetVariableValue<IntData>(TRAININGSAMPLESEND, scope, true).Data;
      int samplesStart = GetVariableValue<IntData>(SAMPLESSTART, scope, true).Data;
      int samplesEnd = GetVariableValue<IntData>(SAMPLESEND, scope, true).Data;
      ItemList<DoubleData> classValues = GetVariableValue<ItemList<DoubleData>>(TARGETCLASSVALUES, scope, true);
      int[,] votes = new int[samplesEnd - samplesStart, classValues.Count];

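      // Each sub-scope holds the best model found for one (class A, class B) pair. The model's
      // estimated value is interpreted as a binary decision: values below 0.5 vote for class A,
      // all other values vote for class B.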
      foreach(IScope childScope in scope.SubScopes) {
        double classAValue = GetVariableValue<DoubleData>(CLASSAVALUE, childScope, true).Data;
        double classBValue = GetVariableValue<DoubleData>(CLASSBVALUE, childScope, true).Data;
        IScope bestScope = GetVariableValue<IScope>(BESTMODELLSCOPE, childScope, true);
        BakedFunctionTree functionTree = GetVariableValue<BakedFunctionTree>(BESTMODELL, bestScope, true);

        BakedTreeEvaluator evaluator = new BakedTreeEvaluator();
        evaluator.PrepareForEvaluation(dataset, targetVariable, trainingSamplesStart, trainingSamplesEnd, 1.0, functionTree);
        for(int i = 0; i < (samplesEnd - samplesStart); i++) {
          double est = evaluator.Evaluate(i + samplesStart);
          if(est < 0.5) {
            CastVote(votes, i, classAValue, classValues);
          } else {
            CastVote(votes, i, classBValue, classValues);
          }
        }
      }

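      // Determine the majority class for each sample. A sample counts as correctly classified only
      // if the class with the most votes matches the original class and no other class received the
      // same number of votes (ties are treated as misclassifications).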
      int correctlyClassified = 0;
      for(int i = 0; i < (samplesEnd - samplesStart); i++) {
        double originalClassValue = dataset.GetValue(i + samplesStart, targetVariable);
        double estimatedClassValue = classValues[0].Data;
        int maxVotes = votes[i, 0];
        int sameVotes = 0;
        for(int j = 1; j < classValues.Count; j++) {
          if(votes[i, j] > maxVotes) {
            maxVotes = votes[i, j];
            estimatedClassValue = classValues[j].Data;
            sameVotes = 0;
          } else if(votes[i, j] == maxVotes) {
            sameVotes++;
          }
        }
        if(IsEqual(originalClassValue, estimatedClassValue) && sameVotes == 0) correctlyClassified++;
      }

      double accuracy = correctlyClassified / (double)(samplesEnd - samplesStart);

      scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName(VOTES), new IntMatrixData(votes)));
      scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName(ACCURACY), new DoubleData(accuracy)));
      return null;
    }

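    // Adds one vote for the class whose original class value matches votedClass
    // (classes are compared with an epsilon tolerance to absorb floating point noise).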
    private void CastVote(int[,] votes, int sample, double votedClass, ItemList<DoubleData> classValues) {
      for(int i = 0; i < classValues.Count; i++) {
        if(IsEqual(classValues[i].Data, votedClass)) votes[sample, i]++;
      }
    }

    private bool IsEqual(double x, double y) {
      return Math.Abs(x - y) < EPSILON;
    }
  }
}