
source: trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationPerformanceMeasures.cs @ 13801

Last change on this file since 13801 was 13801, checked in by mkommend, 9 years ago

#2601: Reset classification performance measures to double.NaN before calculating the new ones.

File size: 12.9 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using HeuristicLab.Common;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis {
  [StorableClass]
  public class ClassificationPerformanceMeasuresResultCollection : ResultCollection {
    #region result names
    protected const string ClassificationPositiveClassNameResultName = "Classification positive class";
    protected const string TrainingTruePositiveRateResultName = "True positive rate (training)";
    protected const string TrainingTrueNegativeRateResultName = "True negative rate (training)";
    protected const string TrainingPositivePredictiveValueResultName = "Positive predictive value (training)";
    protected const string TrainingNegativePredictiveValueResultName = "Negative predictive value (training)";
    protected const string TrainingFalsePositiveRateResultName = "False positive rate (training)";
    protected const string TrainingFalseDiscoveryRateResultName = "False discovery rate (training)";
    protected const string TrainingF1ScoreResultName = "F1 score (training)";
    protected const string TrainingMatthewsCorrelationResultName = "Matthews Correlation (training)";
    protected const string TestTruePositiveRateResultName = "True positive rate (test)";
    protected const string TestTrueNegativeRateResultName = "True negative rate (test)";
    protected const string TestPositivePredictiveValueResultName = "Positive predictive value (test)";
    protected const string TestNegativePredictiveValueResultName = "Negative predictive value (test)";
    protected const string TestFalsePositiveRateResultName = "False positive rate (test)";
    protected const string TestFalseDiscoveryRateResultName = "False discovery rate (test)";
    protected const string TestF1ScoreResultName = "F1 score (test)";
    protected const string TestMatthewsCorrelationResultName = "Matthews Correlation (test)";
    #endregion

    public ClassificationPerformanceMeasuresResultCollection()
      : base() {
      AddMeasures();
    }
    [StorableConstructor]
    protected ClassificationPerformanceMeasuresResultCollection(bool deserializing)
      : base(deserializing) {
    }

    protected ClassificationPerformanceMeasuresResultCollection(ClassificationPerformanceMeasuresResultCollection original, Cloner cloner)
      : base(original, cloner) { }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new ClassificationPerformanceMeasuresResultCollection(this, cloner);
    }

    #region result properties
    public string ClassificationPositiveClassName {
      get { return ((StringValue)this[ClassificationPositiveClassNameResultName].Value).Value; }
      set { ((StringValue)this[ClassificationPositiveClassNameResultName].Value).Value = value; }
    }
    public double TrainingTruePositiveRate {
      get { return ((DoubleValue)this[TrainingTruePositiveRateResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingTruePositiveRateResultName].Value).Value = value; }
    }
    public double TrainingTrueNegativeRate {
      get { return ((DoubleValue)this[TrainingTrueNegativeRateResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingTrueNegativeRateResultName].Value).Value = value; }
    }
    public double TrainingPositivePredictiveValue {
      get { return ((DoubleValue)this[TrainingPositivePredictiveValueResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingPositivePredictiveValueResultName].Value).Value = value; }
    }
    public double TrainingNegativePredictiveValue {
      get { return ((DoubleValue)this[TrainingNegativePredictiveValueResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingNegativePredictiveValueResultName].Value).Value = value; }
    }
    public double TrainingFalsePositiveRate {
      get { return ((DoubleValue)this[TrainingFalsePositiveRateResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingFalsePositiveRateResultName].Value).Value = value; }
    }
    public double TrainingFalseDiscoveryRate {
      get { return ((DoubleValue)this[TrainingFalseDiscoveryRateResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingFalseDiscoveryRateResultName].Value).Value = value; }
    }
    public double TrainingF1Score {
      get { return ((DoubleValue)this[TrainingF1ScoreResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingF1ScoreResultName].Value).Value = value; }
    }
    public double TrainingMatthewsCorrelation {
      get { return ((DoubleValue)this[TrainingMatthewsCorrelationResultName].Value).Value; }
      set { ((DoubleValue)this[TrainingMatthewsCorrelationResultName].Value).Value = value; }
    }
    public double TestTruePositiveRate {
      get { return ((DoubleValue)this[TestTruePositiveRateResultName].Value).Value; }
      set { ((DoubleValue)this[TestTruePositiveRateResultName].Value).Value = value; }
    }
    public double TestTrueNegativeRate {
      get { return ((DoubleValue)this[TestTrueNegativeRateResultName].Value).Value; }
      set { ((DoubleValue)this[TestTrueNegativeRateResultName].Value).Value = value; }
    }
    public double TestPositivePredictiveValue {
      get { return ((DoubleValue)this[TestPositivePredictiveValueResultName].Value).Value; }
      set { ((DoubleValue)this[TestPositivePredictiveValueResultName].Value).Value = value; }
    }
    public double TestNegativePredictiveValue {
      get { return ((DoubleValue)this[TestNegativePredictiveValueResultName].Value).Value; }
      set { ((DoubleValue)this[TestNegativePredictiveValueResultName].Value).Value = value; }
    }
    public double TestFalsePositiveRate {
      get { return ((DoubleValue)this[TestFalsePositiveRateResultName].Value).Value; }
      set { ((DoubleValue)this[TestFalsePositiveRateResultName].Value).Value = value; }
    }
    public double TestFalseDiscoveryRate {
      get { return ((DoubleValue)this[TestFalseDiscoveryRateResultName].Value).Value; }
      set { ((DoubleValue)this[TestFalseDiscoveryRateResultName].Value).Value = value; }
    }
    public double TestF1Score {
      get { return ((DoubleValue)this[TestF1ScoreResultName].Value).Value; }
      set { ((DoubleValue)this[TestF1ScoreResultName].Value).Value = value; }
    }
    public double TestMatthewsCorrelation {
      get { return ((DoubleValue)this[TestMatthewsCorrelationResultName].Value).Value; }
      set { ((DoubleValue)this[TestMatthewsCorrelationResultName].Value).Value = value; }
    }
    #endregion
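    // Note (added comment): the properties above are typed shortcuts into this result collection.
    // Each one looks up its Result entry by name and casts the stored value to StringValue or
    // DoubleValue, so reads and writes operate directly on the stored result items.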

    protected void AddMeasures() {
      Add(new Result(ClassificationPositiveClassNameResultName, "The positive class which is used for the performance measure calculations.", new StringValue()));
      Add(new Result(TrainingTruePositiveRateResultName, "Sensitivity/True positive rate of the model on the training partition\n(TP/(TP+FN)).", new PercentValue()));
      Add(new Result(TrainingTrueNegativeRateResultName, "Specificity/True negative rate of the model on the training partition\n(TN/(FP+TN)).", new PercentValue()));
      Add(new Result(TrainingPositivePredictiveValueResultName, "Precision/Positive predictive value of the model on the training partition\n(TP/(TP+FP)).", new PercentValue()));
      Add(new Result(TrainingNegativePredictiveValueResultName, "Negative predictive value of the model on the training partition\n(TN/(TN+FN)).", new PercentValue()));
      Add(new Result(TrainingFalsePositiveRateResultName, "The false positive rate is the complement of the true negative rate of the model on the training partition.", new PercentValue()));
      Add(new Result(TrainingFalseDiscoveryRateResultName, "The false discovery rate is the complement of the positive predictive value of the model on the training partition.", new PercentValue()));
      Add(new Result(TrainingF1ScoreResultName, "The F1 score of the model on the training partition.", new DoubleValue()));
      Add(new Result(TrainingMatthewsCorrelationResultName, "The Matthews correlation value of the model on the training partition.", new DoubleValue()));
      Add(new Result(TestTruePositiveRateResultName, "Sensitivity/True positive rate of the model on the test partition\n(TP/(TP+FN)).", new PercentValue()));
      Add(new Result(TestTrueNegativeRateResultName, "Specificity/True negative rate of the model on the test partition\n(TN/(FP+TN)).", new PercentValue()));
      Add(new Result(TestPositivePredictiveValueResultName, "Precision/Positive predictive value of the model on the test partition\n(TP/(TP+FP)).", new PercentValue()));
      Add(new Result(TestNegativePredictiveValueResultName, "Negative predictive value of the model on the test partition\n(TN/(TN+FN)).", new PercentValue()));
      Add(new Result(TestFalsePositiveRateResultName, "The false positive rate is the complement of the true negative rate of the model on the test partition.", new PercentValue()));
      Add(new Result(TestFalseDiscoveryRateResultName, "The false discovery rate is the complement of the positive predictive value of the model on the test partition.", new PercentValue()));
      Add(new Result(TestF1ScoreResultName, "The F1 score of the model on the test partition.", new DoubleValue()));
      Add(new Result(TestMatthewsCorrelationResultName, "The Matthews correlation value of the model on the test partition.", new DoubleValue()));

      Reset();
    }
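    // Illustrative sketch: the result descriptions above refer to the standard confusion-matrix
    // formulas. Assuming raw counts of true/false positives and negatives, the measures could be
    // derived as shown below. The helper is hypothetical (name and signature are not part of
    // HeuristicLab) and only spells out the formulas; the actual values are supplied by
    // ClassificationPerformanceMeasuresCalculator.
    private static double[] ConfusionMatrixMeasuresSketch(double tp, double tn, double fp, double fn) {
      double truePositiveRate = tp / (tp + fn);                   // sensitivity / recall
      double trueNegativeRate = tn / (fp + tn);                   // specificity
      double positivePredictiveValue = tp / (tp + fp);            // precision
      double negativePredictiveValue = tn / (tn + fn);
      double falsePositiveRate = 1 - trueNegativeRate;            // complement of the true negative rate
      double falseDiscoveryRate = 1 - positivePredictiveValue;    // complement of the positive predictive value
      double f1Score = 2 * tp / (2 * tp + fp + fn);               // harmonic mean of precision and recall
      double matthewsCorrelation = (tp * tn - fp * fn) /
        Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));
      return new[] { truePositiveRate, trueNegativeRate, positivePredictiveValue, negativePredictiveValue,
                     falsePositiveRate, falseDiscoveryRate, f1Score, matthewsCorrelation };
    }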

    public void Reset() {
      TrainingTruePositiveRate = double.NaN;
      TrainingTrueNegativeRate = double.NaN;
      TrainingPositivePredictiveValue = double.NaN;
      TrainingNegativePredictiveValue = double.NaN;
      TrainingFalsePositiveRate = double.NaN;
      TrainingFalseDiscoveryRate = double.NaN;
      TrainingF1Score = double.NaN;
      TrainingMatthewsCorrelation = double.NaN;
      TestTruePositiveRate = double.NaN;
      TestTrueNegativeRate = double.NaN;
      TestPositivePredictiveValue = double.NaN;
      TestNegativePredictiveValue = double.NaN;
      TestFalsePositiveRate = double.NaN;
      TestFalseDiscoveryRate = double.NaN;
      TestF1Score = double.NaN;
      TestMatthewsCorrelation = double.NaN;
    }

    public void SetTrainingResults(ClassificationPerformanceMeasuresCalculator trainingPerformanceCalculator) {
      if (!string.IsNullOrWhiteSpace(ClassificationPositiveClassName)
              && !ClassificationPositiveClassName.Equals(trainingPerformanceCalculator.PositiveClassName))
        throw new ArgumentException("The classification positive class of the training data does not match that of the test partition.");
      ClassificationPositiveClassName = trainingPerformanceCalculator.PositiveClassName;
      TrainingTruePositiveRate = trainingPerformanceCalculator.TruePositiveRate;
      TrainingTrueNegativeRate = trainingPerformanceCalculator.TrueNegativeRate;
      TrainingPositivePredictiveValue = trainingPerformanceCalculator.PositivePredictiveValue;
      TrainingNegativePredictiveValue = trainingPerformanceCalculator.NegativePredictiveValue;
      TrainingFalsePositiveRate = trainingPerformanceCalculator.FalsePositiveRate;
      TrainingFalseDiscoveryRate = trainingPerformanceCalculator.FalseDiscoveryRate;
    }

    public void SetTestResults(ClassificationPerformanceMeasuresCalculator testPerformanceCalculator) {
      if (!string.IsNullOrWhiteSpace(ClassificationPositiveClassName)
                && !ClassificationPositiveClassName.Equals(testPerformanceCalculator.PositiveClassName))
        throw new ArgumentException("The classification positive class of the test data does not match that of the training partition.");
      ClassificationPositiveClassName = testPerformanceCalculator.PositiveClassName;
      TestTruePositiveRate = testPerformanceCalculator.TruePositiveRate;
      TestTrueNegativeRate = testPerformanceCalculator.TrueNegativeRate;
      TestPositivePredictiveValue = testPerformanceCalculator.PositivePredictiveValue;
      TestNegativePredictiveValue = testPerformanceCalculator.NegativePredictiveValue;
      TestFalsePositiveRate = testPerformanceCalculator.FalsePositiveRate;
      TestFalseDiscoveryRate = testPerformanceCalculator.FalseDiscoveryRate;
    }
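    // Usage note (added, illustrative): AddMeasures registers one Result entry per measure and then
    // calls Reset(), which initializes every value to double.NaN so that stale numbers are never
    // mistaken for freshly calculated ones (see #2601). A typical sequence, with the construction of
    // the calculators omitted because it is defined elsewhere:
    //
    //   var measures = new ClassificationPerformanceMeasuresResultCollection();
    //   measures.SetTrainingResults(trainingCalculator);  // stores the positive class name
    //   measures.SetTestResults(testCalculator);          // must use the same positive class,
    //                                                     // otherwise an ArgumentException is thrown
    //
    // The F1 score and Matthews correlation results are not filled in by these setters; they remain
    // NaN until assigned through their properties.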
  }
}