- Timestamp: 11/02/15 22:32:59 (9 years ago)
- Location: trunk/sources/HeuristicLab.Problems.DataAnalysis
- Files: 8 edited, 2 copied
Legend:
- Unmodified (no prefix)
- Added (marked with "+")
- Removed (marked with "-")
trunk/sources/HeuristicLab.Problems.DataAnalysis
- Property svn:mergeinfo changed:
  /branches/ClassificationModelComparison/HeuristicLab.Problems.DataAnalysis (added), merged: 9073-9074,9119,10553,10556,13081,13083,13086,13089,13091,13097-13099
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationPerformanceMeasures.cs
r12012 → r13100

    protected const string TrainingFalsePositiveRateResultName = "False positive rate (training)";
    protected const string TrainingFalseDiscoveryRateResultName = "False discovery rate (training)";
+   protected const string TrainingF1ScoreResultName = "F1 score (training)";
+   protected const string TrainingMatthewsCorrelationResultName = "Matthews Correlation (training)";
    protected const string TestTruePositiveRateResultName = "True positive rate (test)";
    protected const string TestTrueNegativeRateResultName = "True negative rate (test)";
  …
    protected const string TestFalsePositiveRateResultName = "False positive rate (test)";
    protected const string TestFalseDiscoveryRateResultName = "False discovery rate (test)";
+   protected const string TestF1ScoreResultName = "F1 score (test)";
+   protected const string TestMatthewsCorrelationResultName = "Matthews Correlation (test)";
    #endregion
  …
      set { ((DoubleValue)this[TrainingFalseDiscoveryRateResultName].Value).Value = value; }
    }
+   public double TrainingF1Score {
+     get { return ((DoubleValue)this[TrainingF1ScoreResultName].Value).Value; }
+     set { ((DoubleValue)this[TrainingF1ScoreResultName].Value).Value = value; }
+   }
+   public double TrainingMatthewsCorrelation {
+     get { return ((DoubleValue)this[TrainingMatthewsCorrelationResultName].Value).Value; }
+     set { ((DoubleValue)this[TrainingMatthewsCorrelationResultName].Value).Value = value; }
+   }
    public double TestTruePositiveRate {
      get { return ((DoubleValue)this[TestTruePositiveRateResultName].Value).Value; }
  …
      set { ((DoubleValue)this[TestFalseDiscoveryRateResultName].Value).Value = value; }
    }
+   public double TestF1Score {
+     get { return ((DoubleValue)this[TestF1ScoreResultName].Value).Value; }
+     set { ((DoubleValue)this[TestF1ScoreResultName].Value).Value = value; }
+   }
+   public double TestMatthewsCorrelation {
+     get { return ((DoubleValue)this[TestMatthewsCorrelationResultName].Value).Value; }
+     set { ((DoubleValue)this[TestMatthewsCorrelationResultName].Value).Value = value; }
+   }
    #endregion
  …
      Add(new Result(TrainingFalsePositiveRateResultName, "The false positive rate is the complement of the true negative rate of the model on the training partition.", new PercentValue()));
      Add(new Result(TrainingFalseDiscoveryRateResultName, "The false discovery rate is the complement of the positive predictive value of the model on the training partition.", new PercentValue()));
+     Add(new Result(TrainingF1ScoreResultName, "The F1 score of the model on the training partition.", new DoubleValue()));
+     Add(new Result(TrainingMatthewsCorrelationResultName, "The Matthews correlation value of the model on the training partition.", new DoubleValue()));
      Add(new Result(TestTruePositiveRateResultName, "Sensitivity/True positive rate of the model on the test partition\n(TP/(TP+FN)).", new PercentValue()));
      Add(new Result(TestTrueNegativeRateResultName, "Specificity/True negative rate of the model on the test partition\n(TN/(FP+TN)).", new PercentValue()));
  …
      Add(new Result(TestFalsePositiveRateResultName, "The false positive rate is the complement of the true negative rate of the model on the test partition.", new PercentValue()));
      Add(new Result(TestFalseDiscoveryRateResultName, "The false discovery rate is the complement of the positive predictive value of the model on the test partition.", new PercentValue()));
+     Add(new Result(TestF1ScoreResultName, "The F1 score of the model on the test partition.", new DoubleValue()));
+     Add(new Result(TestMatthewsCorrelationResultName, "The Matthews correlation value of the model on the test partition.", new DoubleValue()));
      TrainingTruePositiveRate = double.NaN;
      TrainingTrueNegativeRate = double.NaN;
  …
      TrainingFalsePositiveRate = double.NaN;
      TrainingFalseDiscoveryRate = double.NaN;
+     TrainingF1Score = double.NaN;
+     TrainingMatthewsCorrelation = double.NaN;
      TestTruePositiveRate = double.NaN;
      TestTrueNegativeRate = double.NaN;
  …
      TestFalsePositiveRate = double.NaN;
      TestFalseDiscoveryRate = double.NaN;
+     TestF1Score = double.NaN;
+     TestMatthewsCorrelation = double.NaN;
    }
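Note on the new measures: the F1 score added above is the harmonic mean of precision and recall. The following is only an illustrative sketch of the binary case (it is not the code of FOneScoreCalculator); the confusion-matrix counts tp, fp, and fn are assumed to be tallied elsewhere.

  // Illustrative only: binary F1 score from confusion-matrix counts.
  static class F1Example {
    public static double F1Score(int tp, int fp, int fn) {
      if (tp == 0) return 0.0;                    // no true positives => F1 is taken as 0
      double precision = tp / (double)(tp + fp);  // positive predictive value
      double recall = tp / (double)(tp + fn);     // true positive rate (sensitivity)
      return 2.0 * precision * recall / (precision + recall);
    }
  }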
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationSolutionBase.cs
r12012 → r13100

    using HeuristicLab.Optimization;
    using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
+   using HeuristicLab.Problems.DataAnalysis.OnlineCalculators;

    namespace HeuristicLab.Problems.DataAnalysis {
  …
        if (testPerformanceCalculator.ErrorState == OnlineCalculatorError.None)
          ClassificationPerformanceMeasures.SetTestResults(testPerformanceCalculator);
+
+       var f1Training = FOneScoreCalculator.Calculate(originalTrainingClassValues, estimatedTrainingClassValues, out errorState);
+       if (errorState == OnlineCalculatorError.None) ClassificationPerformanceMeasures.TrainingF1Score = f1Training;
+       var f1Test = FOneScoreCalculator.Calculate(originalTestClassValues, estimatedTestClassValues, out errorState);
+       if (errorState == OnlineCalculatorError.None) ClassificationPerformanceMeasures.TestF1Score = f1Test;
+
+       var mccTraining = MatthewsCorrelationCoefficientCalculator.Calculate(originalTrainingClassValues, estimatedTrainingClassValues, out errorState);
+       if (errorState == OnlineCalculatorError.None) ClassificationPerformanceMeasures.TrainingMatthewsCorrelation = mccTraining;
+       var mccTest = MatthewsCorrelationCoefficientCalculator.Calculate(originalTestClassValues, estimatedTestClassValues, out errorState);
+       if (errorState == OnlineCalculatorError.None) ClassificationPerformanceMeasures.TestMatthewsCorrelation = mccTest;
      }
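The calculator calls above follow the usual online-calculator pattern in this plugin: compute, then store the result only when errorState is OnlineCalculatorError.None. As a reminder of what the Matthews correlation measures, here is a hedged sketch of the binary coefficient computed from confusion-matrix counts; it is not the code of MatthewsCorrelationCoefficientCalculator.

  using System;

  // Illustrative only: binary Matthews correlation coefficient (range [-1, 1]).
  static class MccExample {
    public static double MatthewsCorrelation(int tp, int tn, int fp, int fn) {
      // Product of the four marginal sums; computed in double to avoid integer overflow.
      double denominator = Math.Sqrt((double)(tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));
      if (denominator == 0.0) return 0.0;  // common convention for the undefined case
      return (tp * (double)tn - fp * (double)fn) / denominator;
    }
  }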
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/ConstantRegressionModel.cs
r13001 → r13100

    [StorableClass]
    [Item("Constant Regression Model", "A model that always returns the same constant value regardless of the presented input data.")]
+   [Obsolete]
    public class ConstantRegressionModel : NamedItem, IRegressionModel, IStringConvertibleValue {
      [Storable]
  …
      public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
-       return new ConstantRegressionSolution(this, new RegressionProblemData(problemData));
+       return new ConstantRegressionSolution(new ConstantModel(constant), new RegressionProblemData(problemData));
      }
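ConstantRegressionModel is marked [Obsolete] and CreateRegressionSolution now wraps a new ConstantModel built from the stored constant. Conceptually, a constant model ignores its input and repeats one value per requested row; the sketch below captures only that idea (the class name here is made up, and the real ConstantModel additionally implements the HeuristicLab item, cloning, and persistence infrastructure).

  using System.Collections.Generic;
  using System.Linq;

  // Sketch only: the estimate is the same stored value for every row,
  // regardless of the presented input data.
  public class ConstantModelSketch {
    private readonly double constant;
    public ConstantModelSketch(double constant) { this.constant = constant; }

    public IEnumerable<double> GetEstimatedValues(IEnumerable<int> rows) {
      return rows.Select(_ => constant);
    }
  }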
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/ConstantRegressionSolution.cs
r12012 → r13100

    [Item(Name = "Constant Regression Solution", Description = "Represents a constant regression solution (model + data).")]
    public class ConstantRegressionSolution : RegressionSolution {
-     public new ConstantRegressionModel Model {
-       get { return (ConstantRegressionModel)base.Model; }
+     public new ConstantModel Model {
+       get { return (ConstantModel)base.Model; }
        set { base.Model = value; }
      }
  …
      protected ConstantRegressionSolution(bool deserializing) : base(deserializing) { }
      protected ConstantRegressionSolution(ConstantRegressionSolution original, Cloner cloner) : base(original, cloner) { }
-     public ConstantRegressionSolution(ConstantRegressionModel model, IRegressionProblemData problemData)
+     public ConstantRegressionSolution(ConstantModel model, IRegressionProblemData problemData)
        : base(model, problemData) {
        RecalculateResults();
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/Models/ConstantTimeSeriesPrognosisModel.cs
r12509 → r13100

    #endregion

+   using System;
    using System.Collections.Generic;
    using System.Linq;
  …
    [StorableClass]
    [Item("Constant TimeSeries Model", "A time series model that returns for all prediciton the same constant value.")]
+   [Obsolete]
    public class ConstantTimeSeriesPrognosisModel : ConstantRegressionModel, ITimeSeriesPrognosisModel {
      [StorableConstructor]
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisResults.cs
r12641 → r13100

      //mean model
      double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
-     var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
+     var meanModel = new ConstantModel(trainingMean);

      //AR1 model
  …
      //mean model
      double trainingMean = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).Average();
-     var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
+     var meanModel = new ConstantModel(trainingMean);

      //AR1 model
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisSolutionBase.cs
r12012 → r13100

      OnlineCalculatorError errorState;
      double trainingMean = ProblemData.TrainingIndices.Any() ? ProblemData.Dataset.GetDoubleValues(ProblemData.TargetVariable, ProblemData.TrainingIndices).Average() : double.NaN;
-     var meanModel = new ConstantTimeSeriesPrognosisModel(trainingMean);
+     var meanModel = new ConstantModel(trainingMean);

      double alpha, beta;
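In both time-series files the mean model is a baseline forecaster: the training-partition mean of the target variable is computed once, wrapped in a ConstantModel, and that mean is then predicted for every prognosis step. A rough sketch of the same baseline idea with plain collections (not the HeuristicLab problem-data API) follows.

  using System.Collections.Generic;
  using System.Linq;

  // Sketch only: a mean baseline forecasts the training average for every horizon step.
  static class MeanBaselineExample {
    public static double[] Forecast(IReadOnlyList<double> trainingTargets, int horizon) {
      double trainingMean = trainingTargets.Any() ? trainingTargets.Average() : double.NaN;
      return Enumerable.Repeat(trainingMean, horizon).ToArray();
    }
  }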