Changeset 5942 for trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/RegressionSolution.cs
Timestamp: 04/04/11 15:38:16 (13 years ago)
File: 1 edited
Legend: Unmodified | Added | Removed
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/RegressionSolution.cs
r5894 r5942 40 40 private const string TrainingRelativeErrorResultName = "Average relative error (training)"; 41 41 private const string TestRelativeErrorResultName = "Average relative error (test)"; 42 private const string TrainingNormalizedMeanSquaredErrorResultName = "Normalized mean squared error (training)"; 43 private const string TestNormalizedMeanSquaredErrorResultName = "Normalized mean squared error (test)"; 42 44 43 45 public new IRegressionModel Model { … … 81 83 } 82 84 85 public double TrainingNormalizedMeanSquaredError { 86 get { return ((DoubleValue)this[TrainingNormalizedMeanSquaredErrorResultName].Value).Value; } 87 private set { ((DoubleValue)this[TrainingNormalizedMeanSquaredErrorResultName].Value).Value = value; } 88 } 89 90 public double TestNormalizedMeanSquaredError { 91 get { return ((DoubleValue)this[TestNormalizedMeanSquaredErrorResultName].Value).Value; } 92 private set { ((DoubleValue)this[TestNormalizedMeanSquaredErrorResultName].Value).Value = value; } 93 } 94 83 95 84 96 [StorableConstructor] … … 95 107 Add(new Result(TrainingRelativeErrorResultName, "Average of the relative errors of the model output and the actual values on the training partition", new PercentValue())); 96 108 Add(new Result(TestRelativeErrorResultName, "Average of the relative errors of the model output and the actual values on the test partition", new PercentValue())); 109 Add(new Result(TrainingNormalizedMeanSquaredErrorResultName, "", new DoubleValue())); 110 Add(new Result(TestNormalizedMeanSquaredErrorResultName, "", new DoubleValue())); 97 111 98 112 RecalculateResults(); … … 114 128 IEnumerable<double> originalTestValues = ProblemData.Dataset.GetEnumeratedVariableValues(ProblemData.TargetVariable, ProblemData.TestIndizes); 115 129 116 Online EvaluatorError errorState;117 double trainingMSE = OnlineMeanSquaredError Evaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);118 TrainingMeanSquaredError = errorState == Online 
EvaluatorError.None ? trainingMSE : double.NaN;119 double testMSE = OnlineMeanSquaredError Evaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);120 TestMeanSquaredError = errorState == Online EvaluatorError.None ? testMSE : double.NaN;130 OnlineCalculatorError errorState; 131 double trainingMSE = OnlineMeanSquaredErrorCalculator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState); 132 TrainingMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingMSE : double.NaN; 133 double testMSE = OnlineMeanSquaredErrorCalculator.Calculate(estimatedTestValues, originalTestValues, out errorState); 134 TestMeanSquaredError = errorState == OnlineCalculatorError.None ? testMSE : double.NaN; 121 135 122 double trainingR2 = OnlinePearsonsRSquared Evaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);123 TrainingRSquared = errorState == Online EvaluatorError.None ? trainingR2 : double.NaN;124 double testR2 = OnlinePearsonsRSquared Evaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);125 TestRSquared = errorState == Online EvaluatorError.None ? testR2 : double.NaN;136 double trainingR2 = OnlinePearsonsRSquaredCalculator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState); 137 TrainingRSquared = errorState == OnlineCalculatorError.None ? trainingR2 : double.NaN; 138 double testR2 = OnlinePearsonsRSquaredCalculator.Calculate(estimatedTestValues, originalTestValues, out errorState); 139 TestRSquared = errorState == OnlineCalculatorError.None ? testR2 : double.NaN; 126 140 127 double trainingRelError = OnlineMeanAbsolutePercentageErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState); 128 TrainingRelativeError = errorState == OnlineEvaluatorError.None ? 
trainingRelError : double.NaN; 129 double testRelError = OnlineMeanAbsolutePercentageErrorEvaluator.Calculate(estimatedTestValues, originalTestValues, out errorState); 130 TestRelativeError = errorState == OnlineEvaluatorError.None ? testRelError : double.NaN; 141 double trainingRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState); 142 TrainingRelativeError = errorState == OnlineCalculatorError.None ? trainingRelError : double.NaN; 143 double testRelError = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(estimatedTestValues, originalTestValues, out errorState); 144 TestRelativeError = errorState == OnlineCalculatorError.None ? testRelError : double.NaN; 145 146 double trainingNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState); 147 TrainingNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? trainingNMSE : double.NaN; 148 double testNMSE = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(estimatedTestValues, originalTestValues, out errorState); 149 TestNormalizedMeanSquaredError = errorState == OnlineCalculatorError.None ? testNMSE : double.NaN; 131 150 } 132 151
Note: See TracChangeset for help on using the changeset viewer.