Timestamp:
03/30/11 18:04:03
Author:
gkronber
Message:

#1453: Added an ErrorState property to the online evaluators to indicate whether the result value is valid or whether an error occurred during the calculation. Adapted all classes that use one of the online evaluators to check this property.
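All four call sites in the diffs below follow the same scheme: the static Calculate helpers now take an additional out OnlineEvaluatorError parameter, and the callers fall back to double.NaN (or skip the assignment) whenever that state is not None. The following C# sketch illustrates the scheme end to end; it is not the HeuristicLab implementation, and everything beyond the enum member None and the Calculate(..., out errorState) call shape (e.g. the member names InvalidValueAdded and InsufficientElementsAdded, the exact error conditions, and the evaluator internals) is an assumption made for this example.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Assumed shape of the error-state enum; only the member None is confirmed
    // by this changeset, the other members are hypothetical examples.
    public enum OnlineEvaluatorError { None, InvalidValueAdded, InsufficientElementsAdded }

    public class OnlineMeanSquaredErrorEvaluatorSketch {
      private double sumOfSquaredErrors;
      private int n;

      // The new ErrorState property: callers can check it instead of trusting
      // the returned value alone.
      public OnlineEvaluatorError ErrorState { get; private set; }

      public double MeanSquaredError {
        get { return n > 0 ? sumOfSquaredErrors / n : double.NaN; }
      }

      public void Add(double original, double estimated) {
        if (double.IsNaN(original) || double.IsInfinity(original) ||
            double.IsNaN(estimated) || double.IsInfinity(estimated)) {
          ErrorState = OnlineEvaluatorError.InvalidValueAdded;  // assumed error condition
        } else {
          double error = estimated - original;
          sumOfSquaredErrors += error * error;
          n++;
        }
      }

      // Static convenience method in the style used by the call sites below:
      // the result is returned, its validity is reported via the out parameter.
      public static double Calculate(IEnumerable<double> estimated, IEnumerable<double> original,
                                     out OnlineEvaluatorError errorState) {
        var evaluator = new OnlineMeanSquaredErrorEvaluatorSketch();
        foreach (var pair in original.Zip(estimated, (o, e) => new { o, e }))
          evaluator.Add(pair.o, pair.e);
        errorState = evaluator.ErrorState;
        return evaluator.MeanSquaredError;
      }
    }

    public static class CallSiteExample {
      public static void Main() {
        var original  = new List<double> { 1.0, 2.0, 3.0 };
        var estimated = new List<double> { 1.1, 1.9, double.NaN };

        // Caller-side pattern used throughout the diffs: fall back to NaN when
        // the evaluator signals any error state.
        OnlineEvaluatorError errorState;
        double mse = OnlineMeanSquaredErrorEvaluatorSketch.Calculate(estimated, original, out errorState);
        double trainingMSE = errorState == OnlineEvaluatorError.None ? mse : double.NaN;
        Console.WriteLine(trainingMSE);  // NaN, because one estimated value is invalid
      }
    }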

Location:
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation
Files:
4 edited

Legend:

  (no prefix)  Unmodified
  +            Added
  -            Removed
  • trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ClassificationSolution.cs

    r5809 → r5894

      IEnumerable<double> originalTestClassValues = ProblemData.Dataset.GetEnumeratedVariableValues(ProblemData.TargetVariable, ProblemData.TestIndizes);

    - double trainingAccuracy = OnlineAccuracyEvaluator.Calculate(estimatedTrainingClassValues, originalTrainingClassValues);
    - double testAccuracy = OnlineAccuracyEvaluator.Calculate(estimatedTestClassValues, originalTestClassValues);
    + OnlineEvaluatorError errorState;
    + double trainingAccuracy = OnlineAccuracyEvaluator.Calculate(estimatedTrainingClassValues, originalTrainingClassValues, out errorState);
    + if (errorState != OnlineEvaluatorError.None) trainingAccuracy = double.NaN;
    + double testAccuracy = OnlineAccuracyEvaluator.Calculate(estimatedTestClassValues, originalTestClassValues, out errorState);
    + if (errorState != OnlineEvaluatorError.None) testAccuracy = double.NaN;

      TrainingAccuracy = trainingAccuracy;
  • trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/DiscriminantFunctionClassificationSolution.cs

    r5889 → r5894

      IEnumerable<double> originalTestValues = ProblemData.Dataset.GetEnumeratedVariableValues(ProblemData.TargetVariable, ProblemData.TestIndizes);

    - double trainingMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues);
    - double testMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTestValues, originalTestValues);
    - double trainingR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues);
    - double testR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTestValues, originalTestValues);
    + OnlineEvaluatorError errorState;
    + double trainingMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);
    + TrainingMeanSquaredError = errorState == OnlineEvaluatorError.None ? trainingMSE : double.NaN;
    + double testMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);
    + TestMeanSquaredError = errorState == OnlineEvaluatorError.None ? testMSE : double.NaN;

    - TrainingMeanSquaredError = trainingMSE;
    - TestMeanSquaredError = testMSE;
    - TrainingRSquared = trainingR2;
    - TestRSquared = testR2;
    + double trainingR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);
    + TrainingRSquared = errorState == OnlineEvaluatorError.None ? trainingR2 : double.NaN;
    + double testR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);
    + TestRSquared = errorState == OnlineEvaluatorError.None ? testR2 : double.NaN;
      }

  • trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ThresholdCalculators/NormalDistributionCutPointsThresholdCalculator.cs

    r5849 → r5894

        double classValue = group.Key;
        double mean, variance;
    -   OnlineMeanAndVarianceCalculator.Calculate(estimatedClassValues, out mean, out variance);
    -   classMean[classValue] = mean;
    -   classStdDev[classValue] = Math.Sqrt(variance);
    +   OnlineEvaluatorError meanErrorState, varianceErrorState;
    +   OnlineMeanAndVarianceCalculator.Calculate(estimatedClassValues, out mean, out variance, out meanErrorState, out varianceErrorState);
    +
    +   if (meanErrorState == OnlineEvaluatorError.None && varianceErrorState == OnlineEvaluatorError.None) {
    +     classMean[classValue] = mean;
    +     classStdDev[classValue] = Math.Sqrt(variance);
    +   }
      }
      double[] originalClasses = classMean.Keys.OrderBy(x => x).ToArray();
  • trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Regression/RegressionSolution.cs

    r5809 → r5894

      IEnumerable<double> originalTestValues = ProblemData.Dataset.GetEnumeratedVariableValues(ProblemData.TargetVariable, ProblemData.TestIndizes);

    - double trainingMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues);
    - double testMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTestValues, originalTestValues);
    - double trainingR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues);
    - double testR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTestValues, originalTestValues);
    - double trainingRelError = OnlineMeanAbsolutePercentageErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues);
    - double testRelError = OnlineMeanAbsolutePercentageErrorEvaluator.Calculate(estimatedTestValues, originalTestValues);
    + OnlineEvaluatorError errorState;
    + double trainingMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);
    + TrainingMeanSquaredError = errorState == OnlineEvaluatorError.None ? trainingMSE : double.NaN;
    + double testMSE = OnlineMeanSquaredErrorEvaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);
    + TestMeanSquaredError = errorState == OnlineEvaluatorError.None ? testMSE : double.NaN;

    - TrainingMeanSquaredError = trainingMSE;
    - TestMeanSquaredError = testMSE;
    - TrainingRSquared = trainingR2;
    - TestRSquared = testR2;
    - TrainingRelativeError = trainingRelError;
    - TestRelativeError = testRelError;
    + double trainingR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);
    + TrainingRSquared = errorState == OnlineEvaluatorError.None ? trainingR2 : double.NaN;
    + double testR2 = OnlinePearsonsRSquaredEvaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);
    + TestRSquared = errorState == OnlineEvaluatorError.None ? testR2 : double.NaN;
    +
    + double trainingRelError = OnlineMeanAbsolutePercentageErrorEvaluator.Calculate(estimatedTrainingValues, originalTrainingValues, out errorState);
    + TrainingRelativeError = errorState == OnlineEvaluatorError.None ? trainingRelError : double.NaN;
    + double testRelError = OnlineMeanAbsolutePercentageErrorEvaluator.Calculate(estimatedTestValues, originalTestValues, out errorState);
    + TestRelativeError = errorState == OnlineEvaluatorError.None ? testRelError : double.NaN;
      }

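The NormalDistributionCutPointsThresholdCalculator diff differs slightly from the other three call sites: OnlineMeanAndVarianceCalculator reports two separate error states, one for the mean and one for the variance, and the class means and standard deviations are only stored when both are None. The following self-contained C# sketch illustrates that dual-state pattern; the enum members other than None and the exact error conditions (for instance, treating the sample variance as undefined for fewer than two samples) are assumptions for illustration, not taken from this changeset.

    using System;
    using System.Collections.Generic;

    // Assumed enum shape; only None is confirmed by the changeset.
    public enum OnlineEvaluatorError { None, InvalidValueAdded, InsufficientElementsAdded }

    public static class OnlineMeanAndVarianceCalculatorSketch {
      // Computes mean and sample variance of the given values and reports the
      // validity of each result separately, mirroring the call in
      // NormalDistributionCutPointsThresholdCalculator.cs above.
      public static void Calculate(IEnumerable<double> values,
                                   out double mean, out double variance,
                                   out OnlineEvaluatorError meanErrorState,
                                   out OnlineEvaluatorError varianceErrorState) {
        double sum = 0.0, sumOfSquares = 0.0;
        int n = 0;
        bool invalid = false;

        foreach (double x in values) {
          if (double.IsNaN(x) || double.IsInfinity(x)) { invalid = true; break; }
          sum += x;
          sumOfSquares += x * x;
          n++;
        }

        if (invalid) {
          mean = variance = double.NaN;
          meanErrorState = varianceErrorState = OnlineEvaluatorError.InvalidValueAdded;  // assumed condition
          return;
        }

        // Assumed thresholds: the mean needs at least one sample, the sample
        // variance at least two.
        mean = n >= 1 ? sum / n : double.NaN;
        meanErrorState = n >= 1 ? OnlineEvaluatorError.None : OnlineEvaluatorError.InsufficientElementsAdded;

        variance = n >= 2 ? (sumOfSquares - n * mean * mean) / (n - 1) : double.NaN;
        varianceErrorState = n >= 2 ? OnlineEvaluatorError.None : OnlineEvaluatorError.InsufficientElementsAdded;
      }
    }

    public static class ThresholdCalculatorCallSiteExample {
      public static void Main() {
        var classMean = new Dictionary<double, double>();
        var classStdDev = new Dictionary<double, double>();
        double classValue = 1.0;
        var estimatedClassValues = new List<double> { 0.9, 1.1, 1.05 };

        double mean, variance;
        OnlineEvaluatorError meanErrorState, varianceErrorState;
        OnlineMeanAndVarianceCalculatorSketch.Calculate(estimatedClassValues, out mean, out variance,
                                                        out meanErrorState, out varianceErrorState);

        // Only use the statistics when both results are valid, as in the diff above.
        if (meanErrorState == OnlineEvaluatorError.None && varianceErrorState == OnlineEvaluatorError.None) {
          classMean[classValue] = mean;
          classStdDev[classValue] = Math.Sqrt(variance);
        }
        Console.WriteLine("{0} +/- {1}", classMean[classValue], classStdDev[classValue]);
      }
    }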