#region License Information
/* HeuristicLab
* Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
[Item("Mean relative error Evaluator", "Evaluator for symbolic regression models that calculates the mean relative error avg( |y' - y| / (|y| + 1))." +
"The +1 is necessary to handle data with the value of 0.0 correctly. " +
"Notice: Linear scaling is ignored for this evaluator.")]
[StorableClass]
public class SymbolicRegressionMeanRelativeErrorEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    // The mean relative error is an error measure, so smaller values are better.
    public override bool Maximization { get { return false; } }
    [StorableConstructor]
    protected SymbolicRegressionMeanRelativeErrorEvaluator(bool deserializing) : base(deserializing) { }
    protected SymbolicRegressionMeanRelativeErrorEvaluator(SymbolicRegressionMeanRelativeErrorEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionMeanRelativeErrorEvaluator(this, cloner);
    }
    public SymbolicRegressionMeanRelativeErrorEvaluator() : base() { }
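
    // Evaluates the current symbolic expression tree on the rows selected for evaluation,
    // stores the resulting mean relative error in the QualityParameter, and then delegates
    // to base.InstrumentedApply().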
    public override IOperation InstrumentedApply() {
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      IEnumerable<int> rows = GenerateRowsToEvaluate();
      double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows);
      QualityParameter.ActualValue = new DoubleValue(quality);
      return base.InstrumentedApply();
    }
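
    /// <summary>
    /// Calculates the mean relative error avg( |y' - y| / (|y| + 1) ) of the estimated values y'
    /// produced by the tree for the given rows. The +1 in the denominator keeps rows with a
    /// target value of 0.0 from causing a division by zero. Linear scaling is not applied.
    /// </summary>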
    public static double Calculate(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree solution, double lowerEstimationLimit, double upperEstimationLimit, IRegressionProblemData problemData, IEnumerable<int> rows) {
      IEnumerable<double> estimatedValues = interpreter.GetSymbolicExpressionTreeValues(solution, problemData.Dataset, rows);
      IEnumerable<double> targetValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
      // Clip the estimated values to the configured estimation limits before comparing them to the targets.
      IEnumerable<double> boundedEstimatedValues = estimatedValues.LimitToRange(lowerEstimationLimit, upperEstimationLimit);
      // Relative residual per row: |y - y'| / (|y| + 1).
      var relResiduals = boundedEstimatedValues.Zip(targetValues, (e, t) => Math.Abs(t - e) / (Math.Abs(t) + 1.0));

      OnlineCalculatorError errorState;
      OnlineCalculatorError varErrorState;
      double mre;
      double variance;
      OnlineMeanAndVarianceCalculator.Calculate(relResiduals, out mre, out variance, out errorState, out varErrorState);
      if (errorState != OnlineCalculatorError.None) return double.NaN;
      return mre;
    }
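
    // Evaluates a given tree against the supplied problem data: the execution context is attached
    // to the interpreter and estimation-limit parameters only for the duration of the calculation
    // and detached again afterwards.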
    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;

      double mre = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;

      return mre;
    }
  }
}