#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [Item("SymbolicRegressionSingleObjectiveOSGAEvaluator", "An evaluator which tries to predict when a child will not be able to fulfill the offspring selection criteria, to save evaluation time.")]
  [StorableClass]
  public class SymbolicRegressionSingleObjectiveOsgaEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string RelativeParentChildQualityThresholdParameterName = "RelativeParentChildQualityThreshold";
    private const string RelativeFitnessEvaluationIntervalSizeParameterName = "RelativeFitnessEvaluationIntervalSize";
    private const string ResultCollectionParameterName = "Results";
    private const string AggregateStatisticsParameterName = "AggregateStatistics";

    #region parameters
    public ILookupParameter<ResultCollection> ResultCollectionParameter {
      get { return (ILookupParameter<ResultCollection>)Parameters[ResultCollectionParameterName]; }
    }
    public IValueParameter<BoolValue> AggregateStatisticsParameter {
      get { return (IValueParameter<BoolValue>)Parameters[AggregateStatisticsParameterName]; }
    }
    public IValueParameter<IntMatrix> RejectedStatsParameter {
      get { return (IValueParameter<IntMatrix>)Parameters["RejectedStats"]; }
    }
    public IValueParameter<IntMatrix> NotRejectedStatsParameter {
      get { return (IValueParameter<IntMatrix>)Parameters["TotalStats"]; }
    }
    public IValueLookupParameter<DoubleValue> ComparisonFactorParameter {
      get { return (ValueLookupParameter<DoubleValue>)Parameters["ComparisonFactor"]; }
    }
    public IFixedValueParameter<PercentValue> RelativeParentChildQualityThresholdParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[RelativeParentChildQualityThresholdParameterName]; }
    }
    public IFixedValueParameter<PercentValue> RelativeFitnessEvaluationIntervalSizeParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[RelativeFitnessEvaluationIntervalSizeParameterName]; }
    }
    public IScopeTreeLookupParameter<DoubleValue> ParentQualitiesParameter {
      get { return (IScopeTreeLookupParameter<DoubleValue>)Parameters["ParentQualities"]; }
    }
    #endregion

    #region parameter properties
    public double RelativeParentChildQualityThreshold {
      get { return RelativeParentChildQualityThresholdParameter.Value.Value; }
      set { RelativeParentChildQualityThresholdParameter.Value.Value = value; }
    }
    public double RelativeFitnessEvaluationIntervalSize {
      get { return RelativeFitnessEvaluationIntervalSizeParameter.Value.Value; }
      set { RelativeFitnessEvaluationIntervalSizeParameter.Value.Value = value; }
    }
    public IntMatrix RejectedStats {
      get { return RejectedStatsParameter.Value; }
      set { RejectedStatsParameter.Value = value; }
    }
    public IntMatrix TotalStats {
      get { return NotRejectedStatsParameter.Value; }
      set { NotRejectedStatsParameter.Value = value; }
    }
    #endregion
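    // Descriptive note (added): RejectedStats and TotalStats are only filled when AggregateStatistics is enabled.
    // RejectedStats counts, per evaluation interval, how often a child was predicted to be rejected (row "Predicted")
    // and how often such a prediction coincided with an actual rejection (row "Actual"); TotalStats aggregates the
    // overall predicted vs. actual rejected/not-rejected counts.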
    public override bool Maximization { get { return true; } }

    public SymbolicRegressionSingleObjectiveOsgaEvaluator() {
      Parameters.Add(new ValueLookupParameter<DoubleValue>("ComparisonFactor", "Determines if the quality should be compared to the better parent (1.0), to the worse (0.0) or to any linearly interpolated value between them."));
      Parameters.Add(new FixedValueParameter<PercentValue>(RelativeParentChildQualityThresholdParameterName, new PercentValue(0.9)));
      Parameters.Add(new FixedValueParameter<PercentValue>(RelativeFitnessEvaluationIntervalSizeParameterName, new PercentValue(0.1)));
      Parameters.Add(new LookupParameter<ResultCollection>(ResultCollectionParameterName));
      Parameters.Add(new ScopeTreeLookupParameter<DoubleValue>("ParentQualities") { ActualName = "Quality" });
      Parameters.Add(new ValueParameter<IntMatrix>("RejectedStats", new IntMatrix()));
      Parameters.Add(new ValueParameter<IntMatrix>("TotalStats", new IntMatrix()));
      Parameters.Add(new ValueParameter<BoolValue>(AggregateStatisticsParameterName, new BoolValue(false)));
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(ResultCollectionParameterName))
        Parameters.Add(new LookupParameter<ResultCollection>(ResultCollectionParameterName));
      if (!Parameters.ContainsKey("ParentQualities"))
        Parameters.Add(new ScopeTreeLookupParameter<DoubleValue>("ParentQualities") { ActualName = "Quality" });
      if (!Parameters.ContainsKey("RejectedStats"))
        Parameters.Add(new ValueParameter<IntMatrix>("RejectedStats", new IntMatrix()));
      if (!Parameters.ContainsKey("TotalStats"))
        Parameters.Add(new ValueParameter<IntMatrix>("TotalStats", new IntMatrix()));
    }

    [StorableConstructor]
    protected SymbolicRegressionSingleObjectiveOsgaEvaluator(bool deserializing) : base(deserializing) { }

    protected SymbolicRegressionSingleObjectiveOsgaEvaluator(SymbolicRegressionSingleObjectiveOsgaEvaluator original, Cloner cloner)
      : base(original, cloner) { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionSingleObjectiveOsgaEvaluator(this, cloner);
    }

    public override void ClearState() {
      base.ClearState();
      RejectedStats = new IntMatrix();
      TotalStats = new IntMatrix();
    }

    public override IOperation InstrumentedApply() {
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      IEnumerable<int> rows = GenerateRowsToEvaluate();
      var interpreter = SymbolicDataAnalysisTreeInterpreterParameter.ActualValue;
      var estimationLimits = EstimationLimitsParameter.ActualValue;
      var problemData = ProblemDataParameter.ActualValue;
      var applyLinearScaling = ApplyLinearScalingParameter.ActualValue.Value;

      double quality;
      var parentQualities = ParentQualitiesParameter.ActualValue;
      // parent subscopes are not present during evaluation of the initial population
      if (parentQualities.Length > 0) {
        quality = Calculate(interpreter, solution, estimationLimits, problemData, rows, applyLinearScaling);
      } else {
        quality = Calculate(interpreter, solution, estimationLimits.Lower, estimationLimits.Upper, problemData, rows, applyLinearScaling);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);

      return base.InstrumentedApply();
    }
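    // Descriptive note (added): the static Calculate below performs the standard R² evaluation (squared Pearson
    // correlation of target and estimated values over all rows, optionally with linear scaling). InstrumentedApply
    // falls back to it when no parent qualities are present, i.e. for the initial population, and the Evaluate
    // override at the end of the class uses it as well.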
    public static double Calculate(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree solution, double lowerEstimationLimit, double upperEstimationLimit, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling) {
      IEnumerable<double> estimatedValues = interpreter.GetSymbolicExpressionTreeValues(solution, problemData.Dataset, rows);
      IEnumerable<double> targetValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
      OnlineCalculatorError errorState;

      double r;
      if (applyLinearScaling) {
        var rCalculator = new OnlinePearsonsRCalculator();
        CalculateWithScaling(targetValues, estimatedValues, lowerEstimationLimit, upperEstimationLimit, rCalculator, problemData.Dataset.Rows);
        errorState = rCalculator.ErrorState;
        r = rCalculator.R;
      } else {
        IEnumerable<double> boundedEstimatedValues = estimatedValues.LimitToRange(lowerEstimationLimit, upperEstimationLimit);
        r = OnlinePearsonsRCalculator.Calculate(targetValues, boundedEstimatedValues, out errorState);
      }
      if (errorState != OnlineCalculatorError.None) return double.NaN;
      return r * r;
    }

    private double Calculate(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree solution, DoubleLimit estimationLimits, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling) {
      var estimatedValues = interpreter.GetSymbolicExpressionTreeValues(solution, problemData.Dataset, rows).LimitToRange(estimationLimits.Lower, estimationLimits.Upper);
      var targetValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);

      var parentQualities = ParentQualitiesParameter.ActualValue.Select(x => x.Value);
      var minQuality = parentQualities.Min();
      var maxQuality = parentQualities.Max();
      var comparisonFactor = ComparisonFactorParameter.ActualValue.Value;
      var parentQuality = minQuality + (maxQuality - minQuality) * comparisonFactor;
      var threshold = parentQuality * RelativeParentChildQualityThreshold;

      var pearsonRCalculator = new OnlinePearsonsRCalculator();
      var targetValuesEnumerator = targetValues.GetEnumerator();
      var estimatedValuesEnumerator = estimatedValues.GetEnumerator();
      var trainingPartitionSize = problemData.TrainingPartition.Size;
      var interval = (int)Math.Floor(trainingPartitionSize * RelativeFitnessEvaluationIntervalSize);
      var aggregateStatistics = AggregateStatisticsParameter.Value.Value;
      var i = 0;
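      // Descriptive note (added): the R² is updated row by row and inspected every 'interval' rows
      // (RelativeFitnessEvaluationIntervalSize of the training partition). As soon as an intermediate quality
      // does not exceed 'threshold' (RelativeParentChildQualityThreshold times the interpolated parent quality),
      // the child is predicted to fail offspring selection. Without statistics aggregation the evaluation stops
      // right there; with AggregateStatistics enabled the full quality is still computed so that the prediction
      // can be compared against the actual rejection decision.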
      if (aggregateStatistics) {
        var trainingEnd = problemData.TrainingPartition.End;
        var qualityPerInterval = new List<double>();
        while (targetValuesEnumerator.MoveNext() && estimatedValuesEnumerator.MoveNext()) {
          pearsonRCalculator.Add(targetValuesEnumerator.Current, estimatedValuesEnumerator.Current);
          ++i;
          if (i % interval == 0 || i == trainingPartitionSize) {
            var q = pearsonRCalculator.ErrorState != OnlineCalculatorError.None ? double.NaN : pearsonRCalculator.R;
            qualityPerInterval.Add(q * q);
          }
        }
        var r = pearsonRCalculator.ErrorState != OnlineCalculatorError.None ? double.NaN : pearsonRCalculator.R;
        var actualQuality = r * r;

        bool predictedRejected = false;
        i = 0;
        double quality = actualQuality;
        foreach (var q in qualityPerInterval) {
          if (double.IsNaN(q) || !(q > threshold)) {
            predictedRejected = true;
            quality = q;
            break;
          }
          ++i;
        }
        var actuallyRejected = !(actualQuality > parentQuality);

        if (RejectedStats.Rows == 0 || TotalStats.Rows == 0) {
          RejectedStats = new IntMatrix(2, qualityPerInterval.Count);
          RejectedStats.RowNames = new[] { "Predicted", "Actual" };
          RejectedStats.ColumnNames = Enumerable.Range(1, RejectedStats.Columns).Select(x => string.Format("0-{0}", Math.Min(trainingEnd, x * interval)));
          TotalStats = new IntMatrix(2, 2);
          TotalStats.RowNames = new[] { "Predicted", "Actual" };
          TotalStats.ColumnNames = new[] { "Rejected", "Not Rejected" };
        }
        // gather some statistics
        if (predictedRejected) {
          RejectedStats[0, i]++;
          TotalStats[0, 0]++;
        } else {
          TotalStats[0, 1]++;
        }
        if (actuallyRejected) {
          TotalStats[1, 0]++;
        } else {
          TotalStats[1, 1]++;
        }
        if (predictedRejected && actuallyRejected) {
          RejectedStats[1, i]++;
        }
        return quality;
      } else {
        while (targetValuesEnumerator.MoveNext() && estimatedValuesEnumerator.MoveNext()) {
          pearsonRCalculator.Add(targetValuesEnumerator.Current, estimatedValuesEnumerator.Current);
          ++i;
          if (i % interval == 0 || i == trainingPartitionSize) {
            var q = pearsonRCalculator.ErrorState != OnlineCalculatorError.None ? double.NaN : pearsonRCalculator.R;
            var quality = q * q;
            if (!(quality > threshold))
              return quality;
          }
        }
        var r = pearsonRCalculator.ErrorState != OnlineCalculatorError.None ? double.NaN : pearsonRCalculator.R;
        var actualQuality = r * r;
        return actualQuality;
      }
    }

    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;

      var interpreter = SymbolicDataAnalysisTreeInterpreterParameter.ActualValue;
      var estimationLimits = EstimationLimitsParameter.ActualValue;
      var applyLinearScaling = ApplyLinearScalingParameter.ActualValue.Value;

      double r2 = Calculate(interpreter, tree, estimationLimits.Lower, estimationLimits.Upper, problemData, rows, applyLinearScaling);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;

      return r2;
    }
  }
}
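// Usage sketch (added; not part of the original file, the exact wiring is an assumption): the evaluator is meant
// to replace the default R² evaluator of a single-objective symbolic regression problem that is run with
// offspring selection, e.g.
//
//   var problem = new SymbolicRegressionSingleObjectiveProblem();
//   var evaluator = new SymbolicRegressionSingleObjectiveOsgaEvaluator {
//     RelativeParentChildQualityThreshold = 0.9,   // predict rejection once the partial R² no longer exceeds 90 % of the interpolated parent quality
//     RelativeFitnessEvaluationIntervalSize = 0.1  // check the partial R² after every 10 % of the training partition
//   };
//   // hypothetical wiring; the name of the problem's evaluator parameter may differ
//   problem.EvaluatorParameter.Value = evaluator;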