source: branches/DataAnalysis.ComplexityAnalyzer/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer.cs @ 13211

Last change on this file since 13211 was 13211, checked in by mkommend, 6 years ago

#2175: Added parameter for constant optimization iterations.

File size: 11.5 KB
Line 
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  /// <summary>
  /// An operator that analyzes the training best symbolic regression solution for multi objective symbolic regression problems.
  /// In addition to the training-best solutions it maintains a "Quality vs Tree Size" scatter plot and aggregate
  /// pareto-front statistics (non-dominated areas and average NMSE values) in the result collection.
  /// </summary>
  [Item("SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer", "An operator that analyzes the training best symbolic regression solution for multi objective symbolic regression problems.")]
  [StorableClass]
  public sealed class SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer : SymbolicDataAnalysisMultiObjectiveTrainingBestSolutionAnalyzer<ISymbolicRegressionSolution>,
    ISymbolicDataAnalysisInterpreterOperator, ISymbolicDataAnalysisBoundedOperator {
    private const string ProblemDataParameterName = "ProblemData";
    private const string SymbolicDataAnalysisTreeInterpreterParameterName = "SymbolicDataAnalysisTreeInterpreter";
    private const string EstimationLimitsParameterName = "EstimationLimits";
    private const string MaximumSymbolicExpressionTreeLengthParameterName = "MaximumSymbolicExpressionTreeLength";
    private const string ValidationPartitionParameterName = "ValidationPartition";

    // Result names used by Apply(); kept as constants for consistency with the parameter-name constants above.
    private const string ParetoFrontAnalysisResultName = "Pareto Front Analysis";
    private const string ParetoFrontResultsResultName = "Pareto Front Results";

    #region parameter properties
    public ILookupParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (ILookupParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public ILookupParameter<ISymbolicDataAnalysisExpressionTreeInterpreter> SymbolicDataAnalysisTreeInterpreterParameter {
      get { return (ILookupParameter<ISymbolicDataAnalysisExpressionTreeInterpreter>)Parameters[SymbolicDataAnalysisTreeInterpreterParameterName]; }
    }
    public IValueLookupParameter<DoubleLimit> EstimationLimitsParameter {
      get { return (IValueLookupParameter<DoubleLimit>)Parameters[EstimationLimitsParameterName]; }
    }
    public ILookupParameter<IntValue> MaximumSymbolicExpressionTreeLengthParameter {
      get { return (ILookupParameter<IntValue>)Parameters[MaximumSymbolicExpressionTreeLengthParameterName]; }
    }
    public IValueLookupParameter<IntRange> ValidationPartitionParameter {
      get { return (IValueLookupParameter<IntRange>)Parameters[ValidationPartitionParameterName]; }
    }
    #endregion

    [StorableConstructor]
    private SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer(bool deserializing) : base(deserializing) { }
    private SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer(SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer original, Cloner cloner) : base(original, cloner) { }
    public SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer()
      : base() {
      Parameters.Add(new LookupParameter<IRegressionProblemData>(ProblemDataParameterName, "The problem data for the symbolic regression solution.") { Hidden = true });
      Parameters.Add(new LookupParameter<ISymbolicDataAnalysisExpressionTreeInterpreter>(SymbolicDataAnalysisTreeInterpreterParameterName, "The symbolic data analysis tree interpreter for the symbolic expression tree.") { Hidden = true });
      Parameters.Add(new ValueLookupParameter<DoubleLimit>(EstimationLimitsParameterName, "The lower and upper limit for the estimated values produced by the symbolic regression model.") { Hidden = true });
      Parameters.Add(new LookupParameter<IntValue>(MaximumSymbolicExpressionTreeLengthParameterName, "Maximal length of the symbolic expression.") { Hidden = true });
      Parameters.Add(new ValueLookupParameter<IntRange>(ValidationPartitionParameterName, "The validation partition."));
    }

    // Backwards compatibility: add parameters introduced after older versions of this operator were persisted.
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(MaximumSymbolicExpressionTreeLengthParameterName))
        Parameters.Add(new LookupParameter<IntValue>(MaximumSymbolicExpressionTreeLengthParameterName, "Maximal length of the symbolic expression.") { Hidden = true });
      if (!Parameters.ContainsKey(ValidationPartitionParameterName))
        Parameters.Add(new ValueLookupParameter<IntRange>(ValidationPartitionParameterName, "The validation partition."));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionMultiObjectiveTrainingBestSolutionAnalyzer(this, cloner);
    }

    /// <summary>
    /// Wraps the given tree in a regression model (optionally linearly scaled to the training targets)
    /// and packages it together with a clone of the problem data as a regression solution.
    /// </summary>
    protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree, double[] bestQuality) {
      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
      if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
      return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
    }

    /// <summary>
    /// Performs the base analysis and additionally updates the "Quality vs Tree Size" scatter plot
    /// and the aggregate pareto-front statistics derived from the training-best solutions.
    /// </summary>
    public override IOperation Apply() {
      var operation = base.Apply();

      var paretoFront = TrainingBestSolutionsParameter.ActualValue;
      // Robustness: with no solutions the Average() aggregations below would throw InvalidOperationException.
      if (paretoFront == null || !paretoFront.Any()) return operation;

      var qualityToTreeSize = GetOrCreateQualityVsTreeSizePlot();
      var sizeParetoFront = CalculateSizeParetoFront(paretoFront);

      // Rebuild the plot rows from scratch on every application.
      qualityToTreeSize.Rows.Clear();
      var trainingRow = new ScatterPlotDataRow("Training NMSE", "",
        sizeParetoFront.Select(x => new Point2D<double>(x.Model.SymbolicExpressionTree.Length, x.TrainingNormalizedMeanSquaredError)));
      trainingRow.VisualProperties.PointSize = 5;
      qualityToTreeSize.Rows.Add(trainingRow);

      var validationPartition = ValidationPartitionParameter.ActualValue;
      // Robustness: the validation partition is optional (value-lookup parameter); skip the row when unset or empty.
      if (validationPartition != null && validationPartition.Size != 0) {
        var problemData = ProblemDataParameter.ActualValue;
        var validationIndices = Enumerable.Range(validationPartition.Start, validationPartition.Size).ToList();
        var targetValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, validationIndices).ToList();
        OnlineCalculatorError error; // error state of the online calculator is intentionally ignored here
        var validationRow = new ScatterPlotDataRow("Validation NMSE", "",
          sizeParetoFront.Select(x => new Point2D<double>(x.Model.SymbolicExpressionTree.Length,
            OnlineNormalizedMeanSquaredErrorCalculator.Calculate(targetValues, x.GetEstimatedValues(validationIndices), out error))));
        validationRow.VisualProperties.PointSize = 5;
        qualityToTreeSize.Rows.Add(validationRow);
      }

      // Area measures: average of (tree length * NMSE) over the size-filtered front.
      double trainingArea = sizeParetoFront.Average(s => s.Model.SymbolicExpressionTree.Length * s.TrainingNormalizedMeanSquaredError);
      double testArea = sizeParetoFront.Average(s => s.Model.SymbolicExpressionTree.Length * s.TestNormalizedMeanSquaredError);

      var paretoFrontResults = GetOrCreateParetoFrontResults();
      GetOrAddDoubleValue(paretoFrontResults, "Non Dominated Area (training)").Value = trainingArea;
      GetOrAddDoubleValue(paretoFrontResults, "Non Dominated Area (test)").Value = testArea;
      GetOrAddDoubleValue(paretoFrontResults, "Non Dominated Area Difference").Value = trainingArea - testArea;
      GetOrAddDoubleValue(paretoFrontResults, "Average Training NMSE").Value = sizeParetoFront.Average(s => s.TrainingNormalizedMeanSquaredError);
      GetOrAddDoubleValue(paretoFrontResults, "Average Test NMSE").Value = sizeParetoFront.Average(s => s.TestNormalizedMeanSquaredError);

      return operation;
    }

    // Retrieves the "Pareto Front Analysis" scatter plot from the results, creating and configuring it on first use.
    private ScatterPlot GetOrCreateQualityVsTreeSizePlot() {
      IResult result;
      if (ResultCollection.TryGetValue(ParetoFrontAnalysisResultName, out result))
        return (ScatterPlot)result.Value;

      var plot = new ScatterPlot("Quality vs Tree Size", "");
      // Fix both axes so successive applications plot on a stable scale:
      // x in [0, max tree length], y (NMSE) in [0, 2].
      plot.VisualProperties.XAxisMinimumAuto = false;
      plot.VisualProperties.XAxisMaximumAuto = false;
      plot.VisualProperties.YAxisMinimumAuto = false;
      plot.VisualProperties.YAxisMaximumAuto = false;
      plot.VisualProperties.XAxisMinimumFixedValue = 0;
      plot.VisualProperties.XAxisMaximumFixedValue = MaximumSymbolicExpressionTreeLengthParameter.ActualValue.Value;
      plot.VisualProperties.YAxisMinimumFixedValue = 0;
      plot.VisualProperties.YAxisMaximumFixedValue = 2;
      ResultCollection.Add(new Result(ParetoFrontAnalysisResultName, plot));
      return plot;
    }

    // Filters the pareto front, ordered by tree length, down to solutions that strictly improve the training NMSE,
    // keeping at most one (the best) solution per tree length.
    private static LinkedList<ISymbolicRegressionSolution> CalculateSizeParetoFront(IEnumerable<ISymbolicRegressionSolution> solutions) {
      int previousTreeLength = -1;
      var sizeParetoFront = new LinkedList<ISymbolicRegressionSolution>();
      foreach (var solution in solutions.OrderBy(s => s.Model.SymbolicExpressionTree.Length)) {
        int treeLength = solution.Model.SymbolicExpressionTree.Length;
        if (!sizeParetoFront.Any()) sizeParetoFront.AddLast(solution);
        if (solution.TrainingNormalizedMeanSquaredError < sizeParetoFront.Last.Value.TrainingNormalizedMeanSquaredError) {
          // A better solution with the same tree length replaces the one kept last.
          if (treeLength == previousTreeLength)
            sizeParetoFront.RemoveLast();
          sizeParetoFront.AddLast(solution);
        }
        previousTreeLength = treeLength;
      }
      return sizeParetoFront;
    }

    // Retrieves the nested "Pareto Front Results" collection, creating it on first use.
    private ResultCollection GetOrCreateParetoFrontResults() {
      IResult result;
      if (ResultCollection.TryGetValue(ParetoFrontResultsResultName, out result))
        return (ResultCollection)result.Value;

      var paretoFrontResults = new ResultCollection();
      ResultCollection.Add(new Result(ParetoFrontResultsResultName, paretoFrontResults));
      return paretoFrontResults;
    }

    // Looks up a DoubleValue result by name, adding a fresh one if it is not yet present.
    private static DoubleValue GetOrAddDoubleValue(ResultCollection results, string name) {
      IResult result;
      if (results.TryGetValue(name, out result)) return (DoubleValue)result.Value;

      var value = new DoubleValue();
      results.Add(new Result(name, value));
      return value;
    }
  }
}
Note: See TracBrowser for help on using the repository browser.