
source: trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionConstantOptimizationEvaluator.cs @ 6934

Last change on this file since 6934 was 6555, checked in by mkommend, 13 years ago

#1529: Corrected naming errors for GECCO tutorial.

File size: 12.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2010 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [Item("Constant Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the constants used.")]
  [StorableClass]
  public class SymbolicRegressionConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";

    private const string EvaluatedTreesResultName = "EvaluatedTrees";
    private const string EvaluatedTreeNodesResultName = "EvaluatedTreeNodes";

    public ILookupParameter<IntValue> EvaluatedTreesParameter {
      get { return (ILookupParameter<IntValue>)Parameters[EvaluatedTreesResultName]; }
    }
    public ILookupParameter<IntValue> EvaluatedTreeNodesParameter {
      get { return (ILookupParameter<IntValue>)Parameters[EvaluatedTreeNodesResultName]; }
    }

    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }

    public IntValue ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ConstantOptimizationImprovement {
      get { return ConstantOptimizationImprovementParameter.Value; }
    }
    public PercentValue ConstantOptimizationProbability {
      get { return ConstantOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }

    public override bool Maximization {
      get { return true; }
    }

    [StorableConstructor]
    protected SymbolicRegressionConstantOptimizationEvaluator(bool deserializing) : base(deserializing) { }
    protected SymbolicRegressionConstantOptimizationEvaluator(SymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public SymbolicRegressionConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(3), true));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization.", new PercentValue(1), true));

      Parameters.Add(new LookupParameter<IntValue>(EvaluatedTreesResultName));
      Parameters.Add(new LookupParameter<IntValue>(EvaluatedTreeNodesResultName));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }

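    // With probability ConstantOptimizationProbability the constants of the tree are optimized on the
    // configured percentage of rows; if that percentage differs from the relative number of evaluated
    // samples, the quality is afterwards recomputed as Pearson R² on the regular evaluation rows.
    // Otherwise the tree is only evaluated without touching its constants.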
    public override IOperation Apply() {
      AddResults();
      int seed = RandomParameter.ActualValue.Next();
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      double quality;
      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
          constantOptimizationRows, ConstantOptimizationImprovement.Value, ConstantOptimizationIterations.Value, 0.001,
          EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower,
          EvaluatedTreesParameter.ActualValue, EvaluatedTreeNodesParameter.ActualValue);
        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows);
        }
      } else {
        var evaluationRows = GenerateRowsToEvaluate();
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);
      EvaluatedTreesParameter.ActualValue.Value += 1;
      EvaluatedTreeNodesParameter.ActualValue.Value += solution.Length;

      if (Successor != null)
        return ExecutionContext.CreateOperation(Successor);
      else
        return null;
    }

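    // Lazily adds the EvaluatedTrees and EvaluatedTreeNodes counters to the root (global) scope
    // so that they can be looked up and incremented during evaluation.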
    private void AddResults() {
      if (EvaluatedTreesParameter.ActualValue == null) {
        var scope = ExecutionContext.Scope;
        while (scope.Parent != null)
          scope = scope.Parent;
        scope.Variables.Add(new Core.Variable(EvaluatedTreesResultName, new IntValue()));
      }
      if (EvaluatedTreeNodesParameter.ActualValue == null) {
        var scope = ExecutionContext.Scope;
        while (scope.Parent != null)
          scope = scope.Parent;
        scope.Variables.Add(new Core.Variable(EvaluatedTreeNodesResultName, new IntValue()));
      }
    }

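    // Temporarily attaches the interpreter and estimation limit parameters to the given execution
    // context, computes the Pearson R² of the tree on the given rows, and detaches the context again.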
    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;

      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;

      return r2;
    }

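    // Collects all terminal nodes (constant values and variable weights) into a coefficient vector,
    // runs ALGLIB's Levenberg-Marquardt optimizer (minlmcreatev, numerical differentiation with step
    // differentialStep, at most 'iterations' iterations), writes the optimized coefficients back into
    // the tree, and returns the resulting Pearson R² (the callback minimizes 1 - R²).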
    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
      IEnumerable<int> rows, double improvement, int iterations, double differentialStep, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
      List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
      double[] c = new double[terminalNodes.Count];
      int treeLength = tree.Length;

      // extract initial constants
      for (int i = 0; i < terminalNodes.Count; i++) {
        ConstantTreeNode constantTreeNode = terminalNodes[i] as ConstantTreeNode;
        if (constantTreeNode != null) c[i] = constantTreeNode.Value;
        VariableTreeNode variableTreeNode = terminalNodes[i] as VariableTreeNode;
        if (variableTreeNode != null) c[i] = variableTreeNode.Weight;
      }

      double epsg = 0;
      double epsf = improvement;
      double epsx = 0;
      int maxits = iterations;
      double diffstep = differentialStep;

      alglib.minlmstate state;
      alglib.minlmreport report;

      alglib.minlmcreatev(1, c, diffstep, out state);
      alglib.minlmsetcond(state, epsg, epsf, epsx, maxits);
      alglib.minlmoptimize(state, CreateCallBack(interpreter, tree, problemData, rows, upperEstimationLimit, lowerEstimationLimit, treeLength, evaluatedTrees, evaluatedTreeNodes), null, terminalNodes);
      alglib.minlmresults(state, out c, out report);

      // write optimized constants and weights back into the tree
      for (int i = 0; i < c.Length; i++) {
        ConstantTreeNode constantTreeNode = terminalNodes[i] as ConstantTreeNode;
        if (constantTreeNode != null) constantTreeNode.Value = c[i];
        VariableTreeNode variableTreeNode = terminalNodes[i] as VariableTreeNode;
        if (variableTreeNode != null) variableTreeNode.Weight = c[i];
      }

      // the callback reports fi[0] = 1 - R², so convert back to R²
      return (state.fi[0] - 1) * -1;
    }

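    // Builds the objective function passed to alglib.minlmoptimize: it writes the current argument
    // vector into the terminal nodes, evaluates the tree's Pearson R², and reports fi[0] = 1 - R²
    // (minlm minimizes), while incrementing the evaluation counters.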
    private static alglib.ndimensional_fvec CreateCallBack(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, double upperEstimationLimit, double lowerEstimationLimit, int treeLength, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
      return (double[] arg, double[] fi, object obj) => {
        // update constants of tree
        List<SymbolicExpressionTreeTerminalNode> terminalNodes = (List<SymbolicExpressionTreeTerminalNode>)obj;
        for (int i = 0; i < terminalNodes.Count; i++) {
          ConstantTreeNode constantTreeNode = terminalNodes[i] as ConstantTreeNode;
          if (constantTreeNode != null) constantTreeNode.Value = arg[i];
          VariableTreeNode variableTreeNode = terminalNodes[i] as VariableTreeNode;
          if (variableTreeNode != null) variableTreeNode.Weight = arg[i];
        }

        double quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows);

        fi[0] = 1 - quality;
        if (evaluatedTrees != null) evaluatedTrees.Value++;
        if (evaluatedTreeNodes != null) evaluatedTreeNodes.Value += treeLength;
      };
    }

  }
}
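
For orientation, a minimal usage sketch of the static entry point (an editorial illustration, not part of the file; the interpreter, tree, problem data and rows are assumed to be set up elsewhere):

// Hypothetical call site; 'interpreter', 'tree', 'problemData' and 'rows' are assumed to exist.
double r2 = SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(
  interpreter, tree, problemData, rows,
  0.0,     // improvement: relative-improvement stopping criterion (epsf), 0 = disabled
  3,       // iterations: maximum Levenberg-Marquardt iterations
  0.001);  // differentialStep: step size for numerical differentiation
// 'tree' now holds the optimized constant values and variable weights; r2 is the resulting Pearson R².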