
source: trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs @ 14378

Last change on this file since 14378 was 14378, checked in by gkronber, 7 years ago

#2697:

  • created a folder for all classes related to transformation from and to trees
  • created a transformator which takes a tree and uses AutoDiff to produce a function and gradient function for the tree.
  • moved code from SymbolicRegressionConstantOptimizationEvaluator to TreeToAutoDiffTermTransformator to make AutoDiff for trees more accessible
File size: 14.5 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [Item("Constant Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the constants used.")]
  [StorableClass]
  public class SymbolicRegressionConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
    }

    public IntValue ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ConstantOptimizationImprovement {
      get { return ConstantOptimizationImprovementParameter.Value; }
    }
    public PercentValue ConstantOptimizationProbability {
      get { return ConstantOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }
    public bool UpdateConstantsInTree {
      get { return UpdateConstantsInTreeParameter.Value.Value; }
      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    }

    public bool UpdateVariableWeights {
      get { return UpdateVariableWeightsParameter.Value.Value; }
      set { UpdateVariableWeightsParameter.Value.Value = value; }
    }

    public override bool Maximization {
      get { return true; }
    }

    [StorableConstructor]
    protected SymbolicRegressionConstantOptimizationEvaluator(bool deserializing) : base(deserializing) { }
    protected SymbolicRegressionConstantOptimizationEvaluator(SymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public SymbolicRegressionConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10), true));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true) { Hidden = true });
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
    }

    public override IOperation InstrumentedApply() {
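      // With probability ConstantOptimizationProbability the constants of the tree are optimized
      // (possibly on a subset of the rows only); otherwise the tree is simply evaluated with the
      // standard Pearson R² evaluator.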
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      double quality;
      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);

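        // If the constants were optimized on a row subset, recompute the reported quality on the
        // regular evaluation rows so that it stays comparable to the other evaluators.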
        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
        }
      } else {
        var evaluationRows = GenerateRowsToEvaluate();
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);

      return base.InstrumentedApply();
    }

    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;

      // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
      // because Evaluate() is used to get the quality of evolved models on
      // different partitions of the dataset (e.g., best validation model)
      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;

      return r2;
    }

    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true) {

      string[] variableNames;
      int[] lags;
      double[] constants;

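      // TryTransformToAutoDiff compiles the tree into a parametric function func(c, x) and its
      // gradient func_grad; variableNames and lags describe the dataset columns that must be fed
      // in as x, and constants holds the current constant values extracted from the tree.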
      TreeToAutoDiffTermTransformator.ParametricFunction func;
      TreeToAutoDiffTermTransformator.ParametricFunctionGradient func_grad;
      if (!TreeToAutoDiffTermTransformator.TryTransformToAutoDiff(tree, updateVariableWeights, out variableNames, out lags, out constants, out func, out func_grad))
        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
      if (variableNames.Length == 0) return 0.0;

      // extract initial constants
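      // The two extra leading entries (offset 0.0, slope 1.0) correspond to the linear-scaling
      // parameters that are fitted together with the tree constants; only c[2..] is later written
      // back into the tree.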
      double[] c = new double[constants.Length + 2];
      c[0] = 0.0;
      c[1] = 1.0;
      Array.Copy(constants, 0, c, 2, constants.Length);
      double[] originalConstants = (double[])c.Clone();
      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      alglib.lsfitstate state;
      alglib.lsfitreport rep;
      int info;

      // TODO: refactor
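      // Materialize the input matrix x (one column per referenced variable, shifted by its lag)
      // and the target vector y for the selected rows, as required by alglib's lsfit interface.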
      IDataset ds = problemData.Dataset;
      double[,] x = new double[rows.Count(), variableNames.Length];
      int row = 0;
      foreach (var r in rows) {
        for (int col = 0; col < variableNames.Length; col++) {
          int lag = lags[col];
          x[row, col] = ds.GetDoubleValue(variableNames[col], r + lag);
        }
        row++;
      }
      double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
      int n = x.GetLength(0);
      int m = x.GetLength(1);
      int k = c.Length;

      alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(func);
      alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(func_grad);

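      // Nonlinear least-squares fit (alglib lsfit) of the parameter vector c using the
      // AutoDiff-generated function and gradient callbacks; on arithmetic or alglib errors the
      // original (unoptimized) quality is returned unchanged.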
      try {
        alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
        alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
        //alglib.lsfitsetgradientcheck(state, 0.001);
        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, null, null);
        alglib.lsfitresults(state, out info, out c, out rep);
      } catch (ArithmeticException) {
        return originalQuality;
      } catch (alglib.alglibexception) {
        return originalQuality;
      }

      // info == -7 => constant optimization failed due to wrong gradient
      if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

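      // Keep the optimized constants only if the caller wants them written back and the resulting
      // quality is not noticeably worse (or NaN); otherwise restore the original constant values.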
      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
        UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
        return originalQuality;
      }
      return quality;
    }

    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
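      // Walks the tree in prefix order and writes the given values back into constant nodes (and
      // into variable weights, if enabled); the constants array is expected to be in the same
      // prefix order in which the transformator extracted the values.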
      int i = 0;
      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
        VariableTreeNode variableTreeNode = node as VariableTreeNode;
        if (constantTreeNode != null)
          constantTreeNode.Value = constants[i++];
        else if (updateVariableWeights && variableTreeNode != null)
          variableTreeNode.Weight = constants[i++];
      }
    }

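    // Adapter lambdas that wrap the AutoDiff-generated delegates into the callback signatures
    // (ndimensional_pfunc / ndimensional_pgrad) expected by alglib's lsfitfit.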
    private static alglib.ndimensional_pfunc CreatePFunc(TreeToAutoDiffTermTransformator.ParametricFunction func) {
      return (double[] c, double[] x, ref double fx, object o) => {
        fx = func(c, x);
      };
    }

    private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermTransformator.ParametricFunctionGradient func_grad) {
      return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
        var tuple = func_grad(c, x);
        fx = tuple.Item2;
        Array.Copy(tuple.Item1, grad, grad.Length);
      };
    }
246
[8730]247    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
[14378]248      return TreeToAutoDiffTermTransformator.IsCompatible(tree);
[8730]249    }
[6256]250  }
251}