#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HEAL.Attic;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [Item("Parameter Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the parameters used.")]
  [StorableType("24B68851-036D-4446-BD6F-3823E9028FF4")]
  public class SymbolicRegressionParameterOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string ParameterOptimizationIterationsParameterName = "ParameterOptimizationIterations";
    private const string ParameterOptimizationImprovementParameterName = "ParameterOptimizationImprovement";
    private const string ParameterOptimizationProbabilityParameterName = "ParameterOptimizationProbability";
    private const string ParameterOptimizationRowsPercentageParameterName = "ParameterOptimizationRowsPercentage";
    private const string UpdateParametersInTreeParameterName = "UpdateParametersInSymbolicExpressionTree";
    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

    private const string FunctionEvaluationsResultParameterName = "Parameters Optimization Function Evaluations";
    private const string GradientEvaluationsResultParameterName = "Parameters Optimization Gradient Evaluations";
    private const string CountEvaluationsParameterName = "Count Function and Gradient Evaluations";

    public IFixedValueParameter<IntValue> ParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> ParameterOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ParameterOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ParameterOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ParameterOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ParameterOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ParameterOptimizationRowsPercentageParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateParametersInTreeParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateParametersInTreeParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
    }

    public IResultParameter<IntValue> FunctionEvaluationsResultParameter {
      get { return (IResultParameter<IntValue>)Parameters[FunctionEvaluationsResultParameterName]; }
    }
    public IResultParameter<IntValue> GradientEvaluationsResultParameter {
      get { return (IResultParameter<IntValue>)Parameters[GradientEvaluationsResultParameterName]; }
    }
    public IFixedValueParameter<BoolValue> CountEvaluationsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CountEvaluationsParameterName]; }
    }

    public IntValue ParameterOptimizationIterations {
      get { return ParameterOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ParameterOptimizationImprovement {
      get { return ParameterOptimizationImprovementParameter.Value; }
    }
    public PercentValue ParameterOptimizationProbability {
      get { return ParameterOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ParameterOptimizationRowsPercentage {
      get { return ParameterOptimizationRowsPercentageParameter.Value; }
    }
    public bool UpdateParametersInTree {
      get { return UpdateParametersInTreeParameter.Value.Value; }
      set { UpdateParametersInTreeParameter.Value.Value = value; }
    }

    public bool UpdateVariableWeights {
      get { return UpdateVariableWeightsParameter.Value.Value; }
      set { UpdateVariableWeightsParameter.Value.Value = value; }
    }

    public bool CountEvaluations {
      get { return CountEvaluationsParameter.Value.Value; }
      set { CountEvaluationsParameter.Value.Value = value; }
    }

    public override bool Maximization {
      get { return true; }
    }

    [StorableConstructor]
    protected SymbolicRegressionParameterOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
    protected SymbolicRegressionParameterOptimizationEvaluator(SymbolicRegressionParameterOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public SymbolicRegressionParameterOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the parameters of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ParameterOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the parameter optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationProbabilityParameterName, "Determines the probability that the parameters are optimized", new PercentValue(1)));
      Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for parameter optimization", new PercentValue(1)));
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateParametersInTreeParameterName, "Determines if the parameters in the tree should be overwritten by the optimized parameters.", new BoolValue(true)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });

      Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
      Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the parameter optimization evaluator", "Results", new IntValue()));
      Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the parameter optimization evaluator", "Results", new IntValue()));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionParameterOptimizationEvaluator(this, cloner);
    }

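    // Backward compatibility: after deserialization of models stored with older versions, the
    // parameters kept under the former "constant optimization" names are migrated to the new
    // parameter optimization names; missing parameters are added with their default values.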
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(UpdateParametersInTreeParameterName)) {
        if (Parameters.ContainsKey("UpdateConstantsInSymbolicExpressionTree")) {
          Parameters.Add(new FixedValueParameter<BoolValue>(UpdateParametersInTreeParameterName, "Determines if the parameters in the tree should be overwritten by the optimized parameters.", (BoolValue)Parameters["UpdateConstantsInSymbolicExpressionTree"].ActualValue));
          Parameters.Remove("UpdateConstantsInSymbolicExpressionTree");
        } else {
          Parameters.Add(new FixedValueParameter<BoolValue>(UpdateParametersInTreeParameterName, "Determines if the parameters in the tree should be overwritten by the optimized parameters.", new BoolValue(true)));
        }
      }

      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));

      if (!Parameters.ContainsKey(CountEvaluationsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));

      if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName)) {
        if (Parameters.ContainsKey("Constants Optimization Function Evaluations")) {
          Parameters.Remove("Constants Optimization Function Evaluations");
        }
        Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the parameter optimization evaluator", "Results", new IntValue()));
      }

      if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName)) {
        if (Parameters.ContainsKey("Constants Optimization Gradient Evaluations")) {
          Parameters.Remove("Constants Optimization Gradient Evaluations");
        }
        Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the parameter optimization evaluator", "Results", new IntValue()));
      }

      if (!Parameters.ContainsKey(ParameterOptimizationIterationsParameterName)) {
        if (Parameters.ContainsKey("ConstantOptimizationIterations")) {
          Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the parameters of a symbolic expression tree (0 indicates other or default stopping criterion).", (IntValue)Parameters["ConstantOptimizationIterations"].ActualValue));
          Parameters.Remove("ConstantOptimizationIterations");
        } else {
          Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the parameters of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
        }
      }

      if (!Parameters.ContainsKey(ParameterOptimizationImprovementParameterName)) {
        if (Parameters.ContainsKey("CosntantOptimizationImprovement")) {
          Parameters.Add(new FixedValueParameter<DoubleValue>(ParameterOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the parameter optimization to continue with it (0 indicates other or default stopping criterion).",
            (DoubleValue)Parameters["CosntantOptimizationImprovement"].ActualValue) { Hidden = true });
          Parameters.Remove("CosntantOptimizationImprovement");
        } else {
          Parameters.Add(new FixedValueParameter<DoubleValue>(ParameterOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the parameter optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true });
        }
      }

      if (!Parameters.ContainsKey(ParameterOptimizationProbabilityParameterName)) {
        if (Parameters.ContainsKey("ConstantOptimizationProbability")) {
          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationProbabilityParameterName, "Determines the probability that the parameters are optimized",
            (PercentValue)Parameters["ConstantOptimizationProbability"].ActualValue));
          Parameters.Remove("ConstantOptimizationProbability");
        } else {
          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationProbabilityParameterName, "Determines the probability that the parameters are optimized", new PercentValue(1)));
        }
      }

      if (!Parameters.ContainsKey(ParameterOptimizationRowsPercentageParameterName)) {
        if (Parameters.ContainsKey("ConstantOptimizationRowsPercentage")) {
          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for parameter optimization", (PercentValue)Parameters["ConstantOptimizationRowsPercentage"].ActualValue));
          Parameters.Remove("ConstantOptimizationRowsPercentage");
        } else {
          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for parameter optimization", new PercentValue(1)));
        }
      }
    }

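    // With probability ParameterOptimizationProbability the tree's numeric parameters are optimized
    // on a (possibly reduced) set of rows; otherwise only the Pearson R² quality is computed.
    // If the optimization rows differ from the regular evaluation rows, the quality is recomputed
    // on the regular rows afterwards. The locker guards the shared evaluation-count result values.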
    private static readonly object locker = new object();
    public override IOperation InstrumentedApply() {
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      double quality;
      if (RandomParameter.ActualValue.NextDouble() < ParameterOptimizationProbability.Value) {
        IEnumerable<int> parameterOptimizationRows = GenerateRowsToEvaluate(ParameterOptimizationRowsPercentage.Value);
        var counter = new EvaluationsCounter();
        quality = OptimizeParameters(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
          parameterOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ParameterOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateParametersInTree: UpdateParametersInTree, counter: counter);

        if (ParameterOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
        }

        if (CountEvaluations) {
          lock (locker) {
            FunctionEvaluationsResultParameter.ActualValue.Value += counter.FunctionEvaluations;
            GradientEvaluationsResultParameter.ActualValue.Value += counter.GradientEvaluations;
          }
        }

      } else {
        var evaluationRows = GenerateRowsToEvaluate();
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);

      return base.InstrumentedApply();
    }

    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;
      FunctionEvaluationsResultParameter.ExecutionContext = context;
      GradientEvaluationsResultParameter.ExecutionContext = context;

      // The Pearson R² evaluator is used on purpose instead of the parameter-optimizing evaluation,
      // because Evaluate() is used to get the quality of evolved models on
      // different partitions of the dataset (e.g., best validation model).
      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;
      FunctionEvaluationsResultParameter.ExecutionContext = null;
      GradientEvaluationsResultParameter.ExecutionContext = null;

      return r2;
    }

    public class EvaluationsCounter {
      public int FunctionEvaluations = 0;
      public int GradientEvaluations = 0;
    }

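    // Optimizes the numeric parameters of the tree by least-squares fitting (ALGLIB lsfit) using
    // gradients obtained from the AutoDiff conversion of the tree. Returns the Pearson R² of the
    // tree after optimization, or the original quality if the fit fails or degrades the quality.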
    public static double OptimizeParameters(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling,
      int maxIterations, bool updateVariableWeights = true,
      double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
      bool updateParametersInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

      // Numeric parameters in the tree become variables for parameter optimization.
      // Variables in the tree become parameters (fixed values) for parameter optimization.
      // For each parameter (variable in the original tree) we store the
      // variable name, variable value (for factor vars) and lag as a DataForVariable object.
      // A dictionary is used to find parameters.
      double[] initialParameters;
      var parameters = new List<TreeToAutoDiffTermConverter.DataForVariable>();

      TreeToAutoDiffTermConverter.ParametricFunction func;
      TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialParameters, out func, out func_grad))
        throw new NotSupportedException("Could not optimize parameters of symbolic expression tree due to unsupported symbols used in the tree.");
      if (parameters.Count == 0) return 0.0; // constant expressions always have an R² of 0.0
      var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

      // extract initial parameters
      double[] c;
      if (applyLinearScaling) {
        c = new double[initialParameters.Length + 2];
        c[0] = 0.0;
        c[1] = 1.0;
        Array.Copy(initialParameters, 0, c, 2, initialParameters.Length);
      } else {
        c = (double[])initialParameters.Clone();
      }

      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      if (counter == null) counter = new EvaluationsCounter();
      var rowEvaluationsCounter = new EvaluationsCounter();

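      // ALGLIB least-squares fit state: the parameter vector c is fitted to minimize the squared
      // error between the parametric function and the target values on the selected rows. When
      // linear scaling is applied, c[0] (offset) and c[1] (scale) are fitted along with the tree parameters.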
      alglib.lsfitstate state;
      alglib.lsfitreport rep;
      int retVal;

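      // Build the input matrix x (one column per converted tree variable) and the target vector y
      // for the selected rows; factor variables are binary-encoded against their stored value.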
      IDataset ds = problemData.Dataset;
      double[,] x = new double[rows.Count(), parameters.Count];
      int row = 0;
      foreach (var r in rows) {
        int col = 0;
        foreach (var info in parameterEntries) {
          if (ds.VariableHasType<double>(info.variableName)) {
            x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
          } else if (ds.VariableHasType<string>(info.variableName)) {
            x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
          } else throw new InvalidProgramException("found a variable of unknown type");
          col++;
        }
        row++;
      }
      double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
      int n = x.GetLength(0);
      int m = x.GetLength(1);
      int k = c.Length;

      alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(func);
      alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(func_grad);
      alglib.ndimensional_rep xrep = (p, f, obj) => iterationCallback(p, f, obj);

      try {
        alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
        alglib.lsfitsetcond(state, 0.0, maxIterations);
        alglib.lsfitsetxrep(state, iterationCallback != null);
        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
        alglib.lsfitresults(state, out retVal, out c, out rep);
      } catch (ArithmeticException) {
        return originalQuality;
      } catch (alglib.alglibexception) {
        return originalQuality;
      }

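      // The callbacks are invoked once per row, so dividing by n reports evaluations in units of
      // full passes over the optimization rows.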
      counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
      counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;

      // retVal == -7 => parameter optimization failed due to wrong gradient
      //          -8 => optimizer detected NAN/INF in the target function and/or gradient
      if (retVal != -7 && retVal != -8) {
        if (applyLinearScaling) {
          var tmp = new double[c.Length - 2];
          Array.Copy(c, 2, tmp, 0, tmp.Length);
          UpdateParameters(tree, tmp, updateVariableWeights);
        } else UpdateParameters(tree, c, updateVariableWeights);
      }
      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      if (!updateParametersInTree) UpdateParameters(tree, initialParameters, updateVariableWeights);

      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
        UpdateParameters(tree, initialParameters, updateVariableWeights);
        return originalQuality;
      }
      return quality;
    }

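    // Writes the (optimized) values back into the tree: terminal nodes are visited in prefix order,
    // which is expected to match the parameter order produced by TreeToAutoDiffTermConverter.
    // Power exponents are skipped, and variable weights are only touched when updateVariableWeights is set.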
    private static void UpdateParameters(ISymbolicExpressionTree tree, double[] parameters, bool updateVariableWeights) {
      int i = 0;
      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
        NumberTreeNode numberTreeNode = node as NumberTreeNode;
        VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase;
        FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode;
        if (numberTreeNode != null) {
          if (numberTreeNode.Parent.Symbol is Power
              && numberTreeNode.Parent.GetSubtree(1) == numberTreeNode) continue; // exponents in powers are not optimized (see TreeToAutoDiffTermConverter)
          numberTreeNode.Value = parameters[i++];
        } else if (updateVariableWeights && variableTreeNodeBase != null)
          variableTreeNodeBase.Weight = parameters[i++];
        else if (factorVarTreeNode != null) {
          for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
            factorVarTreeNode.Weights[j] = parameters[i++];
        }
      }
    }

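    // Adapters that wrap the AutoDiff-generated function and gradient in the ALGLIB callback
    // signatures; the object argument carries the EvaluationsCounter used for bookkeeping.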
    private static alglib.ndimensional_pfunc CreatePFunc(TreeToAutoDiffTermConverter.ParametricFunction func) {
      return (double[] c, double[] x, ref double fx, object o) => {
        fx = func(c, x);
        var counter = (EvaluationsCounter)o;
        counter.FunctionEvaluations++;
      };
    }

    private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
      return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
        var tuple = func_grad(c, x);
        fx = tuple.Item2;
        Array.Copy(tuple.Item1, grad, grad.Length);
        var counter = (EvaluationsCounter)o;
        counter.GradientEvaluations++;
      };
    }
    public static bool CanOptimizeParameters(ISymbolicExpressionTree tree) {
      return TreeToAutoDiffTermConverter.IsCompatible(tree);
    }
  }
}