#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
using DiffSharp.Interop.Float64;

namespace HeuristicLab.Algorithms.DataAnalysis.Experimental {
  [Item("Constant Optimization Evaluator with Constraints", "")]
  [StorableClass]
  public class SymbolicRegressionConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
    }

    public IntValue ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ConstantOptimizationImprovement {
      get { return ConstantOptimizationImprovementParameter.Value; }
    }
    public PercentValue ConstantOptimizationProbability {
      get { return ConstantOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }
    public bool UpdateConstantsInTree {
      get { return UpdateConstantsInTreeParameter.Value.Value; }
      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    }

    public bool UpdateVariableWeights {
      get { return UpdateVariableWeightsParameter.Value.Value; }
      set { UpdateVariableWeightsParameter.Value.Value = value; }
    }

    public override bool Maximization {
      get { return true; }
    }

    [StorableConstructor]
    protected SymbolicRegressionConstantOptimizationEvaluator(bool deserializing) : base(deserializing) { }
    protected SymbolicRegressionConstantOptimizationEvaluator(SymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public SymbolicRegressionConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates another or the default stopping criterion).", new IntValue(10), true));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates another or the default stopping criterion).", new DoubleValue(0), true) { Hidden = true });
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
    }

    public override IOperation InstrumentedApply() {
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      double quality;
      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);

        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
        }
      } else {
        var evaluationRows = GenerateRowsToEvaluate();
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);

      return base.InstrumentedApply();
    }

    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;

      // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
      // because Evaluate() is used to get the quality of evolved models on
      // different partitions of the dataset (e.g., best validation model)
      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;

      return r2;
    }

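    // Optimizes the constants (and, optionally, the variable weights) of the tree using
    // ALGLIB's nonlinearly constrained optimizer (minnlc), with constraint values taken
    // from dedicated columns of the dataset (see the convention documented below).
    // Returns the Pearson R² of the tree after optimization; on optimizer failure or a
    // NaN result the original quality is returned.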
    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling,
      int maxIterations, bool updateVariableWeights = true,
      double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
      bool updateConstantsInTree = true) {

      // Numeric constants in the tree become the free variables for constant optimization,
      // while variables in the tree become fixed inputs (parameters) for constant optimization.
      // For each parameter (variable in the original tree) the variable name, the variable
      // value (for factor variables), and the lag are stored as a DataForVariable object.
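      // Illustrative example (not taken from the converter's spec): for the tree 3.5 * x + 2.0
      // the optimizer would see c = [3.5, 2.0] as free variables, while x is bound row by row
      // from the dataset before each evaluation of the compiled function.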
      double[] initialConstants;
      List<TreeToDiffSharpConverter.DataForVariable> parameters; // assigned by TryConvertToDiffSharp

      Func<DV, D> func;
      if (!TreeToDiffSharpConverter.TryConvertToDiffSharp(tree, updateVariableWeights, out parameters, out initialConstants,
        out func))
        throw new NotSupportedException("Could not optimize constants of the symbolic expression tree because it contains unsupported symbols.");
      if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have an R² of 0.0

      var parameterEntries = parameters.ToArray(); // order of entries must match the column order of x

      // extract initial constants
      double[] c = new double[initialConstants.Length];
      double[] s = new double[c.Length];
      {
        Array.Copy(initialConstants, 0, c, 0, initialConstants.Length);

        // alternative initialization with explicit linear-scaling parameters (disabled):
        // c[0] = 1.0;
        // c[1] = 0.0;
        // Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);

        // s[0] = 1.0;
        // s[1] = 1.0;
        // Array.Copy(initialConstants.Select(ci => Math.Abs(ci)).ToArray()
        //   , 0, s, 2, initialConstants.Length);
      }
      s = Enumerable.Repeat(0.01, c.Length).ToArray();
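      // uniform scale for all optimized constants; minnlcsetscale uses these values for
      // stopping tests and internal scaling (the value 0.01 is an untuned heuristic choice
      // of this experimental code)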

      double[] originalConstants = (double[])c.Clone();
      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      alglib.minnlcstate state;
      alglib.minnlcreport rep;

      IDataset ds = problemData.Dataset;
      double[,] x = new double[rows.Count(), parameters.Count];
      int col = 0;
      int row = 0;
      foreach (var info in parameterEntries) {
        row = 0;
        // copy training rows
        foreach (var r in rows) {
          if (ds.VariableHasType<double>(info.variableName)) {
            x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
          } else if (ds.VariableHasType<string>(info.variableName)) {
            x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
          } else throw new InvalidProgramException("found a variable of unknown type");

          row++;
        }
        col++;
      }

      var target = problemData.TargetVariable;

      // determine the rows with constraints by checking whether any constraint column contains a value
      var constraintColNames = parameterEntries
        .Select(e => e.variableName)
        .Select(name => string.Format("df/d({0})", name))
        .ToArray();
      var constraintRows = Enumerable.Range(0, problemData.Dataset.Rows)
        .Where(rIdx => constraintColNames.Any(name => !double.IsNaN(ds.GetDoubleValue(name, rIdx))))
        .ToArray(); // materialize; the sequence is enumerated several times below

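      // Dataset convention used by this evaluator (as read by the code below): for each input
      // variable xi there may be a column "df/d(xi)" with the constrained partial derivative
      // value and a column "df/d(xi) constraint-type" containing "LEQ", "GEQ", or "EQ";
      // analogously, "f(x)" and "f(x) constraint-type" constrain the function value itself.
      // Cells without a value (NaN) mean no constraint for that row.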
      double[,] constraintX = new double[constraintRows.Length, parameters.Count];      // inputs at which the constraints apply
      double[,] constraints = new double[constraintRows.Length, parameters.Count + 1];  // constraint values; +1 column for constraints on f(x)
      string[,] comp = new string[constraintRows.Length, parameters.Count + 1];         // comparison types: LEQ, GEQ, EQ
      int eqConstraints = 0;
      int ieqConstraints = 0;
      col = 0;
      foreach (var info in parameterEntries) {
        row = 0;
        // find the matching df/d(xi) column
        var colName = string.Format("df/d({0})", info.variableName);
        var compColName = string.Format("df/d({0}) constraint-type", info.variableName);

        foreach (var r in constraintRows) {
          constraintX[row, col] = ds.GetDoubleValue(info.variableName, r);
          if (ds.VariableNames.Contains(colName)) {
            constraints[row, col] = ds.GetDoubleValue(colName, r);
            comp[row, col] = ds.GetStringValue(compColName, r);

            if (comp[row, col] == "EQ") eqConstraints++;
            else if (comp[row, col] == "LEQ" || comp[row, col] == "GEQ") ieqConstraints++;
          }

          row++;
        }
        col++;
      }

      // f(x) constraint (last column)
      row = 0;
      col = constraints.GetLength(1) - 1;
      foreach (var r in constraintRows) {
        constraints[row, col] = ds.GetDoubleValue("f(x)", r);
        comp[row, col] = ds.GetStringValue("f(x) constraint-type", r);
        if (comp[row, col] == "EQ") eqConstraints++;
        else if (comp[row, col] == "LEQ" || comp[row, col] == "GEQ") ieqConstraints++;
        row++;
      }

      double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();

      alglib.ndimensional_jac jac = CreateJac(x, y, constraintX, constraints, comp, func);
      double rho = 10000;
      int outeriters = 3;
      int updateFreq = 10;
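      // minnlc handles the nonlinear constraints with an augmented Lagrangian (AUL) method:
      // rho is the penalty coefficient and outeriters the number of outer iterations
      // (multiplier updates); both values are untuned defaults of this experimental code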
      try {
        alglib.minnlccreate(c, out state);
        alglib.minnlcsetalgoaul(state, rho, outeriters);
        alglib.minnlcsetcond(state, 0.0, 0.0, 0.0, maxIterations);
        alglib.minnlcsetscale(state, s);
        alglib.minnlcsetprecexactlowrank(state, updateFreq);
        alglib.minnlcsetnlc(state, eqConstraints, ieqConstraints);
        alglib.minnlcoptimize(state, jac, null, null);
        alglib.minnlcresults(state, out c, out rep);
      } catch (ArithmeticException) {
        return originalQuality;
      } catch (alglib.alglibexception) {
        return originalQuality;
      }

      // -7 => constant optimization failed due to a wrong gradient
      // -8 => integrity check failed (e.g., the gradient is NaN)
      if (rep.terminationtype != -7 && rep.terminationtype != -8)
        UpdateConstants(tree, c.ToArray(), updateVariableWeights);
      else {
        UpdateConstants(tree, Enumerable.Repeat(0.0, c.Length).ToArray(), updateVariableWeights);
      }
      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
      // debugging output: quality before/after, number of violated constraints, termination type
      Console.WriteLine("{0:N4} {1:N4} {2} {3}", originalQuality, quality, state.fi.Skip(1).Count(fii => fii > 0), rep.terminationtype);

      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.ToArray(), updateVariableWeights);
      if (double.IsNaN(quality)) { // disabled: || originalQuality - quality > 0.001
        UpdateConstants(tree, originalConstants.ToArray(), updateVariableWeights);
        return originalQuality;
      }
      return quality;
    }

    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
      int i = 0;
      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
        VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase;
        FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode;
        if (constantTreeNode != null)
          constantTreeNode.Value = constants[i++];
        else if (updateVariableWeights && variableTreeNodeBase != null)
          variableTreeNodeBase.Weight = constants[i++];
        else if (factorVarTreeNode != null) {
          for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
            factorVarTreeNode.Weights[j] = constants[i++];
        }
      }
    }

    private static alglib.ndimensional_jac CreateJac(
      double[,] x,            // one row per training row, one column per parameter (tree variable)
      double[] y,             // target values; same number of rows as x
      double[,] constraintX,  // inputs at which the constraints apply; same number of columns as x
      double[,] constraints,  // df/d(xi) and f(x) constraint values; same rows as constraintX, one extra column for f(x)
      string[,] comparison,   // {LEQ, GEQ, EQ}; same size as constraints
      Func<DV, D> func) {
      Trace.Assert(x.GetLength(0) == y.Length);
      Trace.Assert(x.GetLength(1) == constraintX.GetLength(1));
      Trace.Assert(constraintX.GetLength(0) == constraints.GetLength(0));
      Trace.Assert(constraintX.GetLength(1) == constraints.GetLength(1) - 1);
      Trace.Assert(constraints.GetLength(0) == comparison.GetLength(0));
      Trace.Assert(constraints.GetLength(1) == comparison.GetLength(1));
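      // Layout of the returned callback: fi[0] / jac[0, ..] hold the sum-of-squared-errors
      // objective and its gradient w.r.t. the constants; fi[1..] / jac[1.., ..] hold one
      // entry per active constraint, negated for GEQ so that alglib only sees <= constraints.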
      return (double[] c, double[] fi, double[,] jac, object o) => {
        // objective function is the sum of squared errors
        int nRows = y.Length;
        int nParams = x.GetLength(1);
        // zero fi and jac
        Array.Clear(fi, 0, fi.Length);
        Array.Clear(jac, 0, jac.Length);
        var p = new double[nParams + c.Length];
        Array.Copy(c, 0, p, nParams, c.Length); // copy c to the end of the function parameter vector
        for (int rowIdx = 0; rowIdx < nRows; rowIdx++) {
          // copy x_i to the beginning of the function parameter vector
          for (int cIdx = 0; cIdx < nParams; cIdx++)
            p[cIdx] = x[rowIdx, cIdx];

          double f = (double)func(p);
          double[] g = (double[])AD.Grad(func, p);
          var e = y[rowIdx] - f;
          fi[0] += e * e;
          // update the gradient
          for (int colIdx = 0; colIdx < c.Length; colIdx++) {
            jac[0, colIdx] += -2 * e * g[nParams + colIdx]; // skip the gradient elements for the variable values
          }
        }

        int fidx = 1;

        // equality constraints
        for (int rowIdx = 0; rowIdx < constraintX.GetLength(0); rowIdx++) {
          for (var colIdx = 0; colIdx < comparison.GetLength(1); colIdx++) {
            if (comparison[rowIdx, colIdx] == "EQ") {
              throw new NotSupportedException("Equality constraints are not supported yet.");
            }
          }
        }
        // inequality constraints
        for (int rowIdx = 0; rowIdx < constraintX.GetLength(0); rowIdx++) {
          for (int colIdx = 0; colIdx < constraints.GetLength(1); colIdx++) {
            // there is a constraint value for this cell
            if (!double.IsNaN(constraints[rowIdx, colIdx]) && !string.IsNullOrEmpty(comparison[rowIdx, colIdx])) {
              var factor = (comparison[rowIdx, colIdx] == "LEQ") ? 1.0
                : comparison[rowIdx, colIdx] == "GEQ" ? -1.0 : 0.0;

              // copy x_i to the beginning of the function parameter vector
              for (int cIdx = 0; cIdx < nParams; cIdx++)
                p[cIdx] = constraintX[rowIdx, cIdx];

              if (colIdx == constraints.GetLength(1) - 1) {
                // f(x) constraint (last column)
                fi[fidx] = factor * ((double)(func(p)) - constraints[rowIdx, colIdx]);
                var g = (double[])AD.Grad(func, p);
                for (int jacIdx = 0; jacIdx < c.Length; jacIdx++) {
                  jac[fidx, jacIdx] = factor * g[nParams + jacIdx]; // skip the gradient elements for the variable values
                }
                fidx++;
              } else {
                // df/d(xi) constraint: the constrained quantity is itself a derivative, so its
                // gradient w.r.t. the constants is a row of the Hessian
                var g = (double[])AD.Grad(func, p);
                fi[fidx] = factor * (g[colIdx] - constraints[rowIdx, colIdx]); // subtract the bound (analogous to the f(x) constraint above)

                var hess = AD.Hessian(func, p);
                for (int jacIdx = 0; jacIdx < c.Length; jacIdx++) {
                  jac[fidx, jacIdx] = factor * (double)hess[colIdx, nParams + jacIdx]; // skip the elements for the variable values
                }
                fidx++;
              }
            }
          }
        }
      };
    }

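    // Note: compatibility is still checked with TreeToAutoDiffTermConverter although the
    // optimization itself uses TreeToDiffSharpConverter; this assumes both converters
    // support the same set of symbols.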
    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
      return TreeToAutoDiffTermConverter.IsCompatible(tree);
    }
  }
}