#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
using DiffSharp.Interop.Float64;

namespace HeuristicLab.Algorithms.DataAnalysis.Experimental {
  [Item("Constant Optimization Evaluator with Constraints", "Optimizes the constants of symbolic regression models subject to constraints on f(x) and its partial derivatives.")]
  [StorableClass]
  public class SymbolicRegressionConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
    }

    public IntValue ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ConstantOptimizationImprovement {
      get { return ConstantOptimizationImprovementParameter.Value; }
    }
    public PercentValue ConstantOptimizationProbability {
      get { return ConstantOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }
    public bool UpdateConstantsInTree {
      get { return UpdateConstantsInTreeParameter.Value.Value; }
      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    }

    public bool UpdateVariableWeights {
      get { return UpdateVariableWeightsParameter.Value.Value; }
      set { UpdateVariableWeightsParameter.Value.Value = value; }
    }

    public override bool Maximization {
      get { return true; }
    }

    [StorableConstructor]
    protected SymbolicRegressionConstantOptimizationEvaluator(bool deserializing) : base(deserializing) { }
    protected SymbolicRegressionConstantOptimizationEvaluator(SymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public SymbolicRegressionConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10), true));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true) { Hidden = true });
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization.", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
    }

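    // With probability ConstantOptimizationProbability the constants of the tree are
    // optimized on a (possibly subsampled) set of training rows; if a subsample was
    // used, the reported quality is re-evaluated on the regular evaluation rows.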
    public override IOperation InstrumentedApply() {
      var solution = SymbolicExpressionTreeParameter.ActualValue;
      double quality;
      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);

        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
        }
      } else {
        var evaluationRows = GenerateRowsToEvaluate();
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);

      return base.InstrumentedApply();
    }

    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;

      // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
      // because Evaluate() is used to get the quality of evolved models on
      // different partitions of the dataset (e.g., best validation model)
      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;

      return r2;
    }

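    // Optimizes the numeric constants (and optionally the variable weights) of the tree:
    // the tree is converted into a DiffSharp function of (variable values, constants),
    // two extra entries for linear scaling (slope, offset) are prepended to the constants
    // vector, and the sum of squared errors is minimized with ALGLIB's augmented-Lagrangian
    // optimizer (minnlc) subject to the EQ/LEQ/GEQ constraints defined in the dataset.
    //
    // Hypothetical usage sketch (the interpreter/problemData instances are placeholders,
    // not part of this class):
    //   var r2 = OptimizeConstants(interpreter, tree, problemData,
    //     problemData.TrainingIndices, applyLinearScaling: true, maxIterations: 10);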
    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling,
      int maxIterations, bool updateVariableWeights = true,
      double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
      bool updateConstantsInTree = true) {

      // Numeric constants in the tree become the free variables of the optimization;
      // variables in the tree become fixed parameters. For each parameter (variable in
      // the original tree) the variable name, the variable value (for factor variables)
      // and the lag are stored as a DataForVariable object.
      double[] initialConstants;
      List<TreeToDiffSharpConverter.DataForVariable> parameters;

      Func<DV, D> func;
      if (!TreeToDiffSharpConverter.TryConvertToDiffSharp(tree, updateVariableWeights, out parameters, out initialConstants,
        out func))
        throw new NotSupportedException("Could not optimize the constants of the symbolic expression tree because it contains unsupported symbols.");
      if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have an R² of 0.0

      var parameterEntries = parameters.ToArray(); // the order of the entries must be the same as in x

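      // The first two entries of c are reserved for the linear scaling parameters
      // (initialized to slope 1 and offset 0); the remaining entries are the constants
      // extracted from the tree. The scaling entries are skipped again (Skip(2)) when
      // the optimized values are written back. s is the scale vector for minnlcsetscale.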
      // extract the initial constants
      double[] c = new double[initialConstants.Length + 2];
      double[] s = new double[c.Length];
      {
        c[0] = 1.0;
        c[1] = 0.0;
        Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);

        // alternative scaling based on the magnitude of the initial constants:
        // s[0] = 1.0;
        // s[1] = 1.0;
        // Array.Copy(initialConstants.Select(ci => Math.Abs(ci)).ToArray(), 0, s, 2, initialConstants.Length);
      }
      s = Enumerable.Repeat(0.01, c.Length).ToArray();

      double[] originalConstants = (double[])c.Clone();
      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      alglib.minnlcstate state;
      alglib.minnlcreport rep;

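      // x holds one column per parameter entry (same order as parameterEntries) and
      // one row per evaluated training row.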
      IDataset ds = problemData.Dataset;
      double[,] x = new double[ds.Rows, parameters.Count];
      int col = 0;
      int row = 0;
      foreach (var info in parameterEntries) {
        row = 0;
        // copy training rows
        foreach (var r in rows) {
          if (ds.VariableHasType<double>(info.variableName)) {
            x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
          } else if (ds.VariableHasType<string>(info.variableName)) {
            x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
          } else throw new InvalidProgramException("found a variable of unknown type");

          row++;
        }
        col++;
      }

      var target = problemData.TargetVariable;

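      // Constraints are defined directly in the dataset: a row defines a constraint iff
      // its "f(x) constraint-type" value is non-empty. For each input variable an
      // optional pair of columns "df/d(<var>)" and "df/d(<var>) constraint-type"
      // constrains the partial derivative, and the columns "f(x)" /
      // "f(x) constraint-type" constrain the function value itself. Valid constraint
      // types are EQ, LEQ and GEQ.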
      // rows that define constraints are marked by a non-empty "f(x) constraint-type" value
      var constraintRows = Enumerable.Range(0, problemData.Dataset.Rows).Where(rIdx => !string.IsNullOrEmpty(ds.GetStringValue("f(x) constraint-type", rIdx))).ToArray();
      double[,] constraints = new double[constraintRows.Length, parameters.Count + 1]; // +1 for the constraint on f(x)
      string[,] comp = new string[constraintRows.Length, parameters.Count + 1];
      int eqConstraints = 0;
      int ieqConstraints = 0;
      col = 0;
      foreach (var info in parameterEntries) {
        row = 0;
        // find the matching df/dx column
        var colName = string.Format("df/d({0})", info.variableName);
        var compColName = string.Format("df/d({0}) constraint-type", info.variableName);

        if (ds.VariableNames.Contains(colName)) {
          foreach (var r in constraintRows) {
            constraints[row, col] = ds.GetDoubleValue(colName, r);
            comp[row, col] = ds.GetStringValue(compColName, r);

            if (comp[row, col] == "EQ") eqConstraints++;
            else if (comp[row, col] == "LEQ" || comp[row, col] == "GEQ") ieqConstraints++;

            row++;
          }
        }
        col++;
      }

      // the constraint on f(x) goes into the last column
      row = 0;
      col = constraints.GetLength(1) - 1;
      foreach (var r in constraintRows) {
        constraints[row, col] = ds.GetDoubleValue("f(x)", r);
        comp[row, col] = ds.GetStringValue("f(x) constraint-type", r);
        if (comp[row, col] == "EQ") eqConstraints++;
        else if (comp[row, col] == "LEQ" || comp[row, col] == "GEQ") ieqConstraints++;
        row++;
      }

      double[] y = ds.GetDoubleValues(target, rows).ToArray();

      alglib.ndimensional_jac jac = CreateJac(x, y, constraints, comp, ds, func);
      double rho = 1000;
      int outeriters = 3;
      int updateFreq = 10;
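      // minnlc is configured with the augmented-Lagrangian algorithm (AUL): rho is the
      // penalty coefficient and outeriters the number of outer (multiplier) updates;
      // updateFreq controls the low-rank preconditioner refresh. The commented lsfit
      // code below is the unconstrained least-squares variant this replaces.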
      try {
        // alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
        // alglib.lsfitsetcond(state, 0.0, maxIterations);
        // //alglib.lsfitsetgradientcheck(state, 0.001);
        // alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, null, null);
        // alglib.lsfitresults(state, out retVal, out c, out rep);
        alglib.minnlccreate(c, out state);
        alglib.minnlcsetalgoaul(state, rho, outeriters);
        alglib.minnlcsetcond(state, 0.0, 0.0, 0.0, maxIterations);
        alglib.minnlcsetscale(state, s);
        alglib.minnlcsetprecexactlowrank(state, updateFreq);
        alglib.minnlcsetnlc(state, eqConstraints, ieqConstraints);
        alglib.minnlcoptimize(state, jac, null, null);
        alglib.minnlcresults(state, out c, out rep);
      } catch (ArithmeticException) {
        return originalQuality;
      } catch (alglib.alglibexception) {
        return originalQuality;
      }

      // termination type -7 => constant optimization failed due to a wrong gradient
      // termination type -8 => integrity check failed (e.g. the gradient contains NaN)
      if (rep.terminationtype != -7 && rep.terminationtype != -8)
        UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
      if (double.IsNaN(quality) /* || originalQuality - quality > 0.001 */) {
        UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
        return originalQuality;
      }
      return quality;
    }

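    // Writes the optimized values back into the tree. The terminal nodes are visited in
    // prefix order, which is assumed to match the extraction order used by
    // TreeToDiffSharpConverter, so constants[i] lines up with the i-th extracted value.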
    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
      int i = 0;
      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
        VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase;
        FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode;
        if (constantTreeNode != null)
          constantTreeNode.Value = constants[i++];
        else if (updateVariableWeights && variableTreeNodeBase != null)
          variableTreeNodeBase.Weight = constants[i++];
        else if (factorVarTreeNode != null) {
          for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
            factorVarTreeNode.Weights[j] = constants[i++];
        }
      }
    }

    private static alglib.ndimensional_jac CreateJac(
      double[,] x, // variable values for the evaluated rows (x has more columns than constraints)
      double[] y, // target values only
      double[,] constraints, // df/d(xi) values in the same column order as x, plus f(x) in the last column; one row per constraint row
      string[,] comparison, // { LEQ, GEQ, EQ }, same shape as constraints
      IDataset ds,
      Func<DV, D> func) {
      return (double[] c, double[] fi, double[,] jac, object o) => {
        // objective function is the sum of squared errors
        int nRows = y.Length;
        int nParams = x.GetLength(1);
        // zero fi and jac
        Array.Clear(fi, 0, fi.Length);
        Array.Clear(jac, 0, jac.Length);
        var p = new double[nParams + c.Length];
        Array.Copy(c, 0, p, nParams, c.Length); // copy c to the end of the function parameter vector
        for (int rowIdx = 0; rowIdx < nRows; rowIdx++) {
          // copy x_i to the beginning of the function parameter vector
          for (int cIdx = 0; cIdx < nParams; cIdx++)
            p[cIdx] = x[rowIdx, cIdx];

          double f = (double)func(p);
          double[] g = (double[])AD.Grad(func, p);
          var e = y[rowIdx] - f;
          fi[0] += e * e;
          // update gradient
          for (int colIdx = 0; colIdx < c.Length; colIdx++) {
            jac[0, colIdx] += -2 * e * g[nParams + colIdx]; // skip the elements for the variable values
          }
        }

        int fidx = 1;
        var constraintRows = Enumerable.Range(0, ds.Rows).Where(rIdx => !string.IsNullOrEmpty(ds.GetStringValue("f(x) constraint-type", rIdx)));

        // eq constraints
        foreach (var rowIdx in constraintRows) {
          foreach (var varName in ds.VariableNames.Where(vn => vn.EndsWith("constraint-type"))) {
            if (ds.GetStringValue(varName, rowIdx) == "EQ") {
              throw new NotSupportedException("EQ constraints are not supported yet.");
            }
          }
        }
        // ineq constraints
        int constraintIdx = 0; // ordinal index into constraints/comparison (one entry per constraint row)
        foreach (var rowIdx in constraintRows) {
          for (int colIdx = 0; colIdx < constraints.GetLength(1); colIdx++) {
            // there is a constraint value
            if (!double.IsNaN(constraints[constraintIdx, colIdx]) && !string.IsNullOrEmpty(comparison[constraintIdx, colIdx])) {
              var factor = (comparison[constraintIdx, colIdx] == "LEQ") ? 1.0
                : comparison[constraintIdx, colIdx] == "GEQ" ? -1.0 : 0.0;

              // copy x_i to the beginning of the function parameter vector
              // (assumes the constraint rows coincide with the evaluated rows of x)
              for (int cIdx = 0; cIdx < nParams; cIdx++)
                p[cIdx] = x[rowIdx, cIdx];

              if (colIdx == constraints.GetLength(1) - 1) {
                // f(x) constraint (last column)
                fi[fidx] = factor * ((double)(func(p)) - constraints[constraintIdx, colIdx]);
                var g = (double[])AD.Grad(func, p);
                for (int jacIdx = 0; jacIdx < c.Length; jacIdx++) {
                  jac[fidx, jacIdx] = factor * g[nParams + jacIdx]; // skip the elements for the variable values
                }
                fidx++;
              } else {
                // df/d(xi) constraint
                var g = (double[])AD.Grad(func, p);
                fi[fidx] = factor * g[colIdx];
                var hess = AD.Hessian(func, p);
                for (int jacIdx = 0; jacIdx < c.Length; jacIdx++) {
                  jac[fidx, jacIdx] = factor * (double)hess[colIdx, nParams + jacIdx]; // row: the constrained variable; columns skip the elements for the variable values
                }
                fidx++;
              }
            }
          }
          constraintIdx++;
        }
      };
    }

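    // Note: compatibility is still checked with TreeToAutoDiffTermConverter (as in the
    // original code); this assumes it accepts the same symbol set that
    // TreeToDiffSharpConverter can convert.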
    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
      return TreeToAutoDiffTermConverter.IsCompatible(tree);
    }
  }
}