#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion
21 |
|
---|
22 | using System;
|
---|
23 | using System.Collections.Generic;
|
---|
24 | using System.Linq;
|
---|
25 | using HEAL.Attic;
|
---|
26 | using HeuristicLab.Common;
|
---|
27 | using HeuristicLab.Core;
|
---|
28 | using HeuristicLab.Data;
|
---|
29 | using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
|
---|
30 | using HeuristicLab.Parameters;
|
---|
31 |
|
---|
32 | namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
|
---|
/// <summary>
/// Single-objective evaluator that combines the normalized mean squared error (NMSE)
/// of a symbolic regression solution with shape-constraint handling: with hard
/// constraints any violation yields the worst fitness (1.0); with soft constraints a
/// weighted average of the violations is added to the (capped) NMSE as a penalty.
/// </summary>
[Item("NMSE Evaluator with shape-constraints (single-objective)", "Calculates NMSE of a symbolic regression solution and checks constraints. The fitness is a combination of NMSE and constraint violations.")]
[StorableType("27473973-DD8D-4375-997D-942E2280AE8E")]
public class NMSESingleObjectiveConstraintsEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
  #region Parameter/Properties

  // Keys of the entries registered in the Parameters collection (see constructor).
  private const string OptimizeParametersParameterName = "OptimizeParameters";
  private const string ParameterOptimizationIterationsParameterName = "ParameterOptimizationIterations";
  private const string UseSoftConstraintsParameterName = "UseSoftConstraintsEvaluation";
  private const string BoundsEstimatorParameterName = "BoundsEstimator";
  private const string PenaltyFactorParameterName = "PenaltyFactor";


  // NOTE(review): property is named "Optimizer..." although the parameter key is
  // "OptimizeParameters"; renaming would break the public API, so it is kept as-is.
  public IFixedValueParameter<BoolValue> OptimizerParametersParameter =>
    (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersParameterName];

  public IFixedValueParameter<IntValue> ParameterOptimizationIterationsParameter =>
    (IFixedValueParameter<IntValue>)Parameters[ParameterOptimizationIterationsParameterName];

  public IFixedValueParameter<BoolValue> UseSoftConstraintsParameter =>
    (IFixedValueParameter<BoolValue>)Parameters[UseSoftConstraintsParameterName];

  public IValueParameter<IBoundsEstimator> BoundsEstimatorParameter =>
    (IValueParameter<IBoundsEstimator>)Parameters[BoundsEstimatorParameterName];
  public IFixedValueParameter<DoubleValue> PenaltyFactorParameter =>
    (IFixedValueParameter<DoubleValue>)Parameters[PenaltyFactorParameterName];

  // Convenience accessors that unwrap the parameter values.
  public bool OptimizeParameters {
    get => OptimizerParametersParameter.Value.Value;
    set => OptimizerParametersParameter.Value.Value = value;
  }

  public int ParameterOptimizationIterations {
    get => ParameterOptimizationIterationsParameter.Value.Value;
    set => ParameterOptimizationIterationsParameter.Value.Value = value;
  }

  public bool UseSoftConstraints {
    get => UseSoftConstraintsParameter.Value.Value;
    set => UseSoftConstraintsParameter.Value.Value = value;
  }

  public IBoundsEstimator BoundsEstimator {
    get => BoundsEstimatorParameter.Value;
    set => BoundsEstimatorParameter.Value = value;
  }

  // NOTE(review): "Penality" is a typo, but the property is part of the public API —
  // renaming would break existing callers, so the name is kept.
  public double PenalityFactor {
    get => PenaltyFactorParameter.Value.Value;
    set => PenaltyFactorParameter.Value.Value = value;
  }


  public override bool Maximization => false; // NMSE is minimized

  #endregion

  #region Constructors/Cloning

  // Deserialization constructor required by HEAL.Attic.
  [StorableConstructor]
  protected NMSESingleObjectiveConstraintsEvaluator(StorableConstructorFlag _) : base(_) { }

  // Copy constructor used by Clone().
  protected NMSESingleObjectiveConstraintsEvaluator(
    NMSESingleObjectiveConstraintsEvaluator original, Cloner cloner) : base(original, cloner) { }

  // Default constructor: registers all parameters with their default values.
  public NMSESingleObjectiveConstraintsEvaluator() {
    Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersParameterName,
      "Define whether optimization of parameters is active or not (default: false).", new BoolValue(false)));
    Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName,
      "Define how many parameter optimization steps should be performed (default: 10).", new IntValue(10)));
    Parameters.Add(new FixedValueParameter<BoolValue>(UseSoftConstraintsParameterName,
      "Define whether the constraints are penalized by soft or hard constraints (default: false).", new BoolValue(false)));
    Parameters.Add(new ValueParameter<IBoundsEstimator>(BoundsEstimatorParameterName,
      "The estimator which is used to estimate output ranges of models (default: interval arithmetic).", new IntervalArithBoundsEstimator()));
    Parameters.Add(new FixedValueParameter<DoubleValue>(PenaltyFactorParameterName,
      "Punishment factor for constraint violations for soft constraint handling (fitness = NMSE + penaltyFactor * avg(violations)) (default: 1.0)", new DoubleValue(1.0)));
  }

  [StorableHook(HookType.AfterDeserialization)]
  private void AfterDeserialization() { }

  public override IDeepCloneable Clone(Cloner cloner) {
    return new NMSESingleObjectiveConstraintsEvaluator(this, cloner);
  }

  #endregion
118 |
|
---|
/// <summary>
/// Operator entry point: evaluates the current tree on the generated rows and
/// stores the resulting quality (NMSE plus constraint handling) in the quality parameter.
/// </summary>
public override IOperation InstrumentedApply() {
  var evaluationRows = GenerateRowsToEvaluate();
  var currentTree = SymbolicExpressionTreeParameter.ActualValue;
  var data = ProblemDataParameter.ActualValue;
  var treeInterpreter = SymbolicDataAnalysisTreeInterpreterParameter.ActualValue;
  var limits = EstimationLimitsParameter.ActualValue;
  var useLinearScaling = ApplyLinearScalingParameter.ActualValue.Value;

  var fitness = Evaluate(
    currentTree, data, evaluationRows, treeInterpreter,
    useLinearScaling, limits.Lower, limits.Upper);
  QualityParameter.ActualValue = new DoubleValue(fitness);

  return base.InstrumentedApply();
}
132 |
|
---|
/// <summary>
/// Computes linear scaling terms (offset alpha, slope beta) for the model output and
/// writes them into the tree's scaling nodes in place. Expects the tree structure
/// produced by the LinearScalingGrammar: Add(Mul(model, beta), alpha).
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown if the tree root does not have the Addition/Multiplication scaling structure.
/// </exception>
private static void CalcLinearScalingTerms(
  ISymbolicExpressionTree tree,
  IRegressionProblemData problemData,
  IEnumerable<int> rows,
  ISymbolicDataAnalysisExpressionTreeInterpreter interpreter) {
  var rootNode = new ProgramRootSymbol().CreateTreeNode();
  var startNode = new StartSymbol().CreateTreeNode();
  var offset = tree.Root.GetSubtree(0) //Start
    .GetSubtree(0); //Offset
  var scaling = offset.GetSubtree(0);

  //Check if tree contains offset and scaling nodes
  if (!(offset.Symbol is Addition) || !(scaling.Symbol is Multiplication))
    throw new ArgumentException($"Scaling can only be used with LinearScalingGrammar.");

  // Clone the unscaled model subtree into a fresh tree so it can be evaluated
  // without the scaling terms.
  var t = (ISymbolicExpressionTreeNode)scaling.GetSubtree(0).Clone();
  rootNode.AddSubtree(startNode);
  startNode.AddSubtree(t);
  var newTree = new SymbolicExpressionTree(rootNode);

  //calculate alpha and beta for scaling
  var estimatedValues = interpreter.GetSymbolicExpressionTreeValues(newTree, problemData.Dataset, rows);

  var targetValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
  OnlineLinearScalingParameterCalculator.Calculate(estimatedValues, targetValues, out var alpha, out var beta,
    out var errorState);

  // On calculation error the tree is left unchanged (best-effort scaling).
  if (errorState == OnlineCalculatorError.None) {
    //Set alpha and beta to the scaling nodes from grammar
    // NOTE(review): the `as` casts assume subtree(1) of both nodes is a NumberTreeNode
    // (presumably guaranteed by the LinearScalingGrammar); a different node type would
    // cause a NullReferenceException here — confirm against the grammar.
    var offsetParameter = offset.GetSubtree(1) as NumberTreeNode;
    offsetParameter.Value = alpha;
    var scalingParameter = scaling.GetSubtree(1) as NumberTreeNode;
    scalingParameter.Value = beta;
  }
}
168 |
|
---|
/// <summary>
/// Computes the NMSE of the tree on the given rows and combines it with shape-constraint
/// handling. Hard constraints: any violation returns the worst fitness (1.0). Soft
/// constraints: a weighted average of the violations, scaled by <paramref name="penaltyFactor"/>,
/// is added to the NMSE capped at 1.0. Calculation errors or undefined violations also
/// return 1.0.
/// </summary>
/// <exception cref="ArgumentException">Thrown if <paramref name="penaltyFactor"/> is negative.</exception>
public static double Calculate(
  ISymbolicExpressionTree tree,
  IRegressionProblemData problemData, IEnumerable<int> rows,
  ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
  double lowerEstimationLimit, double upperEstimationLimit,
  IBoundsEstimator estimator,
  bool useSoftConstraints = false, double penaltyFactor = 1.0) {

  // Collect enabled shape constraints (empty if the problem carries none).
  IEnumerable<ShapeConstraint> constraints = Enumerable.Empty<ShapeConstraint>();
  if (problemData is ShapeConstrainedRegressionProblemData scProbData) {
    constraints = scProbData.ShapeConstraints.EnabledConstraints;
  }

  // Predictions clamped to the estimation limits.
  var predictions = interpreter
    .GetSymbolicExpressionTreeValues(tree, problemData.Dataset, rows)
    .LimitToRange(lowerEstimationLimit, upperEstimationLimit);
  var targets = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);

  var nmse = OnlineNormalizedMeanSquaredErrorCalculator.Calculate(targets, predictions, out var errorState);
  if (errorState != OnlineCalculatorError.None)
    return 1.0; // calculation failed -> worst fitness

  if (!constraints.Any())
    return nmse; // nothing to check, plain NMSE

  var violations = IntervalUtil.GetConstraintViolations(constraints, estimator, problemData.VariableRanges, tree);

  // Undefined violations (NaN/Inf) -> worst fitness.
  if (violations.Any(v => double.IsNaN(v) || double.IsInfinity(v)))
    return 1.0;

  if (!useSoftConstraints) {
    // Hard constraints: any violation yields the worst fitness.
    return violations.Any(v => v > 0.0) ? 1.0 : nmse;
  }

  // Soft constraints: penalize via weighted average of the violations.
  if (penaltyFactor < 0.0)
    throw new ArgumentException("The parameter has to be >= 0.0.", nameof(penaltyFactor));

  var avgWeightedViolation = constraints
    .Zip(violations, (constraint, violation) => constraint.Weight * violation)
    .Average();

  return Math.Min(nmse, 1.0) + penaltyFactor * avgWeightedViolation;
}
215 |
|
---|
/// <summary>
/// Evaluator entry point for an explicit execution context: attaches the context to
/// the required parameters, delegates to <see cref="Calculate"/>, and detaches the
/// context again before returning the quality.
/// </summary>
public override double Evaluate(
  IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
  IEnumerable<int> rows) {
  SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
  EstimationLimitsParameter.ExecutionContext = context;
  ApplyLinearScalingParameter.ExecutionContext = context;

  var limits = EstimationLimitsParameter.ActualValue;
  var quality = Calculate(
    tree, problemData, rows,
    SymbolicDataAnalysisTreeInterpreterParameter.ActualValue,
    limits.Lower,
    limits.Upper,
    BoundsEstimator,
    UseSoftConstraints,
    PenalityFactor);

  SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
  EstimationLimitsParameter.ExecutionContext = null;
  ApplyLinearScalingParameter.ExecutionContext = null;

  return quality;
}
238 |
|
---|
/// <summary>
/// Evaluates the tree: optionally optimizes its numeric parameters first (which takes
/// precedence over linear scaling), or computes linear scaling terms in place, then
/// delegates to <see cref="Calculate"/> using this evaluator's constraint settings.
/// </summary>
public override double Evaluate(
  ISymbolicExpressionTree tree,
  IRegressionProblemData problemData,
  IEnumerable<int> rows,
  ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
  bool applyLinearScaling = true,
  double lowerEstimationLimit = double.MinValue,
  double upperEstimationLimit = double.MaxValue) {

  if (OptimizeParameters) {
    // Adjusts the numeric parameters of the tree in place; return value not needed here.
    Optimize(
      interpreter, tree,
      problemData, rows,
      ParameterOptimizationIterations,
      updateVariableWeights: true,
      lowerEstimationLimit,
      upperEstimationLimit);
  } else if (applyLinearScaling) {
    // extra scaling terms, which are included in tree
    CalcLinearScalingTerms(tree, problemData, rows, interpreter);
  }

  return Calculate(
    tree, problemData,
    rows, interpreter,
    lowerEstimationLimit,
    upperEstimationLimit,
    BoundsEstimator,
    UseSoftConstraints,
    PenalityFactor);
}
269 |
|
---|
/// <summary>
/// Optimizes the numeric parameters (and optionally the variable weights) of the tree
/// with the Levenberg-Marquardt algorithm (alglib minlm) against the squared error on
/// the given rows. On success the optimized parameters are written back into the tree;
/// if optimization fails or worsens the quality, the original parameters are restored.
/// Returns the resulting (or original) MSE.
/// </summary>
/// <exception cref="NotSupportedException">
/// Thrown if the tree contains symbols not supported by the auto-diff converter.
/// </exception>
public static double Optimize(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
  ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows,
  int maxIterations, bool updateVariableWeights = true,
  double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
  bool updateParametersInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

  // Numeric parameters in the tree become variables for parameter optimization.
  // Variables in the tree become parameters (fixed values) for parameter optimization.
  // For each parameter (variable in the original tree) we store the
  // variable name, variable value (for factor vars) and lag as a DataForVariable object.
  double[] initialParameters;
  List<TreeToAutoDiffTermConverter.DataForVariable> parameters;

  TreeToAutoDiffTermConverter.ParametricFunction func;
  TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
  if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, addLinearScalingTerms: false, out parameters, out initialParameters, out func, out func_grad))
    throw new NotSupportedException("Could not optimize parameters of symbolic expression tree due to not supported symbols used in the tree.");
  var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

  // extract initial parameters (kept so they can be restored on failure)
  double[] c = (double[])initialParameters.Clone();

  double originalQuality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(
    tree, problemData, rows,
    interpreter, applyLinearScaling: false,
    lowerEstimationLimit,
    upperEstimationLimit);

  if (counter == null) counter = new EvaluationsCounter();
  var rowEvaluationsCounter = new EvaluationsCounter();

  alglib.minlmreport rep;

  IDataset ds = problemData.Dataset;
  int n = rows.Count();
  int k = parameters.Count;

  // Build the data matrix x: one row per evaluated data row, one column per parameter entry.
  double[,] x = new double[n, k];
  int row = 0;
  foreach (var r in rows) {
    int col = 0;
    foreach (var info in parameterEntries) {
      if (ds.VariableHasType<double>(info.variableName)) {
        x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
      } else if (ds.VariableHasType<string>(info.variableName)) {
        // one-hot encoding for factor variables
        x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
      } else throw new InvalidProgramException("found a variable of unknown type");
      col++;
    }
    row++;
  }
  double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();

  // Only invoked by alglib when reporting is enabled (iterationCallback != null, see minlmsetxrep).
  alglib.ndimensional_rep xrep = (p, f, obj) => iterationCallback(p, f, obj);

  try {
    alglib.minlmcreatevj(y.Length, c, out var lmstate);
    alglib.minlmsetcond(lmstate, 0.0, maxIterations);
    alglib.minlmsetxrep(lmstate, iterationCallback != null);
    // alglib.minlmoptguardgradient(lmstate, 1e-5); // for debugging gradient calculation
    alglib.minlmoptimize(lmstate, CreateFunc(func, x, y), CreateJac(func_grad, x, y), xrep, rowEvaluationsCounter);
    alglib.minlmresults(lmstate, out c, out rep);
    // alglib.minlmoptguardresults(lmstate, out var optGuardReport);
  } catch (ArithmeticException) {
    return originalQuality;
  } catch (alglib.alglibexception) {
    return originalQuality;
  }

  // Normalize per-row counts to per-evaluation counts.
  counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
  counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;

  // * TerminationType, completion code:
  // * -8 optimizer detected NAN/INF values either in the function itself,
  //      or in its Jacobian
  // * -5 inappropriate solver was used:
  //      * solver created with minlmcreatefgh() used on problem with
  //        general linear constraints (set with minlmsetlc() call).
  // * -3 constraints are inconsistent
  // *  2 relative step is no more than EpsX.
  // *  5 MaxIts steps was taken
  // *  7 stopping conditions are too stringent,
  //      further improvement is impossible
  // *  8 terminated by user who called MinLMRequestTermination().
  //      X contains point which was "current accepted" when termination
  //      request was submitted.
  // BUGFIX: parameters must be written back on SUCCESSFUL termination
  // (terminationtype > 0). The previous condition (< 0) applied the result only
  // when the optimizer failed and silently discarded every successful run.
  if (rep.terminationtype > 0) {
    UpdateParameters(tree, c, updateVariableWeights);
  }
  var quality = SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(
    tree, problemData, rows,
    interpreter, applyLinearScaling: false,
    lowerEstimationLimit, upperEstimationLimit);

  if (!updateParametersInTree) UpdateParameters(tree, initialParameters, updateVariableWeights);

  // Keep the optimized parameters only if they actually improved the quality.
  if (originalQuality < quality || double.IsNaN(quality)) {
    UpdateParameters(tree, initialParameters, updateVariableWeights);
    return originalQuality;
  }
  return quality;
}
374 |
|
---|
/// <summary>
/// Writes the given parameter vector back into the tree's terminal nodes in prefix
/// order: number nodes get values, variable nodes get weights (if enabled), factor
/// variable nodes consume one parameter per weight. Power exponents are skipped.
/// </summary>
private static void UpdateParameters(ISymbolicExpressionTree tree, double[] parameters, bool updateVariableWeights) {
  int idx = 0;
  foreach (var terminal in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
    if (terminal is NumberTreeNode numberNode) {
      // exponents in powers are not optimized (see TreeToAutoDiffTermConverter)
      if (numberNode.Parent.Symbol is Power
          && numberNode.Parent.GetSubtree(1) == numberNode) continue;
      numberNode.Value = parameters[idx++];
    } else if (updateVariableWeights && terminal is VariableTreeNodeBase variableNode) {
      variableNode.Weight = parameters[idx++];
    } else if (terminal is FactorVariableTreeNode factorNode) {
      for (int j = 0; j < factorNode.Weights.Length; j++)
        factorNode.Weights[j] = parameters[idx++];
    }
  }
}
393 |
|
---|
/// <summary>
/// Builds the alglib residual callback: evaluates the parametric function for every
/// data row, writes the residuals, and counts function evaluations (one per row).
/// </summary>
private static alglib.ndimensional_fvec CreateFunc(TreeToAutoDiffTermConverter.ParametricFunction func, double[,] x, double[] y) {
  int numCols = x.GetLength(1);
  var rowBuffer = new double[numCols]; // reusable buffer for one row of x
  // The callback must return residuals; alglib minimizes the sum of squared residuals.
  return (double[] c, double[] resid, object o) => {
    for (int r = 0; r < y.Length; r++) {
      // Copy row r of x into the buffer. BlockCopy is used instead of Array.Copy
      // because x has rank 2.
      Buffer.BlockCopy(x, r * numCols * sizeof(double), rowBuffer, 0, numCols * sizeof(double));
      resid[r] = func(c, rowBuffer) - y[r];
    }
    ((EvaluationsCounter)o).FunctionEvaluations += y.Length;
  };
}
408 |
|
---|
/// <summary>
/// Builds the alglib Jacobian callback: evaluates the gradient function for every data
/// row, writes the residuals and the Jacobian matrix, and counts function and gradient
/// evaluations (one each per row).
/// </summary>
private static alglib.ndimensional_jac CreateJac(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad, double[,] x, double[] y) {
  int numParams = x.GetLength(1);
  var rowBuffer = new double[numParams]; // reusable buffer for one row of x
  return (double[] c, double[] resid, double[,] jac, object o) => {
    int numVars = c.Length;
    for (int r = 0; r < y.Length; r++) {
      // Copy row r of x into the buffer (BlockCopy because x has rank 2).
      Buffer.BlockCopy(x, r * numParams * sizeof(double), rowBuffer, 0, numParams * sizeof(double));
      var gradientAndValue = func_grad(c, rowBuffer);
      resid[r] = gradientAndValue.Item2 - y[r];
      // Copy the gradient into row r of jac (BlockCopy because jac has rank 2).
      Buffer.BlockCopy(gradientAndValue.Item1, 0, jac, r * numVars * sizeof(double), numVars * sizeof(double));
    }
    var evalCounter = (EvaluationsCounter)o;
    evalCounter.FunctionEvaluations += y.Length;
    evalCounter.GradientEvaluations += y.Length;
  };
}
426 |
|
---|
/// <summary>
/// Mutable counter for function and gradient evaluations performed during parameter
/// optimization. The alglib callbacks increment these per data row; Optimize divides
/// by the row count to report per-evaluation totals. Public fields are part of the
/// existing API and therefore kept as fields.
/// </summary>
public class EvaluationsCounter {
  public int FunctionEvaluations = 0;
  public int GradientEvaluations = 0;
}
431 | }
|
---|
432 | } |
---|