#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HEAL.Attic;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Nonlinear regression data analysis algorithm.
  /// </summary>
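  /// <remarks>
  /// Parameters of a user-supplied model structure are fitted with ALGLIB's nonlinearly
  /// constrained solver (minnlc with SLP); interval constraints on the model output and on
  /// its partial derivatives are enforced during fitting.
  /// </remarks>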
  [Item("Nonlinear Regression with Constraints (NLR)", "Nonlinear regression (curve fitting) data analysis algorithm that supports interval constraints.")]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 120)]
  [StorableType("B235DB6E-591F-4537-8D2F-C2D1232AAEFD")]
  public sealed class ConstrainedNonlinearRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
    private const string RegressionSolutionResultName = "Regression solution";
    private const string ModelStructureParameterName = "Model structure";
    private const string IterationsParameterName = "Iterations";
    private const string RestartsParameterName = "Restarts";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string SeedParameterName = "Seed";
    private const string InitParamsRandomlyParameterName = "InitializeParametersRandomly";
    private const string ApplyLinearScalingParameterName = "Apply linear scaling";

    public IFixedValueParameter<StringValue> ModelStructureParameter {
      get { return (IFixedValueParameter<StringValue>)Parameters[ModelStructureParameterName]; }
    }
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[IterationsParameterName]; }
    }

    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }

    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }

    public IFixedValueParameter<IntValue> RestartsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[RestartsParameterName]; }
    }

    public IFixedValueParameter<BoolValue> InitParametersRandomlyParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[InitParamsRandomlyParameterName]; }
    }

    public IFixedValueParameter<BoolValue> ApplyLinearScalingParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[ApplyLinearScalingParameterName]; }
    }

    public string ModelStructure {
      get { return ModelStructureParameter.Value.Value; }
      set { ModelStructureParameter.Value.Value = value; }
    }

    public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }

    public int Restarts {
      get { return RestartsParameter.Value.Value; }
      set { RestartsParameter.Value.Value = value; }
    }

    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }

    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }

    public bool InitializeParametersRandomly {
      get { return InitParametersRandomlyParameter.Value.Value; }
      set { InitParametersRandomlyParameter.Value.Value = value; }
    }

    public bool ApplyLinearScaling {
      get { return ApplyLinearScalingParameter.Value.Value; }
      set { ApplyLinearScalingParameter.Value.Value = value; }
    }

    [StorableConstructor]
    private ConstrainedNonlinearRegression(StorableConstructorFlag _) : base(_) { }
    private ConstrainedNonlinearRegression(ConstrainedNonlinearRegression original, Cloner cloner)
      : base(original, cloner) {
    }
    public ConstrainedNonlinearRegression()
      : base() {
      Problem = new RegressionProblem();
      Parameters.Add(new FixedValueParameter<StringValue>(ModelStructureParameterName, "The function for which the parameters must be fit (only numeric constants are tuned).", new StringValue("1.0 * x*x + 0.0")));
      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "The maximum number of iterations for constants optimization.", new IntValue(200)));
      Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of independent random restarts (>0).", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The PRNG seed value.", new IntValue()));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "Switch to determine if the random number seed should be initialized randomly.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<BoolValue>(InitParamsRandomlyParameterName, "Switch to determine if the real-valued model parameters should be initialized randomly in each restart.", new BoolValue(false)));
      // linear scaling is not yet supported by the constrained solver (CreateRegressionSolution
      // throws if it is enabled), therefore it is disabled by default
      Parameters.Add(new FixedValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Switch to determine if linear scaling terms should be added to the model.", new BoolValue(false)));

      SetParameterHiddenState();

      InitParametersRandomlyParameter.Value.ValueChanged += (sender, args) => {
        SetParameterHiddenState();
      };
    }

    private void SetParameterHiddenState() {
      var hide = !InitializeParametersRandomly;
      RestartsParameter.Hidden = hide;
      SeedParameter.Hidden = hide;
      SetSeedRandomlyParameter.Hidden = hide;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      SetParameterHiddenState();
      InitParametersRandomlyParameter.Value.ValueChanged += (sender, args) => {
        SetParameterHiddenState();
      };
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new ConstrainedNonlinearRegression(this, cloner);
    }

    #region nonlinear regression
    protected override void Run(CancellationToken cancellationToken) {
      IRegressionSolution bestSolution = null;
      if (InitializeParametersRandomly) {
        var qualityTable = new DataTable("RMSE table");
        qualityTable.VisualProperties.YAxisLogScale = true;
        var trainRMSERow = new DataRow("RMSE (train)");
        trainRMSERow.VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;
        var testRMSERow = new DataRow("RMSE (test)");
        testRMSERow.VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;

        qualityTable.Rows.Add(trainRMSERow);
        qualityTable.Rows.Add(testRMSERow);
        Results.Add(new Result(qualityTable.Name, qualityTable.Name + " for all restarts", qualityTable));
        if (SetSeedRandomly) Seed = RandomSeedGenerator.GetSeed();
        var rand = new MersenneTwister((uint)Seed);
        bestSolution = CreateRegressionSolution((RegressionProblemData)Problem.ProblemData, ModelStructure, Iterations, ApplyLinearScaling, rand);
        trainRMSERow.Values.Add(bestSolution.TrainingRootMeanSquaredError);
        testRMSERow.Values.Add(bestSolution.TestRootMeanSquaredError);
        for (int r = 0; r < Restarts; r++) {
          var solution = CreateRegressionSolution((RegressionProblemData)Problem.ProblemData, ModelStructure, Iterations, ApplyLinearScaling, rand);
          trainRMSERow.Values.Add(solution.TrainingRootMeanSquaredError);
          testRMSERow.Values.Add(solution.TestRootMeanSquaredError);
          if (solution.TrainingRootMeanSquaredError < bestSolution.TrainingRootMeanSquaredError) {
            bestSolution = solution;
          }
        }
      } else {
        bestSolution = CreateRegressionSolution((RegressionProblemData)Problem.ProblemData, ModelStructure, Iterations, ApplyLinearScaling);
      }

      Results.Add(new Result(RegressionSolutionResultName, "The nonlinear regression solution.", bestSolution));
      Results.Add(new Result("Root mean square error (train)", "The root of the mean of squared errors of the regression solution on the training set.", new DoubleValue(bestSolution.TrainingRootMeanSquaredError)));
      Results.Add(new Result("Root mean square error (test)", "The root of the mean of squared errors of the regression solution on the test set.", new DoubleValue(bestSolution.TestRootMeanSquaredError)));
    }

    /// <summary>
    /// Fits a model to the data by optimizing the numeric constants.
    /// The model is specified as an infix expression containing variable names and numbers.
    /// The starting point for the numeric constants is initialized randomly if a random number generator
    /// is specified (each user-supplied value is multiplied by a random sign and a log-normal factor exp(N(0,1))).
    /// Otherwise the user-specified constants are used as a starting point.
    /// </summary>
    /// <param name="problemData">Training and test data</param>
    /// <param name="modelStructure">The function as an infix expression</param>
    /// <param name="maxIterations">Maximum number of iterations for parameter optimization (ALGLIB minnlc with SLP)</param>
    /// <param name="applyLinearScaling">Must be false; linear scaling is not yet supported by the constrained solver</param>
    /// <param name="rand">Optional random number generator for random initialization of numeric constants</param>
    /// <returns>A symbolic regression solution with the optimized parameter values</returns>
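    /// <example>
    /// A minimal usage sketch (assumes a loaded <c>RegressionProblemData</c> named <c>data</c> whose
    /// inputs include a variable x; the variable name and seed are placeholders):
    /// <code>
    /// var solution = ConstrainedNonlinearRegression.CreateRegressionSolution(
    ///   data, "1.0 * x*x + 0.0", maxIterations: 200,
    ///   applyLinearScaling: false, rand: new MersenneTwister(42u));
    /// </code>
    /// </example>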
    public static ISymbolicRegressionSolution CreateRegressionSolution(RegressionProblemData problemData, string modelStructure, int maxIterations, bool applyLinearScaling, IRandom rand = null) {
      if (applyLinearScaling) throw new NotSupportedException("Linear scaling is not yet supported in NLR with constraints.");
      var parser = new InfixExpressionParser();
      var tree = parser.Parse(modelStructure);
      // the parser handles double and string variables equally by creating a VariableTreeNode
      // post-process to replace VariableTreeNodes by FactorVariableTreeNodes for all string variables
      var factorSymbol = new FactorVariable();
      factorSymbol.VariableNames =
        problemData.AllowedInputVariables.Where(name => problemData.Dataset.VariableHasType<string>(name));
      factorSymbol.AllVariableNames = factorSymbol.VariableNames;
      factorSymbol.VariableValues =
        factorSymbol.VariableNames.Select(name =>
          new KeyValuePair<string, Dictionary<string, int>>(name,
            problemData.Dataset.GetReadOnlyStringValues(name).Distinct()
            .Select((n, i) => Tuple.Create(n, i))
            .ToDictionary(tup => tup.Item1, tup => tup.Item2)));
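      // (each distinct string value is assigned a stable index, so a factor variable is
      // represented by a weight vector with one entry per value)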

      foreach (var parent in tree.IterateNodesPrefix().ToArray()) {
        for (int i = 0; i < parent.SubtreeCount; i++) {
          var varChild = parent.GetSubtree(i) as VariableTreeNode;
          var factorVarChild = parent.GetSubtree(i) as FactorVariableTreeNode;
          if (varChild != null && factorSymbol.VariableNames.Contains(varChild.VariableName)) {
            parent.RemoveSubtree(i);
            var factorTreeNode = (FactorVariableTreeNode)factorSymbol.CreateTreeNode();
            factorTreeNode.VariableName = varChild.VariableName;
            factorTreeNode.Weights =
              factorTreeNode.Symbol.GetVariableValues(factorTreeNode.VariableName).Select(_ => 1.0).ToArray();
            // weight = 1.0 for each value
            parent.InsertSubtree(i, factorTreeNode);
          } else if (factorVarChild != null && factorSymbol.VariableNames.Contains(factorVarChild.VariableName)) {
            if (factorSymbol.GetVariableValues(factorVarChild.VariableName).Count() != factorVarChild.Weights.Length)
              throw new ArgumentException(
                string.Format("Factor variable {0} needs exactly {1} weights",
                  factorVarChild.VariableName,
                  factorSymbol.GetVariableValues(factorVarChild.VariableName).Count()));
            parent.RemoveSubtree(i);
            var factorTreeNode = (FactorVariableTreeNode)factorSymbol.CreateTreeNode();
            factorTreeNode.VariableName = factorVarChild.VariableName;
            factorTreeNode.Weights = factorVarChild.Weights;
            parent.InsertSubtree(i, factorTreeNode);
          }
        }
      }
      var intervalConstraints = problemData.IntervalConstraints;
      var dataIntervals = problemData.VariableRanges.GetIntervals();

      // convert constants to variables named theta...
      var treeForDerivation = ReplaceConstWithVar(tree, out List<string> thetaNames, out List<double> thetaValues);
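      // the round trip constant -> theta-variable -> shared ConstantTreeNode serves two purposes:
      // DerivativeCalculator can derive with respect to the named thetas, and afterwards each theta
      // is mapped back to a single ConstantTreeNode that is shared by the objective tree and all
      // constraint trees, so an update of a theta value is visible in every tree simultaneously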

      // create trees for relevant derivatives
      var allThetaNodes = thetaNames.Select(_ => new List<ConstantTreeNode>()).ToArray();
      var constraintTrees = new List<ISymbolicExpressionTree>();
      foreach (var constraint in intervalConstraints.Constraints) {
        if (constraint.IsDerivation) {
          if (!problemData.AllowedInputVariables.Contains(constraint.Variable))
            throw new ArgumentException($"Invalid constraint: the variable {constraint.Variable} does not exist in the dataset.");
          var df = DerivativeCalculator.Derive(treeForDerivation, constraint.Variable);

          // alglib requires constraint expressions of the form c(x) <= 0
          // -> we make two expressions, one for the lower bound and one for the upper bound

          if (constraint.Interval.UpperBound < double.PositiveInfinity) {
            var df_smaller_upper = Subtract((ISymbolicExpressionTree)df.Clone(), CreateConstant(constraint.Interval.UpperBound));
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(df_smaller_upper, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
          if (constraint.Interval.LowerBound > double.NegativeInfinity) {
            var df_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)df.Clone());
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(df_larger_lower, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
        } else {
          if (constraint.Interval.UpperBound < double.PositiveInfinity) {
            var f_smaller_upper = Subtract((ISymbolicExpressionTree)treeForDerivation.Clone(), CreateConstant(constraint.Interval.UpperBound));
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(f_smaller_upper, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
          if (constraint.Interval.LowerBound > double.NegativeInfinity) {
            var f_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)treeForDerivation.Clone());
            // convert variables named theta back to constants
            var df_prepared = ReplaceVarWithConst(f_larger_lower, thetaNames, thetaValues, allThetaNodes);
            constraintTrees.Add(df_prepared);
          }
        }
      }

      var preparedTree = ReplaceVarWithConst(treeForDerivation, thetaNames, thetaValues, allThetaNodes);

      // initialize constants randomly
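      // each starting value is multiplied by a random sign and a log-normal factor exp(N(0,1));
      // restarts therefore explore different signs and orders of magnitude around the
      // user-supplied starting values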
      if (rand != null) {
        for (int i = 0; i < allThetaNodes.Length; i++) {
          double f = Math.Exp(NormalDistributedRandom.NextDouble(rand, 0, 1));
          double scale = rand.NextDouble() < 0.5 ? -1 : 1;
          thetaValues[i] = scale * thetaValues[i] * f;
          foreach (var constNode in allThetaNodes[i]) constNode.Value = thetaValues[i];
        }
      }

      // local function
      void UpdateThetaValues(double[] theta) {
        for (int i = 0; i < theta.Length; ++i) {
          foreach (var constNode in allThetaNodes[i]) constNode.Value = theta[i];
        }
      }

      // buffers for calculate_jacobian
      var target = problemData.TargetVariableTrainingValues.ToArray();
      var fi_eval = new double[target.Length];
      var jac_eval = new double[target.Length, thetaValues.Count];

      // define the callback used by the alglib optimizer
      // the x argument for this callback represents our theta
      // local function
      void calculate_jacobian(double[] x, double[] fi, double[,] jac, object obj) {
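        // alglib minnlc layout: fi[0] / jac[0, *] hold the objective value and its gradient,
        // fi[i + 1] / jac[i + 1, *] hold the value and gradient of the i-th inequality constraint c_i(theta) <= 0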
        UpdateThetaValues(x);

        var autoDiffEval = new VectorAutoDiffEvaluator();
        autoDiffEval.Evaluate(preparedTree, problemData.Dataset, problemData.TrainingIndices.ToArray(),
          GetParameterNodes(preparedTree, allThetaNodes), fi_eval, jac_eval);

        // calc sum of squared errors and gradient
        var sse = 0.0;
        var g = new double[x.Length];
        for (int i = 0; i < target.Length; i++) {
          var res = target[i] - fi_eval[i];
          sse += 0.5 * res * res;
          for (int j = 0; j < g.Length; j++) {
            g[j] -= res * jac_eval[i, j];
          }
        }

        fi[0] = sse / target.Length;
        for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j] / target.Length; }

        var intervalEvaluator = new IntervalEvaluator();
        for (int i = 0; i < constraintTrees.Count; i++) {
          var interval = intervalEvaluator.Evaluate(constraintTrees[i], dataIntervals, GetParameterNodes(constraintTrees[i], allThetaNodes),
            out double[] lowerGradient, out double[] upperGradient);

          // we transformed this to a constraint c(x) <= 0, so only the upper bound is relevant for us
          fi[i + 1] = interval.UpperBound;
          for (int j = 0; j < x.Length; j++) {
            jac[i + 1, j] = upperGradient[j];
          }
        }
      }

      // prepare alglib
      alglib.minnlcstate state;
      alglib.minnlcreport rep;
      //alglib.optguardreport optGuardRep;
      var x0 = thetaValues.ToArray();

      alglib.minnlccreate(x0.Length, x0, out state);
      alglib.minnlcsetalgoslp(state); // SLP is more robust but slower
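      // stopping criteria: a scaled step-length tolerance of 1e-6 and the iteration limit
      // (assumption based on the two-argument minnlcsetcond overload of ALGLIB)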
      alglib.minnlcsetcond(state, 1E-6, maxIterations);
      var s = Enumerable.Repeat(1d, x0.Length).ToArray(); // all parameters are scaled equally (unit scale)
      alglib.minnlcsetscale(state, s);

      // register the non-linear constraints: no equality constraints, one inequality constraint per constraint tree
      alglib.minnlcsetnlc(state, 0, constraintTrees.Count);

      // alglib.minnlcoptguardsmoothness(state);
      // alglib.minnlcoptguardgradient(state, 0.001);

      alglib.minnlcoptimize(state, calculate_jacobian, null, null);
      alglib.minnlcresults(state, out double[] xOpt, out rep);
      // alglib.minnlcoptguardresults(state, out optGuardRep);

      var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
      UpdateThetaValues(xOpt);
      var model = new SymbolicRegressionModel(problemData.TargetVariable, (ISymbolicExpressionTree)preparedTree.Clone(), (ISymbolicDataAnalysisExpressionTreeInterpreter)interpreter.Clone());
      if (applyLinearScaling)
        model.Scale(problemData);

      SymbolicRegressionSolution solution = new SymbolicRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
      solution.Model.Name = "Regression Model";
      solution.Name = "Regression Solution";
      return solution;
    }

    private static ISymbolicExpressionTreeNode[] GetParameterNodes(ISymbolicExpressionTree tree, List<ConstantTreeNode>[] allNodes) {
      // TODO better solution necessary
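      // a theta that does not occur in the given tree (possible for constraint trees after
      // derivation) maps to a null entry in the result array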
      var treeConstNodes = tree.IterateNodesPostfix().OfType<ConstantTreeNode>().ToArray();
      var paramNodes = new ISymbolicExpressionTreeNode[allNodes.Length];
      for (int i = 0; i < paramNodes.Length; i++) {
        paramNodes[i] = allNodes[i].SingleOrDefault(n => treeConstNodes.Contains(n));
      }
      return paramNodes;
    }
    #endregion

    #region helper
    private static ISymbolicExpressionTree ReplaceVarWithConst(ISymbolicExpressionTree tree, List<string> thetaNames, List<double> thetaValues, List<ConstantTreeNode>[] thetaNodes) {
      var copy = (ISymbolicExpressionTree)tree.Clone();
      var nodes = copy.IterateNodesPostfix().ToList();
      for (int i = 0; i < nodes.Count; i++) {
        var n = nodes[i] as VariableTreeNode;
        if (n != null) {
          var thetaIdx = thetaNames.IndexOf(n.VariableName);
          if (thetaIdx >= 0) {
            var parent = n.Parent;
            if (thetaNodes[thetaIdx].Any()) {
              // HACK: REUSE CONSTANT TREE NODE IN SEVERAL TREES
              // we use this trick to allow autodiff over thetas when thetas occur multiple times in the tree (e.g. in derived trees)
              var constNode = thetaNodes[thetaIdx].First();
              var childIdx = parent.IndexOfSubtree(n);
              parent.RemoveSubtree(childIdx);
              parent.InsertSubtree(childIdx, constNode);
            } else {
              var constNode = (ConstantTreeNode)CreateConstant(thetaValues[thetaIdx]);
              var childIdx = parent.IndexOfSubtree(n);
              parent.RemoveSubtree(childIdx);
              parent.InsertSubtree(childIdx, constNode);
              thetaNodes[thetaIdx].Add(constNode);
            }
          }
        }
      }
      return copy;
    }

    private static ISymbolicExpressionTree ReplaceConstWithVar(ISymbolicExpressionTree tree, out List<string> thetaNames, out List<double> thetaValues) {
      thetaNames = new List<string>();
      thetaValues = new List<double>();
      var copy = (ISymbolicExpressionTree)tree.Clone();
      var nodes = copy.IterateNodesPostfix().ToList();

      int n = 1;
      for (int i = 0; i < nodes.Count; ++i) {
        var node = nodes[i];
        if (node is ConstantTreeNode constantTreeNode) {
          var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
          thetaVar.Weight = 1;
          thetaVar.VariableName = $"θ{n++}";

          thetaNames.Add(thetaVar.VariableName);
          thetaValues.Add(constantTreeNode.Value);

          var parent = constantTreeNode.Parent;
          if (parent != null) {
            var index = constantTreeNode.Parent.IndexOfSubtree(constantTreeNode);
            parent.RemoveSubtree(index);
            parent.InsertSubtree(index, thetaVar);
          }
        }
      }
      return copy;
    }

    private static ISymbolicExpressionTreeNode CreateConstant(double value) {
      var constantNode = (ConstantTreeNode)new Constant().CreateTreeNode();
      constantNode.Value = value;
      return constantNode;
    }

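    // both Subtract overloads splice the subtraction into the existing tree in place;
    // the expression subtree sits below the program root and the start symbol,
    // i.e. at Root.GetSubtree(0).GetSubtree(0)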
    private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTree t, ISymbolicExpressionTreeNode b) {
      var sub = MakeNode<Subtraction>(t.Root.GetSubtree(0).GetSubtree(0), b);
      t.Root.GetSubtree(0).RemoveSubtree(0);
      t.Root.GetSubtree(0).InsertSubtree(0, sub);
      return t;
    }
    private static ISymbolicExpressionTree Subtract(ISymbolicExpressionTreeNode b, ISymbolicExpressionTree t) {
      var sub = MakeNode<Subtraction>(b, t.Root.GetSubtree(0).GetSubtree(0));
      t.Root.GetSubtree(0).RemoveSubtree(0);
      t.Root.GetSubtree(0).InsertSubtree(0, sub);
      return t;
    }

    private static ISymbolicExpressionTreeNode MakeNode<T>(params ISymbolicExpressionTreeNode[] fs) where T : ISymbol, new() {
      var node = new T().CreateTreeNode();
      foreach (var f in fs) node.AddSubtree(f);
      return node;
    }
    #endregion
  }
}