#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  // Another option is spline smoothing of the data (over time) to obtain, for each target variable,
  // a de-noised value and the derivative dy/dt for all observations.
  // The regression can then be applied directly to dy/dt (with the smoothed y values as inputs).
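  // A minimal sketch of the derivative step in C# (illustrative only; the names are hypothetical and
  // a real implementation would use a proper smoothing spline instead of raw central differences):
  //   double[] Derivative(double[] ySmoothed, double dt) {
  //     var d = new double[ySmoothed.Length];
  //     for (int i = 1; i < d.Length - 1; i++)
  //       d[i] = (ySmoothed[i + 1] - ySmoothed[i - 1]) / (2 * dt); // central difference for dy/dt
  //     d[0] = d[1]; d[d.Length - 1] = d[d.Length - 2];            // pad the boundary values
  //     return d;
  //   }
  // The smoothed y values would then serve as inputs and dy/dt as the regression target.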
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use the CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides the setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for the optimization of parameters (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps reduce the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training; each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));

      var solversStr = new string[] { "HeuristicLab", "CVODES" };
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
      );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: do not clear the selection of target variables when the input variables are changed (keep selected target variables)
      // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
      // TODO: use the training range as the default training episode
      // TODO: write back optimized parameters to the solution?
      // TODO: optimization of starting values for latent variables in the CVODES solver

    }
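
    // Usage sketch (illustrative only; myProblemData stands for an already imported data set and is hypothetical):
    //   var problem = new Problem();
    //   problem.ProblemData = myProblemData;
    //   problem.OdeSolver = "CVODES"; // valid values: "HeuristicLab", "CVODES"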

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
      // write back optimized parameters to tree nodes instead of the separate OptTheta variable
      // retrieve optimized parameters from nodes?

      var problemData = Standardize(ProblemData);
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          double[] optTheta;
          double nmse;
          OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        double[] optTheta;
        double nmse;
        OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
        individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
        return nmse;
      }
    }
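
    // Illustrative sketch (not part of the original algorithm): the episode loop in Evaluate
    // averages per-episode NMSE values weighted by episode length. This hypothetical helper
    // shows the same aggregation in isolation.
    private static double WeightedAverageNmse(IEnumerable<Tuple<double, int>> nmseAndSize) {
      double totalNmse = 0.0;
      int totalSize = 0;
      foreach (var t in nmseAndSize) {
        totalNmse += t.Item1 * t.Item2; // weight each episode's NMSE by its length (t.Item2)
        totalSize += t.Item2;
      }
      return totalNmse / totalSize;
    }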

    private IRegressionProblemData Standardize(IRegressionProblemData problemData) {
      // var standardizedDataset = new Dataset(
      //   problemData.Dataset.DoubleVariables,
      //   problemData.Dataset.DoubleVariables.Select(v => Standardize(problemData.Dataset.GetReadOnlyDoubleValues(v)).ToList()));
      // return new RegressionProblemData(standardizedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      return new RegressionProblemData(problemData.Dataset, problemData.AllowedInputVariables, problemData.TargetVariable);
    }

    public static void OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver,
      out double[] optTheta,
      out double nmse) {
      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
      var targetValues = new double[rows.Length, targetVars.Length];

      // collect values of all target variables
      var colIdx = 0;
      foreach (var targetVar in targetVars) {
        int rowIdx = 0;
        foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
          targetValues[rowIdx, colIdx] = value;
          rowIdx++;
        }
        colIdx++;
      }

      // NOTE: the order of values in the parameter vector matches the prefix order of constant nodes in the trees
      var paramNodes = new List<ISymbolicExpressionTreeNode>();
      foreach (var t in trees) {
        foreach (var n in t.IterateNodesPrefix()) {
          if (IsConstantNode(n))
            paramNodes.Add(n);
        }
      }
      // init params randomly
      // theta contains the parameter values for the trees followed by the initial values for the latent variables (a separate vector for each episode)
      // initial values for latent variables are also optimized
      var theta = new double[paramNodes.Count + latentVariables.Length * episodes.Count()];
      for (int i = 0; i < theta.Length; i++)
        theta[i] = random.NextDouble() * 2.0e-1 - 1.0e-1;

      optTheta = new double[0];
      if (theta.Length > 0) {
        alglib.minlmstate state;
        alglib.minlmreport report;
        alglib.minlmcreatevj(targetValues.Length, theta, out state);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
        alglib.minlmsetgradientcheck(state, 1.0e-3);
        var myState = new OptimizationData(trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
        alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

        alglib.minlmresults(state, out optTheta, out report);

        /*************************************************************************
        Levenberg-Marquardt algorithm results

        INPUT PARAMETERS:
            State   -   algorithm state

        OUTPUT PARAMETERS:
            X       -   array[0..N-1], solution
            Rep     -   optimization report; includes termination codes and
                        additional information. Termination codes are listed below,
                        see comments for this structure for more info.
                        Termination code is stored in rep.terminationtype field:
                        * -8    optimizer detected NAN/INF values either in the
                                function itself, or in its Jacobian
                        * -7    derivative correctness check failed;
                                see rep.funcidx, rep.varidx for
                                more information.
                        * -3    constraints are inconsistent
                        *  2    relative step is no more than EpsX.
                        *  5    MaxIts steps was taken
                        *  7    stopping conditions are too stringent,
                                further improvement is impossible
                        *  8    terminated by user who called minlmrequesttermination().
                                X contains point which was "current accepted" when
                                termination request was submitted.

        -- ALGLIB --
           Copyright 10.03.2009 by Bochkanov Sergey
        *************************************************************************/
        if (report.terminationtype < 0) { nmse = 10.0; return; }

        nmse = state.f; // TODO: check

        // var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver };
        // EvaluateObjectiveVector(optTheta, ref nmse, grad, myState);
        if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10.0; return; } // return a large value (TODO: be consistent by using NMSE)
      } else {
        // no parameters
        nmse = targetValues.Length;
      }

    }
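
    // Illustrative sketch (an assumption, not part of the original code): ALGLIB's Levenberg-Marquardt
    // solver minimizes the sum of squared residuals fi(theta) produced by EvaluateObjectiveVector.
    // This hypothetical helper shows how a mean squared error could be recovered from such a residual vector.
    private static double MeanSquaredError(double[] fi) {
      double ss = 0.0;
      for (int i = 0; i < fi.Length; i++) ss += fi[i] * fi[i]; // sum of squared residuals
      return ss / fi.Length;
    }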

    // private static IEnumerable<double> Standardize(IEnumerable<double> xs) {
    //   var m = xs.Average();
    //   var s = xs.StandardDeviationPop();
    //   return xs.Select(xi => (xi - m) / s);
    // }

    alglib.ndimensional_fvec fvec;
    alglib.ndimensional_jac jac;

    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      EvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {

      Tuple<double, Vector>[][] predicted = null; // one array of predictions for each time step (row) over all episodes
      predicted = Integrate(optimizationData, x).ToArray();

      // clear all result data structures
      for (int j = 0; j < fi.Length; j++) fi[j] = 10.0;
      if (jac != null) Array.Clear(jac, 0, jac.Length);

      if (predicted.Length != optimizationData.targetValues.GetLength(0)) {
        return;
      }

      // for normalized MSE = 1/variance(t) * MSE(t, pred)
      // TODO: Perf. (by standardization of target variables before evaluation of all trees)
      // var invVar = Enumerable.Range(0, targetVariables.Length)
      //   .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
      //   .Select(vec => vec.StandardDeviation()) // TODO: variance or stddev
      //   .Select(v => 1.0 / v)
      //   .ToArray();

      // double[] invVar = Enumerable.Repeat(1.0, targetVariables.Length).ToArray();

      // objective function is NMSE
      int n = predicted.Length;
      double invN = 1.0 / n;
      int i = 0;
      int r = 0;
      foreach (var y_pred in predicted) {
        // y_pred contains the predicted values for the target variables first and then the predicted values for the latent variables
        for (int c = 0; c < optimizationData.targetVariables.Length; c++) {

          var y_pred_f = y_pred[c].Item1;
          var y = optimizationData.targetValues[r, c];

          fi[i] = (y - y_pred_f);
          var g = y_pred[c].Item2;
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[i, j] = -g[j];
          i++; // we put the errors for each target variable after each other
        }
        r++;
      }
    }
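
    // Illustrative sketch (hypothetical helper, not part of the original code): the residual vector
    // filled above is laid out row by row with the errors of all target variables after each other,
    // i.e. the flat index for row r and target column c is r * numTargets + c.
    private static int ResidualIndex(int row, int targetColumn, int numTargets) {
      return row * numTargets + targetColumn;
    }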

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
      if (!results.ContainsKey("Squared error and gradient")) {
        results.Add(new Result("Squared error and gradient", typeof(DataTable)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = Standardize(ProblemData);
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see Evaluate
          var optimizationData = new OptimizationData(trees, targetVars, problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData, optTheta).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();

        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see Evaluate
        var optimizationData = new OptimizationData(trees, targetVars, problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var trainingPrediction = Integrate(optimizationData, optTheta).ToArray();
        // for target values and latent variables
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            for (int paramIdx = 0; paramIdx < optTheta.Length; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", trainingPrediction.Select(arr => arr[colIdx].Item2[paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        var errorTable = new DataTable("Squared error and gradient");
        var seRow = new DataRow("Squared error");
        var gradientRows = Enumerable.Range(0, optTheta.Length).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        errorTable.Rows.Add(seRow);
        foreach (var gRow in gradientRows) {
          gRow.VisualProperties.SecondYAxis = true;
          errorTable.Rows.Add(gRow);
        }
        var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        int r = 0;
        double invN = 1.0 / trainingRows.Count();
        foreach (var y_pred in trainingPrediction) {
          // calculate the objective function gradient
          double f_i = 0.0;
          Vector g_i = Vector.CreateNew(new double[optTheta.Length]);
          for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
            var y_pred_f = y_pred[colIdx].Item1;
            var y = targetValues[colIdx][r];

            var res = (y - y_pred_f);
            var ressq = res * res;
            f_i += ressq * invN /* * Math.Exp(-0.2 * r) */;
            g_i = g_i - 2.0 * res * y_pred[colIdx].Item2 * invN /* * Math.Exp(-0.2 * r) */;
          }
          seRow.Values.Add(f_i);
          for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
          r++;
        }
        results["Squared error and gradient"].Value = errorTable;

        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testOptimizationData = new OptimizationData(trees, targetVars, problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
        var testPrediction = Integrate(testOptimizationData, optTheta).ToArray();

        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var testDataTable = new DataTable(targetVar + " prediction (test)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            testDataTable.Rows.Add(actualValuesRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);

          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var testDataTable = new DataTable(latentVar + " prediction (test)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            testDataTable.Rows.Add(emptyRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);
          }
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();

        #region simplification of models
        // TODO: the dependency on HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection(); // to store target var names and the original version of each tree

        var optimizedTrees = new List<ISymbolicExpressionTree>();
        int nextParIdx = 0;
        for (int idx = 0; idx < trees.Length; idx++) {
          var tree = trees[idx];
          optimizedTrees.Add(new SymbolicExpressionTree(FixParameters(tree.Root, optTheta.ToArray(), ref nextParIdx)));
        }
        var ds = problemData.Dataset;
        var newVarNames = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
        var allVarNames = ds.DoubleVariables.Concat(newVarNames);
        var allVarValues = ds.DoubleVariables.Select(varName => ds.GetDoubleValues(varName).ToList())
          .Concat(Enumerable.Range(0, nextParIdx).Select(i => Enumerable.Repeat(optTheta[i], ds.Rows).ToList()))
          .ToList();
        var newDs = new Dataset(allVarNames, allVarValues);
        var newProblemData = new RegressionProblemData(newDs, problemData.AllowedInputVariables.Concat(newVarNames).ToArray(), problemData.TargetVariable);
        results["Solution"].Value = new Solution(optimizedTrees.ToArray(),
          // optTheta,
          newProblemData,
          targetVars,
          latentVariables,
          TrainingEpisodes,
          OdeSolver,
          NumericIntegrationSteps);

        nextParIdx = 0;
        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          // when we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
          var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta.ToArray(),
            ref nextParIdx));

          // var shownTree = (SymbolicExpressionTree)tree.Clone();
          // var constantsNodeOrig = tree.IterateNodesPrefix().Where(IsConstantNode);
          // var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode);
          //
          // foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) {
          //   double constantsVal = optTheta[nodeIdx[n.original]];
          //
          //   ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal };
          //
          //   var parentNode = n.shown.Parent;
          //   int replacementIndex = parentNode.IndexOfSubtree(n.shown);
          //   parentNode.RemoveSubtree(replacementIndex);
          //   parentNode.InsertSubtree(replacementIndex, replacementNode);
          // }

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
          models.Add(simplifiedTreeVar);

        }

        results["Models"].Value = models;
        #endregion
      }
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes

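    // Illustrative sketch (an assumption, not part of the original code): for a scalar ODE y' = f(y, p)
    // the forward sensitivity s = ∂y/∂p evolves according to s' = (∂f/∂y)·s + ∂f/∂p (Gronwall).
    // One explicit Euler step for state and sensitivity could look like this; all names are hypothetical.
    private static void ForwardSensitivityEulerStep(
      ref double y, ref double s, double h,
      Func<double, double> f,      // y => f(y, p) for the current, fixed p
      Func<double, double> dfdy,   // y => ∂f/∂y
      Func<double, double> dfdp) { // y => ∂f/∂p
      var yOld = y;
      y = yOld + h * f(yOld);                    // state update
      s = s + h * (dfdy(yOld) * s + dfdp(yOld)); // sensitivity update
    }
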
    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData, double[] parameterValues) {

      var trees = optimizationData.trees;
      var dataset = optimizationData.problemData.Dataset;
      var inputVariables = optimizationData.problemData.AllowedInputVariables.ToArray();
      var targetVariables = optimizationData.targetVariables;
      var latentVariables = optimizationData.latentVariables;
      var episodes = optimizationData.episodes;
      var odeSolver = optimizationData.odeSolver;
      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;

      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var episodeIdx = 0;
      foreach (var episode in optimizationData.episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);

        // integrate forward starting with the known values of the targets at t0

        var variableValues = new Dictionary<string, Tuple<double, Vector>>();
        var t0 = rows.First();
        foreach (var varName in inputVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        foreach (var varName in targetVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        // add value entries for latent variables which are also integrated
        // initial values are at the end of the parameter vector
        // separate initial values for each episode
        var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
        foreach (var latentVar in latentVariables) {
          var arr = new double[parameterValues.Length]; // backing array
          arr[initialValueIdx] = 1.0;
          var g = new Vector(arr);
          variableValues.Add(latentVar,
            Tuple.Create(parameterValues[initialValueIdx], g)); // we don't have observations for latent variables, therefore we optimize the initial value for each episode
          initialValueIdx++;
        }

        var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding

        // return the first value as stored in the dataset
        yield return calculatedVariables
          .Select(calcVarName => variableValues[calcVarName])
          .ToArray();

        var prevT = rows.First(); // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
        foreach (var t in rows.Skip(1)) {
          if (odeSolver == "HeuristicLab")
            IntegrateHL(trees, calculatedVariables, variableValues, parameterValues, numericIntegrationSteps);
          else if (odeSolver == "CVODES")
            throw new NotImplementedException();
          // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;

          // This check doesn't work with the HeuristicLab integrator if there are input variables
          // if (variableValues.Count == targetVariables.Length) {
          // only return the target variables for calculation of errors
          var res = calculatedVariables
            .Select(targetVar => variableValues[targetVar])
            .ToArray();
          if (res.Any(ri => double.IsNaN(ri.Item1) || double.IsInfinity(ri.Item1))) yield break;
          yield return res;
          // } else {
          //   yield break; // stop early on problems
          // }

          // update for next time step
          foreach (var varName in inputVariables) {
            variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
          }
        }
        episodeIdx++;
      }
    }

    #region CVODES

    /*
    /// <summary>
    /// Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
    /// </summary>
    /// <param name="trees">Each equation in the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="variableValues">The start values of the calculated variables as well as their sensitivities over parameters</param>
    /// <param name="parameterValues">The current parameter values</param>
    /// <param name="t">The time t up to which we need to integrate.</param>
    private static void IntegrateCVODES(
      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
      string[] calculatedVariables, // names of elements of y
      Dictionary<string, Tuple<double, Vector>> variableValues, // y (input and output); input: y(t0), output: y(t0+t)
      double[] parameterValues, // p
      double t // duration t for which we want to integrate
      ) {

      // the RHS of the ODE
      // dy/dt = f(y_t,x_t,p)
      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, parameterValues);
      // the Jacobian ∂f/∂y
      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, parameterValues);

      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, parameterValues);

      // setup solver
      int numberOfEquations = trees.Length;
      IntPtr y = IntPtr.Zero;
      IntPtr cvode_mem = IntPtr.Zero;
      IntPtr A = IntPtr.Zero;
      IntPtr yS0 = IntPtr.Zero;
      IntPtr linearSolver = IntPtr.Zero;
      var ns = parameterValues.Length; // number of parameters

      try {
        y = CVODES.N_VNew_Serial(numberOfEquations);
        // init y to current values of variables
        // y must be initialized before calling CVodeInit
        for (int i = 0; i < calculatedVariables.Length; i++) {
          CVODES.NV_Set_Ith_S(y, i, variableValues[calculatedVariables[i]].Item1);
        }

        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);

        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        double relTol = 1.0e-2;
        double absTol = 1.0;
        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol); // TODO: probably need to adjust absTol per variable
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
        Debug.Assert(A != IntPtr.Zero);

        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
        Debug.Assert(linearSolver != IntPtr.Zero);

        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
        unsafe {
          // set to initial sensitivities supplied by caller
          for (int pIdx = 0; pIdx < ns; pIdx++) {
            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
              CVODES.NV_Set_Ith_S(yS0_i, varIdx, variableValues[calculatedVariables[varIdx]].Item2[pIdx]);
            }
          }
        }

        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        // make one forward integration step
        double tout = 0.0; // first output time
        flag = CVODES.CVode(cvode_mem, t, y, ref tout, CVODES.CV_NORMAL);
        if (flag == CVODES.CV_SUCCESS) {
          Debug.Assert(t == tout);

          // get sensitivities
          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
          Debug.Assert(CVODES.CV_SUCCESS == flag);

          // update variableValues based on integration results
          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
            var gArr = new double[parameterValues.Length];
            for (var pIdx = 0; pIdx < parameterValues.Length; pIdx++) {
              unsafe {
                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
              }
            }
            variableValues[calculatedVariables[varIdx]] = Tuple.Create(yi, new Vector(gArr));
          }
        } else {
          variableValues.Clear(); // indicate problems by not returning new values
        }

        // cleanup all allocated objects
      } finally {
        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
      }
    }

    private static CVODES.CVRhsFunc CreateOdeRhs(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {
      // we don't need to calculate a gradient here
      return (double t,
        IntPtr y, // N_Vector, current value of y (input)
        IntPtr ydot, // N_Vector, calculated value of y' (output)
        IntPtr user_data // optional user data (unused here)
        ) => {
          // TODO: perf
          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();

          int pIdx = 0;
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsConstantNode(n)) {
                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we do not need a gradient
                pIdx++;
              } else if (n.SubtreeCount == 0) {
                // for variables and latent variables get the value from variableValues
                var varName = n.Symbol.Name;
                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                if (varIdx < 0) throw new InvalidProgramException();
                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                nodeValues.Add(n, Tuple.Create(y_i, Vector.Zero)); // no gradient needed
              }
            }
          }
          for (int i = 0; i < trees.Length; i++) {
            var tree = trees[i];
            var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
            CVODES.NV_Set_Ith_S(ydot, i, res_i.Item1);
          }
          return 0;
        };
    }

    private static CVODES.CVDlsJacFunc CreateJac(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {

      return (
        double t, // current time (input)
        IntPtr y, // N_Vector, current value of y (input)
        IntPtr fy, // N_Vector, current value of f (input)
        IntPtr Jac, // SUNMatrix ∂f/∂y (output, row i contains the ∂f_i/∂y vector)
        IntPtr user_data, // optional (unused here)
        IntPtr tmp1, // N_Vector, optional (unused here)
        IntPtr tmp2, // N_Vector, optional (unused here)
        IntPtr tmp3 // N_Vector, optional (unused here)
        ) => {
          // here we need to calculate partial derivatives for the calculated variables y
          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
          int pIdx = 0;
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsConstantNode(n)) {
                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
                pIdx++;
              } else if (n.SubtreeCount == 0) {
                // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
                var varName = n.Symbol.Name;
                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                if (varIdx < 0) throw new InvalidProgramException();

                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
                gArr[varIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(y_i, g));
              }
            }
          }

          for (int i = 0; i < trees.Length; i++) {
            var tree = trees[i];
            var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
            var g = res.Item2;
            for (int j = 0; j < calculatedVariables.Length; j++) {
              CVODES.SUNDenseMatrix_Set(Jac, i, j, g[j]);
            }
          }
          return 0; // on success
        };
    }
934 |
|
---|
935 |
|
---|
936 | // to calculate sensitivities RHS for all equations at once
|
---|
937 | // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store in ySdot.
|
---|
938 | // Index i refers to parameters, dimensionality of matrix and vectors is number of equations
|
---|
939 | private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, double[] parameterValues) {
|
---|
940 | return (
|
---|
941 | int Ns, // number of parameters
|
---|
942 | double t, // current time
|
---|
943 | IntPtr y, // N_Vector y(t) (input)
|
---|
944 | IntPtr ydot, // N_Vector dy/dt(t) (input)
|
---|
945 | IntPtr yS, // N_Vector*, one vector for each parameter (input)
|
---|
946 | IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
|
---|
947 | IntPtr user_data, // optional (unused here)
|
---|
948 | IntPtr tmp1, // N_Vector, optional (unused here)
|
---|
949 | IntPtr tmp2 // N_Vector, optional (unused here)
|
---|
950 | ) => {
|
---|
951 | // here we need to calculate partial derivatives for the calculated variables y as well as for the parameters
|
---|
952 | var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
|
---|
953 | var d = calculatedVariables.Length + parameterValues.Length; // dimensionality of gradient
|
---|
954 | // first collect variable values
|
---|
955 | foreach (var tree in trees) {
|
---|
956 | foreach (var n in tree.IterateNodesPrefix()) {
|
---|
957 | if (IsVariableNode(n)) {
|
---|
958 | // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
|
---|
959 | var varName = n.Symbol.Name;
|
---|
960 | var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
|
---|
961 | if (varIdx < 0) throw new InvalidProgramException();
|
---|
962 |
|
---|
963 | var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
|
---|
964 | var gArr = new double[d]; // backing array
|
---|
965 | gArr[varIdx] = 1.0;
|
---|
966 | var g = new Vector(gArr);
|
---|
967 | nodeValues.Add(n, Tuple.Create(y_i, g));
|
---|
968 | }
|
---|
969 | }
|
---|
970 | }
|
---|
971 | // then collect constants
|
---|
972 | int pIdx = 0;
|
---|
973 | foreach (var tree in trees) {
|
---|
974 | foreach (var n in tree.IterateNodesPrefix()) {
|
---|
975 | if (IsConstantNode(n)) {
|
---|
976 | var gArr = new double[d];
|
---|
977 | gArr[calculatedVariables.Length + pIdx] = 1.0;
|
---|
978 | var g = new Vector(gArr);
|
---|
979 | nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], g));
|
---|
980 | pIdx++;
|
---|
981 | }
|
---|
982 | }
|
---|
983 | }
|
---|
984 | // gradient vector is [∂f/∂y_1, ∂f/∂y_2, ... ∂f/∂yN, ∂f/∂p_1 ... ∂f/∂p_K]
|
---|
985 |
|
---|
986 |
|
---|
987 | for (pIdx = 0; pIdx < Ns; pIdx++) {
|
---|
988 | unsafe {
|
---|
989 | var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
|
---|
990 | CVODES.N_VConst_Serial(0.0, sDot_pi);
|
---|
991 | }
|
---|
992 | }
|
---|
993 |
|
---|
994 | for (int i = 0; i < trees.Length; i++) {
|
---|
995 | var tree = trees[i];
|
---|
996 | var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
|
---|
997 | var g = res.Item2;
|
---|
998 |
|
---|
999 |
|
---|
1000 | // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i
|
---|
1001 |
|
---|
1002 | for (pIdx = 0; pIdx < Ns; pIdx++) {
|
---|
1003 | unsafe {
|
---|
1004 | var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
|
---|
1005 | var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);
|
---|
1006 |
|
---|
1007 | var v = CVODES.NV_Get_Ith_S(sDot_pi, i);
|
---|
1008 | // (∂f/∂y)s_i(t)
|
---|
1009 | var p = 0.0;
|
---|
1010 | for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
|
---|
1011 | p += g[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
|
---|
1012 | }
|
---|
1013 | // + ∂f/∂p_i
|
---|
1014 | CVODES.NV_Set_Ith_S(sDot_pi, i, v + p + g[calculatedVariables.Length + pIdx]);
|
---|
1015 | }
|
---|
1016 | }
|
---|
1017 |
|
---|
1018 | }
|
---|
1019 | return 0; // on success
|
---|
1020 | };
|
---|
1021 | }
|
---|
1022 | */
|
---|
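    // For reference, the forward sensitivity system solved by the (currently disabled) CVODES code above:
    // for each parameter p_i the sensitivity s_i(t) = ∂y(t)/∂p_i evolves according to
    //
    //   ds_i/dt = (∂f/∂y) s_i(t) + ∂f/∂p_i
    //
    // where ∂f/∂y is the Jacobian of the RHS w.r.t. the state variables. A minimal sketch for a
    // single scalar equation (hypothetical names, not part of the CVODES API):
    //
    //   // dy/dt = f(y, p) = p * y   =>   ∂f/∂y = p,  ∂f/∂p = y
    //   double SensitivityRhs(double y, double p, double s) => p * s + y; // (∂f/∂y)*s + ∂f/∂p
    //
    // The loop over pIdx above accumulates exactly this expression for every parameter and every equation.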
    #endregion

    private static void IntegrateHL(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables, // names of integrated variables
      Dictionary<string, Tuple<double, Vector>> variableValues, // y (input and output); input: y(t0), output: y(t0+1)
      double[] parameterValues,
      int numericIntegrationSteps) {

      var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();

      // the nodeValues for parameters are constant over time
      // TODO: this needs to be done only once for each iteration in gradient descent (whenever parameter values change)
      // NOTE: the order must be the same as above (prefix order for nodes)
      int pIdx = 0;
      foreach (var tree in trees) {
        foreach (var node in tree.Root.IterateNodesPrefix()) {
          if (IsConstantNode(node)) {
            var gArr = new double[parameterValues.Length]; // backing array
            gArr[pIdx] = 1.0;
            var g = new Vector(gArr);
            nodeValues.Add(node, new Tuple<double, Vector>(parameterValues[pIdx], g));
            pIdx++;
          } else if (node.SubtreeCount == 0) {
            // for (latent) variables get the values from variableValues
            var varName = node.Symbol.Name;
            nodeValues.Add(node, variableValues[varName]);
          }
        }
      }

      double[] deltaF = new double[calculatedVariables.Length];
      Vector[] deltaG = new Vector[calculatedVariables.Length];

      double h = 1.0 / numericIntegrationSteps;
      for (int step = 0; step < numericIntegrationSteps; step++) {
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];

          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
          deltaF[i] = f;
          deltaG[i] = g;
        }

        // update variableValues for the next step (explicit Euler step for values and gradients)
        for (int i = 0; i < trees.Length; i++) {
          var varName = calculatedVariables[i];
          var oldVal = variableValues[varName];
          var newVal = Tuple.Create(
            oldVal.Item1 + h * deltaF[i],
            oldVal.Item2 + deltaG[i].Scale(h)
          );
          variableValues[varName] = newVal;
        }

        // TODO perf
        foreach (var node in nodeValues.Keys.ToArray()) {
          if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
            // update values for (latent) variables
            var varName = node.Symbol.Name;
            nodeValues[node] = variableValues[varName];
          }
        }
      }
    }
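    // A minimal, self-contained sketch of the integration scheme used by IntegrateHL above
    // (hypothetical example method, not called anywhere): explicit Euler with step size
    // h = 1/numericIntegrationSteps, here for the scalar ODE dy/dt = -y.
    private static double EulerExampleStep(double y0, int numericIntegrationSteps) {
      double h = 1.0 / numericIntegrationSteps;
      double y = y0;
      for (int step = 0; step < numericIntegrationSteps; step++) {
        double f = -y;   // RHS evaluation (IntegrateHL uses InterpretRec for this)
        y = y + h * f;   // Euler update, same form as the variableValues update above
      }
      return y; // approaches y0 * exp(-1) as the number of steps grows
    }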

    private static void InterpretRec(
      ISymbolicExpressionTreeNode node,
      Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> nodeValues, // contains value and gradient vector for a node (variables and constants only)
      out double z,
      out Vector dz
    ) {
      double f, g;
      Vector df, dg;
      switch (node.Symbol.Name) {
        case "+": {
            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
            InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
            z = f + g;
            dz = df + dg;
            break;
          }
        case "*": {
            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
            InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
            z = f * g;
            dz = df * g + f * dg; // product rule: (f*g)' = f'*g + f*g'
            break;
          }
        case "-": {
            if (node.SubtreeCount == 1) {
              InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
              z = -f;
              dz = -df;
            } else {
              InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
              InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
              z = f - g;
              dz = df - dg;
            }
            break;
          }
        case "%": {
            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
            InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);

            // protected division: return 0 (with zero gradient) whenever the denominator is ≈ 0
            if (g.IsAlmost(0.0)) {
              z = 0;
              dz = Vector.Zero;
            } else {
              z = f / g;
              dz = -f / (g * g) * dg + df / g; // quotient rule: (f/g)' = f'/g - f*g'/g²
            }
            break;
          }
        case "sin": {
            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
            z = Math.Sin(f);
            dz = Math.Cos(f) * df;
            break;
          }
        case "cos": {
            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
            z = Math.Cos(f);
            dz = -Math.Sin(f) * df;
            break;
          }
        case "sqr": {
            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
            z = f * f;
            dz = 2.0 * f * df;
            break;
          }
        default: {
            // terminal symbol: look up value and gradient (variables and numeric parameters)
            var t = nodeValues[node];
            z = t.Item1;
            dz = Vector.CreateNew(t.Item2);
            break;
          }
      }
    }
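    // InterpretRec above is a forward-mode automatic differentiation interpreter: every subtree
    // evaluation yields a value together with its gradient vector, combined via the usual
    // differentiation rules. A minimal scalar sketch of the same idea (hypothetical helper,
    // not used elsewhere) for u(x) = sin(x) * x:
    private static Tuple<double, double> ForwardModeExample(double x) {
      // seed: value x, derivative dx/dx = 1
      double v = x, dv = 1.0;
      // sin(x): chain rule, mirroring the "sin" case of InterpretRec
      double s = Math.Sin(v), ds = Math.Cos(v) * dv;
      // sin(x) * x: product rule, mirroring the "*" case of InterpretRec
      double z = s * v, dz = ds * v + s * dv;
      return Tuple.Create(z, dz); // (value, derivative)
    }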
    #endregion

    #region events
    /*
     * Dependencies between parameters:
     *
     * ProblemData
     *    |
     *    V
     * TargetVariables   FunctionSet   MaximumLength   NumberOfLatentVariables
     *       |                |              |                   |
     *       V                V              |                   |
     *     Grammar <----------+--------------+-------------------
     *       |
     *       V
     *    Encoding
     */
    private void RegisterEventHandlers() {
      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;

      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;

      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;

      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;

      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
    }

    private void NumLatentVariablesChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void MaximumLengthChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
    }

    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
      UpdateGrammarAndEncoding();
    }

    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
    }

    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
      UpdateGrammarAndEncoding();
    }

    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
      ProblemDataParameter.Value.Changed += ProblemData_Changed;
      OnProblemDataChanged();
      OnReset();
    }

    private void ProblemData_Changed(object sender, EventArgs e) {
      OnProblemDataChanged();
      OnReset();
    }

    private void OnProblemDataChanged() {
      UpdateTargetVariables(); // implicitly updates other dependent parameters
      var handler = ProblemDataChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    #endregion

    #region helper

    private void InitAllParameters() {
      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
    }

    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
      var l = new CheckedItemList<StringValue>();
      l.Add(new StringValue("+").AsReadOnly());
      l.Add(new StringValue("*").AsReadOnly());
      l.Add(new StringValue("%").AsReadOnly());
      l.Add(new StringValue("-").AsReadOnly());
      l.Add(new StringValue("sin").AsReadOnly());
      l.Add(new StringValue("cos").AsReadOnly());
      l.Add(new StringValue("sqr").AsReadOnly());
      return l.AsReadOnly();
    }

    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name[0] == 'θ';
    }
    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name[0] == 'λ';
    }
    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
    }

    private void UpdateTargetVariables() {
      var currentlySelectedVariables = TargetVariables.CheckedItems
        .OrderBy(i => i.Index)
        .Select(i => i.Value.Value)
        .ToArray();

      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      // restore the checked state for previously selected variables
      foreach (var item in newVariablesList) {
        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
      }
      TargetVariablesParameter.Value = newVariablesList;
    }

    private void UpdateGrammarAndEncoding() {
      var encoding = new MultiEncoding();
      var g = CreateGrammar();
      foreach (var targetVar in TargetVariables.CheckedItems) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar.Value.Value + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
      }
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
      }
      Encoding = encoding;
    }
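    // For example, with checked target variables {x, y} and one latent variable, the resulting
    // MultiEncoding contains three tree encodings named "x_tree", "y_tree" and "λ1_tree"
    // (illustrative values; the actual names depend on the problem configuration).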

    private ISymbolicExpressionGrammar CreateGrammar() {
      // whenever ProblemData is changed we create a new grammar with the necessary symbols
      var g = new SimpleSymbolicExpressionGrammar();
      var unaryFunc = new string[] { "sin", "cos", "sqr" };
      var binaryFunc = new string[] { "+", "-", "*", "%" };
      foreach (var func in unaryFunc) {
        if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
      }
      foreach (var func in binaryFunc) {
        if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
      }

      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
        g.AddTerminalSymbol(variableName);

      // generate symbols for numeric parameters for which the value is optimized using AutoDiff
      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
      var numericConstantsFactor = 2.0;
      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
      }

      // generate terminal symbols for latent variables
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        g.AddTerminalSymbol("λ" + i); // latent variable whose trajectory is modelled by an additional tree
      }

      return g;
    }
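    // For example, for a dataset with allowed input {u}, one checked target {x} and
    // NumberOfLatentVariables = 1, the grammar would contain the checked function symbols,
    // the terminals u and x, the numeric parameters θ0..θ3 (factor 2.0 times two variables),
    // and the latent variable terminal λ1 (illustrative configuration).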

    // returns a copy of the tree in which numeric parameter nodes (θ…) are replaced by named placeholder symbols c_0, c_1, ...
    private ISymbolicExpressionTreeNode FixParameters(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
      ISymbolicExpressionTreeNode translatedNode = null;
      if (n.Symbol is StartSymbol) {
        translatedNode = new StartSymbol().CreateTreeNode();
      } else if (n.Symbol is ProgramRootSymbol) {
        translatedNode = new ProgramRootSymbol().CreateTreeNode();
      } else if (n.Symbol.Name == "+") {
        translatedNode = new SimpleSymbol("+", 2).CreateTreeNode();
      } else if (n.Symbol.Name == "-") {
        translatedNode = new SimpleSymbol("-", 2).CreateTreeNode();
      } else if (n.Symbol.Name == "*") {
        translatedNode = new SimpleSymbol("*", 2).CreateTreeNode();
      } else if (n.Symbol.Name == "%") {
        translatedNode = new SimpleSymbol("%", 2).CreateTreeNode();
      } else if (n.Symbol.Name == "sin") {
        translatedNode = new SimpleSymbol("sin", 1).CreateTreeNode();
      } else if (n.Symbol.Name == "cos") {
        translatedNode = new SimpleSymbol("cos", 1).CreateTreeNode();
      } else if (n.Symbol.Name == "sqr") {
        translatedNode = new SimpleSymbol("sqr", 1).CreateTreeNode();
      } else if (IsConstantNode(n)) {
        translatedNode = new SimpleSymbol("c_" + nextParIdx, 0).CreateTreeNode();
        nextParIdx++;
      } else {
        translatedNode = new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();
      }
      foreach (var child in n.Subtrees) {
        translatedNode.AddSubtree(FixParameters(child, parameterValues, ref nextParIdx));
      }
      return translatedNode;
    }

    // translates a tree to the standard symbolic regression symbols; numeric parameter nodes (θ…)
    // are replaced by constants with the optimized parameter values
    private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
      ISymbolicExpressionTreeNode translatedNode = null;
      if (n.Symbol is StartSymbol) {
        translatedNode = new StartSymbol().CreateTreeNode();
      } else if (n.Symbol is ProgramRootSymbol) {
        translatedNode = new ProgramRootSymbol().CreateTreeNode();
      } else if (n.Symbol.Name == "+") {
        translatedNode = new Addition().CreateTreeNode();
      } else if (n.Symbol.Name == "-") {
        translatedNode = new Subtraction().CreateTreeNode();
      } else if (n.Symbol.Name == "*") {
        translatedNode = new Multiplication().CreateTreeNode();
      } else if (n.Symbol.Name == "%") {
        translatedNode = new Division().CreateTreeNode();
      } else if (n.Symbol.Name == "sin") {
        translatedNode = new Sine().CreateTreeNode();
      } else if (n.Symbol.Name == "cos") {
        translatedNode = new Cosine().CreateTreeNode();
      } else if (n.Symbol.Name == "sqr") {
        translatedNode = new Square().CreateTreeNode();
      } else if (IsConstantNode(n)) {
        var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
        constNode.Value = parameterValues[nextParIdx];
        nextParIdx++;
        translatedNode = constNode;
      } else {
        // assume a variable name
        var varName = n.Symbol.Name;
        var varNode = (VariableTreeNode)new Variable().CreateTreeNode();
        varNode.Weight = 1.0;
        varNode.VariableName = varName;
        translatedNode = varNode;
      }
      foreach (var child in n.Subtrees) {
        translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
      }
      return translatedNode;
    }
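    // A minimal usage sketch for TranslateTreeNode (hypothetical, assuming an evolved tree and
    // its optimized parameter vector are at hand): the shared nextParIdx threads the parameter
    // index through the whole tree in prefix order, matching the θ-node order used elsewhere.
    //
    //   int nextParIdx = 0;
    //   var translatedRoot = TranslateTreeNode(tree.Root, optimizedParameters, ref nextParIdx);
    //   var displayTree = new SymbolicExpressionTree(translatedRoot);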
    #endregion

    #region Import & Export
    public void Load(IRegressionProblemData data) {
      Name = data.Name;
      Description = data.Description;
      ProblemData = data;
    }

    public IRegressionProblemData Export() {
      return ProblemData;
    }

    // bundles all information needed to evaluate a set of trees as a system of ODEs
    public class OptimizationData {
      public readonly ISymbolicExpressionTree[] trees;
      public readonly string[] targetVariables;
      public readonly IRegressionProblemData problemData;
      public readonly double[,] targetValues;
      public readonly IntRange[] episodes;
      public readonly int numericIntegrationSteps;
      public readonly string[] latentVariables;
      public readonly string odeSolver;

      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, IRegressionProblemData problemData, double[,] targetValues, IntRange[] episodes, int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
        this.trees = trees;
        this.targetVariables = targetVars;
        this.problemData = problemData;
        this.targetValues = targetValues;
        this.episodes = episodes;
        this.numericIntegrationSteps = numericIntegrationSteps;
        this.latentVariables = latentVariables;
        this.odeSolver = odeSolver;
      }
    }
    #endregion

  }
}