
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16616

Last change on this file since 16616 was 16616, checked in by gkronber, 5 years ago

#2925 added crossover probability (important for multi-encoding in this case), re-added scaling of targets

File size: 81.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  [Item("Dynamical Systems Modelling Problem", "Identification of dynamical systems (systems of ODEs) from observed data.")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }

    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated over time and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps worsen the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training; each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));

      var solversStr = new string[] { "HeuristicLab" /* , "CVODES" */ };
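      // note: the CVODES solver is currently disabled here; its implementation is commented out in the CVODES region below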
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
      // TODO: use training range as default training episode
      // TODO: write back optimized parameters to solution?
      // TODO: optimization of starting values for latent variables in CVODES solver
      // TODO: allow to specify the name for the time variable in the dataset and allow variable step-sizes
      // TODO: check grammars (input variables) after cloning
    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        throw new NotImplementedException();
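        // NOTE: the per-episode optimization code below is unreachable until the NotImplementedException above is removed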
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          // double[] optTheta;
          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
          // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        // double[] optTheta;
        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
        // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
        return nmse;
      }
    }

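    /// <summary>
    /// Two-stage parameter fitting for the given episodes: constants are first pre-tuned against
    /// finite differences of the targets and then refined by full numeric integration.
    /// Returns the (bounded) normalized MSE.
    /// </summary>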
    public static double OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver) {

      // extract constants from tree
      var constantNodes = trees.Select(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().ToArray()).ToArray();
      var initialTheta = constantNodes.Select(nodes => nodes.Select(n => n.Value).ToArray()).ToArray();

      // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
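      // (for equidistant rows the differences y(t+1) - y(t) approximate dy/dt with Δt = 1)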
      double nmse = PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxParameterOptIterations,
        initialTheta, out double[] pretunedParameters);

      // optimize parameters using integration of f(x,y) to calculate y(t)
      nmse = OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
        out double[] optTheta);
      // var optTheta = pretunedParameters;

      if (double.IsNaN(nmse) ||
        double.IsInfinity(nmse) ||
        nmse > 100 * trees.Length * episodes.Sum(ep => ep.Size))
        return 100 * trees.Length * episodes.Sum(ep => ep.Size);

      // update tree nodes with optimized values
      var paramIdx = 0;
      for (var treeIdx = 0; treeIdx < constantNodes.Length; treeIdx++) {
        for (int i = 0; i < constantNodes[treeIdx].Length; i++)
          constantNodes[treeIdx][i].Value = optTheta[paramIdx++];
      }
      return nmse;
    }

    private static double PreTuneParameters(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      double[][] initialTheta,
      out double[] optTheta) {
      var thetas = new List<double>();
      double nmse = 0.0;
      var maxTreeNmse = 100 * episodes.Sum(ep => ep.Size);

      // NOTE: the order of values in the parameter vector matches the prefix order of constant nodes in the trees
      for (int treeIdx = 0; treeIdx < trees.Length; treeIdx++) {
        var t = trees[treeIdx];

        var targetValuesDiff = new List<double>();
        foreach (var ep in episodes) {
          var episodeRows = Enumerable.Range(ep.Start, ep.Size);
          var targetValues = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], episodeRows).ToArray();
          targetValuesDiff.AddRange(targetValues.Skip(1).Zip(targetValues, (t1, t0) => t1 - t0)); // TODO: smoothing or multi-pole
        }
        var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End - 1)); // because we lose the last row in the differencing step
        var myState = new OptimizationData(new[] { t },
          targetVars,
          problemData.AllowedInputVariables.Concat(targetVars).ToArray(),
          problemData, new[] { targetValuesDiff.ToArray() }, adjustedEpisodes.ToArray(), -99, latentVariables, string.Empty); // TODO
        var paramCount = myState.nodeValueLookup.ParameterCount;

        optTheta = new double[0];
        if (initialTheta[treeIdx].Length > 0) {
          try {
            alglib.minlmstate state;
            alglib.minlmreport report;
            var p = new double[initialTheta[treeIdx].Length];
            var lowerBounds = Enumerable.Repeat(-1000.0, p.Length).ToArray();
            var upperBounds = Enumerable.Repeat(1000.0, p.Length).ToArray();
            Array.Copy(initialTheta[treeIdx], p, p.Length);
            alglib.minlmcreatevj(targetValuesDiff.Count, p, out state);
            alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
            alglib.minlmsetbc(state, lowerBounds, upperBounds);
#if DEBUG
            //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
            alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

            alglib.minlmresults(state, out optTheta, out report);
            if (report.terminationtype < 0) {
#if DEBUG
              if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
#endif
              optTheta = initialTheta[treeIdx];
            }
          } catch (alglib.alglibexception) {
            optTheta = initialTheta[treeIdx];
          }
        }
        var treeNmse = EvaluateMSE(optTheta, myState);
        if (double.IsNaN(treeNmse) || double.IsInfinity(treeNmse) || treeNmse > maxTreeNmse) {
          nmse += maxTreeNmse;
          thetas.AddRange(initialTheta[treeIdx]);
        } else {
          nmse += treeNmse;
          thetas.AddRange(optTheta);
        }
      } // foreach tree
      optTheta = thetas.ToArray();

      return nmse;
    }

    // similar to above but this time we integrate and optimize all parameters for all targets concurrently
    private static double OptimizeParameters(ISymbolicExpressionTree[] trees, IRegressionProblemData problemData, string[] targetVars, string[] latentVariables,
      IEnumerable<IntRange> episodes, int maxParameterOptIterations, double[] initialTheta, int numericIntegrationSteps, string odeSolver, out double[] optTheta) {
      var rowsForDataExtraction = episodes.SelectMany(e => Enumerable.Range(e.Start, e.Size)).ToArray();
      var targetValues = new double[trees.Length][];
      for (int treeIdx = 0; treeIdx < trees.Length; treeIdx++) {
        var t = trees[treeIdx];

        targetValues[treeIdx] = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], rowsForDataExtraction).ToArray();
      }

      var myState = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
      optTheta = initialTheta;

      if (initialTheta.Length > 0) {
        var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
        var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
        try {
          alglib.minlmstate state;
          alglib.minlmreport report;
          alglib.minlmcreatevj(rowsForDataExtraction.Length * trees.Length, initialTheta, out state);
          alglib.minlmsetbc(state, lowerBounds, upperBounds);
          alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
#if DEBUG
          //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
          alglib.minlmoptimize(state, IntegrateAndEvaluateObjectiveVector, IntegrateAndEvaluateObjectiveVectorAndJacobian, null, myState);

          alglib.minlmresults(state, out optTheta, out report);

          if (report.terminationtype < 0) {
#if DEBUG
            if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
#endif
            // there was a problem: reset theta and evaluate for initial values
            optTheta = initialTheta;
          }
        } catch (alglib.alglibexception) {
          optTheta = initialTheta;
        }
      }
      var nmse = EvaluateIntegratedMSE(optTheta, myState);
      var maxNmse = 100 * targetValues.Length * rowsForDataExtraction.Length;
      if (double.IsNaN(nmse) || double.IsInfinity(nmse) || nmse > maxNmse) nmse = maxNmse;
      return nmse;
    }

    // helper
    public static double EvaluateMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Length * optimizationData.trees.Length]; // one residual per row and tree (EvaluateObjectiveVector fills rows × trees entries)
      EvaluateObjectiveVector(x, fi, optimizationData);
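      // residuals are standardized by 1/sd(y) (see EvaluateObjectiveVector), so this mean of squares is an NMSE-style error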
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      var rows = optimizationData.rows;
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      var variables = optimizationData.variables;

      nodeValueLookup.UpdateParamValues(x);

      int outputIdx = 0;
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
        }
        // interpret all trees
        for (int treeIdx = 0; treeIdx < optimizationData.trees.Length; treeIdx++) {
          var tree = optimizationData.trees[treeIdx];
          var pred = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup);
          var y = optimizationData.targetValues[treeIdx][trainIdx];
          fi[outputIdx++] = (y - pred) * optimizationData.inverseStandardDeviation[treeIdx];
        }
      }
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      // extract variable values from dataset
      var variableValues = new Dictionary<string, Tuple<double, Vector>>();
      var problemData = optimizationData.problemData;
      var ds = problemData.Dataset;
      var rows = optimizationData.rows;
      var variables = optimizationData.variables;

      var nodeValueLookup = optimizationData.nodeValueLookup;
      nodeValueLookup.UpdateParamValues(x);

      int termIdx = 0;

      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
        }

        var calculatedVariables = optimizationData.targetVariables;

        var trees = optimizationData.trees;
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var targetVarName = calculatedVariables[i];

          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup, out f, out g);

          var y = optimizationData.targetValues[i][trainIdx];
          fi[termIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale of NMSE
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[termIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];

          termIdx++;
        }
      }
    }

    // helper
    public static double EvaluateIntegratedMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Length * optimizationData.targetVariables.Length];
      IntegrateAndEvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { IntegrateAndEvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      var rows = optimizationData.rows.ToArray();
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      int outputIdx = 0;

      nodeValueLookup.UpdateParamValues(x);

      Integrate(optimizationData, fi, jac);
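      // fi now holds the integrated predictions (and jac their parameter sensitivities);
      // the loop below replaces them with scaled residuals / scaled negative gradients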
      var trees = optimizationData.trees;

      // update result with error
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var y = optimizationData.targetValues[i][trainIdx];
          fi[outputIdx] = (y - fi[outputIdx]) * optimizationData.inverseStandardDeviation[i];  // scale for normalized squared error
          if (jac != null) for (int j = 0; j < x.Length; j++) jac[outputIdx, j] = -jac[outputIdx, j] * optimizationData.inverseStandardDeviation[i];
          outputIdx++;
        }
      }
    }

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
      if (!results.ContainsKey("Squared error and gradient")) {
        results.Add(new Result("Squared error and gradient", typeof(DataTable)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        throw new NotSupportedException();
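        // NOTE: the per-episode analysis code below is unreachable until the NotSupportedException above is removed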
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();

        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        var optimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var trainingPrediction = Integrate(optimizationData).ToArray();

        var numParams = optimizationData.nodeValueLookup.ParameterCount;
        // for target values and latent variables
        var trainingRows = optimizationData.rows;
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            for (int paramIdx = 0; paramIdx < numParams; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", trainingPrediction.Select(arr => arr[colIdx].Item2[paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        var errorTable = new DataTable("Squared error and gradient");
        var seRow = new DataRow("Squared error");
        var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        errorTable.Rows.Add(seRow);
        foreach (var gRow in gradientRows) {
          gRow.VisualProperties.SecondYAxis = true;
          errorTable.Rows.Add(gRow);
        }
        var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        int r = 0;

        foreach (var y_pred in trainingPrediction) {
          // calculate objective function gradient
          double f_i = 0.0;
          Vector g_i = Vector.CreateNew(new double[numParams]);
          for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
            var y_pred_f = y_pred[colIdx].Item1;
            var y = targetValues[colIdx][r];

            var res = (y - y_pred_f) * optimizationData.inverseStandardDeviation[colIdx];
            var ressq = res * res;
            f_i += ressq;
            g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res));
          }
          seRow.Values.Add(f_i);
          for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
          r++;
        }
        results["Squared error and gradient"].Value = errorTable;

        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
        var testPrediction = Integrate(testOptimizationData).ToArray();

        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var testDataTable = new DataTable(targetVar + " prediction (test)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            testDataTable.Rows.Add(actualValuesRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var testDataTable = new DataTable(latentVar + " prediction (test)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            testDataTable.Rows.Add(emptyRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);
          }
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();

        #region simplification of models
        // TODO: the dependency on HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var clonedTrees = new List<ISymbolicExpressionTree>();
        for (int idx = 0; idx < trees.Length; idx++) {
          clonedTrees.Add((ISymbolicExpressionTree)trees[idx].Clone());
        }
        var ds = problemData.Dataset;
        var newProblemData = new RegressionProblemData((IDataset)ds.Clone(), problemData.AllowedInputVariables, problemData.TargetVariable);
        results["Solution"].Value = new Solution(clonedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   TrainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);

        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(tree);
          models.Add(simplifiedTreeVar);
        }

        results["Models"].Value = models;
        #endregion
      }
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
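
    // Forward sensitivities s_i(t) = ∂y(t)/∂θ_i satisfy the ODE
    //   ds_i/dt = (∂f/∂y)·s_i + ∂f/∂θ_i,   with s_i(t0) = 0,
    // which is integrated forward in time alongside y(t) (cf. CreateSensitivityRhs in the commented-out CVODES region below).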

    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData) {
      var nTargets = optimizationData.targetVariables.Length;
      var n = optimizationData.rows.Length * optimizationData.targetVariables.Length;
      var d = optimizationData.nodeValueLookup.ParameterCount;
      double[] fi = new double[n];
      double[,] jac = new double[n, d];
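      // fi and jac are laid out row-major: entry index = rowIdx * nTargets + targetIdx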
      Integrate(optimizationData, fi, jac);
      for (int i = 0; i < optimizationData.rows.Length; i++) {
        var res = new Tuple<double, Vector>[nTargets];
        for (int j = 0; j < nTargets; j++) {
          res[j] = Tuple.Create(fi[i * nTargets + j], Vector.CreateFromMatrixRow(jac, i * nTargets + j));
        }
        yield return res;
      }
    }

    public static void Integrate(OptimizationData optimizationData, double[] fi, double[,] jac) {
      var trees = optimizationData.trees;
      var dataset = optimizationData.problemData.Dataset;
      var inputVariables = optimizationData.problemData.AllowedInputVariables.ToArray();
      var targetVariables = optimizationData.targetVariables;
      var latentVariables = optimizationData.latentVariables;
      var episodes = optimizationData.episodes;
      var odeSolver = optimizationData.odeSolver;
      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;
      var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding

      var nodeValues = optimizationData.nodeValueLookup;

      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var outputRowIdx = 0;
      var episodeIdx = 0;
      foreach (var episode in optimizationData.episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start).ToArray();

        var t0 = rows.First();

        // initialize values for inputs and targets from dataset
        foreach (var varName in inputVariables) {
          var y0 = dataset.GetDoubleValue(varName, t0);
          nodeValues.SetVariableValue(varName, y0, Vector.Zero);
        }
        foreach (var varName in targetVariables) {
          var y0 = dataset.GetDoubleValue(varName, t0);
          nodeValues.SetVariableValue(varName, y0, Vector.Zero);

          // output starting value
          fi[outputRowIdx] = y0;
          Vector.Zero.CopyTo(jac, outputRowIdx);

          outputRowIdx++;
        }

        { // CODE BELOW DOESN'T WORK ANYMORE
          // if (latentVariables.Length > 0) throw new NotImplementedException();
          //
          // // add value entries for latent variables which are also integrated
          // // initial values are at the end of the parameter vector
          // // separate initial values for each episode
          // var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
          // foreach (var latentVar in latentVariables) {
          //   var arr = new double[parameterValues.Length]; // backing array
          //   arr[initialValueIdx] = 1.0;
          //   var g = new Vector(arr);
          //   nodeValues.SetVariableValue(latentVar, parameterValues[initialValueIdx], g); // we don't have observations for latent variables therefore we optimize the initial value for each episode
          //   initialValueIdx++;
          // }
        }

        var prevT = t0; // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
        foreach (var t in rows.Skip(1)) {
          if (odeSolver == "HeuristicLab")
            IntegrateHL(trees, calculatedVariables, nodeValues, numericIntegrationSteps); // integrator updates nodeValues
          else if (odeSolver == "CVODES")
            throw new NotImplementedException();
          // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;

          for (int i = 0; i < calculatedVariables.Length; i++) {
            var targetVar = calculatedVariables[i];
            var yt = nodeValues.GetVariableValue(targetVar);

            // fill up remaining rows with last valid value if there are invalid values
            if (double.IsNaN(yt.Item1) || double.IsInfinity(yt.Item1)) {
              for (; outputRowIdx < fi.Length; outputRowIdx++) {
                var prevIdx = outputRowIdx - calculatedVariables.Length;
                fi[outputRowIdx] = fi[prevIdx]; // current <- prev
                if (jac != null) for (int j = 0; j < jac.GetLength(1); j++) jac[outputRowIdx, j] = jac[prevIdx, j];
              }
              return;
            }

            fi[outputRowIdx] = yt.Item1;
            var g = yt.Item2;
            g.CopyTo(jac, outputRowIdx);
            outputRowIdx++;
          }

          // update for next time step (only the inputs)
          foreach (var varName in inputVariables) {
            nodeValues.SetVariableValue(varName, dataset.GetDoubleValue(varName, t), Vector.Zero);
          }
        }
        episodeIdx++;
      }
    }

    #region CVODES

    /*
    /// <summary>
    ///  Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
    /// </summary>
    /// <param name="trees">Each equation in the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="variableValues">The start values of the calculated variables as well as their sensitivities over parameters</param>
    /// <param name="parameterValues">The current parameter values</param>
    /// <param name="t">The time t up to which we need to integrate.</param>
    private static void IntegrateCVODES(
      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
      string[] calculatedVariables, // names of elements of y
      Dictionary<string, Tuple<double, Vector>> variableValues,  // y (input and output); input: y(t0), output: y(t0+t)
      double[] parameterValues, // p
      double t // duration t for which we want to integrate
      ) {

      // the RHS of the ODE
      // dy/dt = f(y_t,x_t,p)
      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, parameterValues);
      // the Jacobian ∂f/∂y
      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, parameterValues);

      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, parameterValues);

      // setup solver
      int numberOfEquations = trees.Length;
      IntPtr y = IntPtr.Zero;
      IntPtr cvode_mem = IntPtr.Zero;
      IntPtr A = IntPtr.Zero;
      IntPtr yS0 = IntPtr.Zero;
      IntPtr linearSolver = IntPtr.Zero;
      var ns = parameterValues.Length; // number of parameters

      try {
        y = CVODES.N_VNew_Serial(numberOfEquations);
        // init y to current values of variables
        // y must be initialized before calling CVodeInit
        for (int i = 0; i < calculatedVariables.Length; i++) {
          CVODES.NV_Set_Ith_S(y, i, variableValues[calculatedVariables[i]].Item1);
        }

        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);

        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
        Assert(CVODES.CV_SUCCESS == flag);

        double relTol = 1.0e-2;
        double absTol = 1.0;
        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
        Assert(CVODES.CV_SUCCESS == flag);

        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
        Assert(A != IntPtr.Zero);

        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
        Assert(linearSolver != IntPtr.Zero);

        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
        Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
        Assert(CVODES.CV_SUCCESS == flag);

        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
        unsafe {
          // set to initial sensitivities supplied by caller
          for (int pIdx = 0; pIdx < ns; pIdx++) {
            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
              CVODES.NV_Set_Ith_S(yS0_i, varIdx, variableValues[calculatedVariables[varIdx]].Item2[pIdx]);
            }
          }
        }

        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
        Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
        Assert(CVODES.CV_SUCCESS == flag);

        // make one forward integration step
        double tout = 0.0; // first output time
        flag = CVODES.CVode(cvode_mem, t, y, ref tout, CVODES.CV_NORMAL);
        if (flag == CVODES.CV_SUCCESS) {
          Assert(t == tout);

          // get sensitivities
          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
          Assert(CVODES.CV_SUCCESS == flag);

          // update variableValues based on integration results
          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
            var gArr = new double[parameterValues.Length];
            for (var pIdx = 0; pIdx < parameterValues.Length; pIdx++) {
              unsafe {
                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
              }
            }
            variableValues[calculatedVariables[varIdx]] = Tuple.Create(yi, new Vector(gArr));
          }
        } else {
          variableValues.Clear();   // indicate problems by not returning new values
        }

        // cleanup all allocated objects
      } finally {
        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
      }
    }

    private static CVODES.CVRhsFunc CreateOdeRhs(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {
      // we don't need to calculate a gradient here
      return (double t,
              IntPtr y, // N_Vector, current value of y (input)
              IntPtr ydot, // N_Vector, calculated value of y' (output)
              IntPtr user_data // optional user data (unused here)
              ) => {
                // TODO: perf
                var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();

                int pIdx = 0;
                foreach (var tree in trees) {
                  foreach (var n in tree.IterateNodesPrefix()) {
                    if (IsConstantNode(n)) {
                      nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we do not need a gradient
                      pIdx++;
                    } else if (n.SubtreeCount == 0) {
                      // for variables and latent variables get the value from variableValues
                      var varName = n.Symbol.Name;
                      var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                      if (varIdx < 0) throw new InvalidProgramException();
                      var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                      nodeValues.Add(n, Tuple.Create(y_i, Vector.Zero)); // no gradient needed
                    }
                  }
                }
                for (int i = 0; i < trees.Length; i++) {
                  var tree = trees[i];
                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
                  CVODES.NV_Set_Ith_S(ydot, i, res_i.Item1);
                }
                return 0;
              };
    }

    private static CVODES.CVDlsJacFunc CreateJac(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {

      return (
        double t, // current time (input)
        IntPtr y, // N_Vector, current value of y (input)
        IntPtr fy, // N_Vector, current value of f (input)
        IntPtr Jac, // SUNMatrix ∂f/∂y (output; row i contains the ∂f_i/∂y vector)
993        IntPtr user_data, // optional (unused here)
994        IntPtr tmp1, // N_Vector, optional (unused here)
995        IntPtr tmp2, // N_Vector, optional (unused here)
996        IntPtr tmp3 // N_Vector, optional (unused here)
997      ) => {
998        // here we need to calculate partial derivatives for the calculated variables y
999        var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1000        int pIdx = 0;
1001        foreach (var tree in trees) {
1002          foreach (var n in tree.IterateNodesPrefix()) {
1003            if (IsConstantNode(n)) {
1004              nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
1005              pIdx++;
1006            } else if (n.SubtreeCount == 0) {
1007              // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
1008              var varName = n.Symbol.Name;
1009              var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
1010              if (varIdx < 0) throw new InvalidProgramException();
1011
1012              var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1013              var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
1014              gArr[varIdx] = 1.0;
1015              var g = new Vector(gArr);
1016              nodeValues.Add(n, Tuple.Create(y_i, g));
1017            }
1018          }
1019        }
1020
1021        for (int i = 0; i < trees.Length; i++) {
1022          var tree = trees[i];
1023          var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
1024          var g = res.Item2;
1025          for (int j = 0; j < calculatedVariables.Length; j++) {
1026            CVODES.SUNDenseMatrix_Set(Jac, i, j, g[j]);
1027          }
1028        }
1029        return 0; // on success
1030      };
1031    }
1032
1033
1034    // to calculate sensitivities RHS for all equations at once
1035    // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store in ySdot.
1036    // Index i refers to parameters, dimensionality of matrix and vectors is number of equations
1037    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, double[] parameterValues) {
1038      return (
1039              int Ns, // number of parameters
1040              double t, // current time
1041              IntPtr y, // N_Vector y(t) (input)
1042              IntPtr ydot, // N_Vector dy/dt(t) (input)
1043              IntPtr yS, // N_Vector*, one vector for each parameter (input)
1044              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
1045              IntPtr user_data, // optional (unused here)
1046              IntPtr tmp1, // N_Vector, optional (unused here)
1047              IntPtr tmp2 // N_Vector, optional (unused here)
1048        ) => {
1049          // here we need to calculate partial derivatives for the calculated variables y as well as for the parameters
1050          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1051          var d = calculatedVariables.Length + parameterValues.Length; // dimensionality of gradient
1052          // first collect variable values
1053          foreach (var tree in trees) {
1054            foreach (var n in tree.IterateNodesPrefix()) {
1055              if (IsVariableNode(n)) {
1056                // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
1057                var varName = n.Symbol.Name;
1058                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
1059                if (varIdx < 0) throw new InvalidProgramException();
1060
1061                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1062                var gArr = new double[d]; // backing array
1063                gArr[varIdx] = 1.0;
1064                var g = new Vector(gArr);
1065                nodeValues.Add(n, Tuple.Create(y_i, g));
1066              }
1067            }
1068          }
1069          // then collect constants
1070          int pIdx = 0;
1071          foreach (var tree in trees) {
1072            foreach (var n in tree.IterateNodesPrefix()) {
1073              if (IsConstantNode(n)) {
1074                var gArr = new double[d];
1075                gArr[calculatedVariables.Length + pIdx] = 1.0;
1076                var g = new Vector(gArr);
1077                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], g));
1078                pIdx++;
1079              }
1080            }
1081          }
1082          // gradient vector is [∂f/∂y_1, ∂f/∂y_2, ..., ∂f/∂y_N, ∂f/∂p_1, ..., ∂f/∂p_K]
1083
1084
1085          for (pIdx = 0; pIdx < Ns; pIdx++) {
1086            unsafe {
1087              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1088              CVODES.N_VConst_Serial(0.0, sDot_pi);
1089            }
1090          }
1091
1092          for (int i = 0; i < trees.Length; i++) {
1093            var tree = trees[i];
1094            var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
1095            var g = res.Item2;
1096
1097
1098            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i
1099
1100            for (pIdx = 0; pIdx < Ns; pIdx++) {
1101              unsafe {
1102                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1103                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);
1104
1105                var v = CVODES.NV_Get_Ith_S(sDot_pi, i);
1106                // (∂f/∂y)s_i(t)
1107                var p = 0.0;
1108                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
1109                  p += g[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
1110                }
1111                // + ∂f/∂p_i
1112                CVODES.NV_Set_Ith_S(sDot_pi, i, v + p + g[calculatedVariables.Length + pIdx]);
1113              }
1114            }
1115
1116          }
1117          return 0; // on success
1118        };
1119    }
1120    */
1121    #endregion
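    // Note: the CVODES-based Jacobian and sensitivity callbacks above are part of the
    // commented-out region; the managed fixed-step integrator IntegrateHL below is used instead.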
1122
1123    private static void IntegrateHL(
1124      ISymbolicExpressionTree[] trees,
1125      string[] calculatedVariables, // names of integrated variables
1126      NodeValueLookup nodeValues,
1127      int numericIntegrationSteps) {
1128
1129
1130      double[] deltaF = new double[calculatedVariables.Length];
1131      Vector[] deltaG = new Vector[calculatedVariables.Length];
1132
1133      double h = 1.0 / numericIntegrationSteps;
1134      for (int step = 0; step < numericIntegrationSteps; step++) {
1135
1136        // evaluate all trees
1137        for (int i = 0; i < trees.Length; i++) {
1138          var tree = trees[i];
1139
1140          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
1141          double f; Vector g;
1142          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
1143          deltaF[i] = f;
1144          deltaG[i] = g;
1145        }
1146
1147        // update variable values for the next step (explicit Euler step, applied to values and gradients alike)
1148        for (int i = 0; i < trees.Length; i++) {
1149          var varName = calculatedVariables[i];
1150          var oldVal = nodeValues.GetVariableValue(varName);
1151          nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2.Add(deltaG[i].Scale(h)));
1152        }
1153      }
1154    }
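    // IntegrateHL performs fixed-step explicit Euler integration over one unit time interval,
    // propagating parameter gradients alongside the values:
    //   y_{k+1} = y_k + h·f(y_k),   ∇y_{k+1} = ∇y_k + h·∇f(y_k),   h = 1/numericIntegrationSteps.
    // A minimal scalar sketch of the same scheme (illustration only, not used by this class):
    //   double y = y0;
    //   for (int k = 0; k < steps; k++) y += h * f(y);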
1155
1156    // TODO: use an existing interpreter implementation instead
1157    private static double InterpretRec(ISymbolicExpressionTreeNode node, NodeValueLookup nodeValues) {
1158      if (node is ConstantTreeNode) {
1159        return ((ConstantTreeNode)node).Value;
1160      } else if (node is VariableTreeNode) {
1161        return nodeValues.NodeValue(node);
1162      } else if (node.Symbol is Addition) {
1163        Assert(node.SubtreeCount == 2);
1164        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1165        var g = InterpretRec(node.GetSubtree(1), nodeValues);
1166        return f + g;
1167      } else if (node.Symbol is Multiplication) {
1168        Assert(node.SubtreeCount == 2);
1169        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1170        var g = InterpretRec(node.GetSubtree(1), nodeValues);
1171        return f * g;
1172      } else if (node.Symbol is Subtraction) {
1173        Assert(node.SubtreeCount <= 2);
1174        if (node.SubtreeCount == 1) {
1175          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1176          return -f;
1177        } else {
1178          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1179          var g = InterpretRec(node.GetSubtree(1), nodeValues);
1180
1181          return f - g;
1182        }
1183      } else if (node.Symbol is Division) {
1184        Assert(node.SubtreeCount <= 2);
1185
1186        if (node.SubtreeCount == 1) {
1187          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1188          // protected division
1189          if (f.IsAlmost(0.0)) {
1190            return 0;
1191          } else {
1192            return 1.0 / f;
1193          }
1194        } else {
1195          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1196          var g = InterpretRec(node.GetSubtree(1), nodeValues);
1197
1198          // protected division
1199          if (g.IsAlmost(0.0)) {
1200            return 0;
1201          } else {
1202            return f / g;
1203          }
1204        }
1205      } else if (node.Symbol is Sine) {
1206        Assert(node.SubtreeCount == 1);
1207
1208        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1209        return Math.Sin(f);
1210      } else if (node.Symbol is Cosine) {
1211        Assert(node.SubtreeCount == 1);
1212
1213        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1214        return Math.Cos(f);
1215      } else if (node.Symbol is Square) {
1216        Assert(node.SubtreeCount == 1);
1217
1218        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1219        return f * f;
1220      } else if (node.Symbol is Exponential) {
1221        Assert(node.SubtreeCount == 1);
1222
1223        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1224        return Math.Exp(f);
1225      } else if (node.Symbol is Logarithm) {
1226        Assert(node.SubtreeCount == 1);
1227
1228        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1229        return Math.Log(f);
1230      } else throw new NotSupportedException("unsupported symbol");
1231    }
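    // Note: division is "protected": whenever the denominator is (almost) zero the result is
    // defined as 0 instead of ±∞, a common convention in genetic programming that keeps every
    // sampled expression evaluable over the whole dataset.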
1232
1233    private static void Assert(bool cond) {
1234#if DEBUG
1235      if (!cond) throw new InvalidOperationException("Assertion failed");
1236#endif
1237    }
1238
1239    private static void InterpretRec(
1240      ISymbolicExpressionTreeNode node,
1241      NodeValueLookup nodeValues,      // contains value and gradient vector for a node (variables and constants only)
1242      out double z,
1243      out Vector dz
1244      ) {
1245      double f, g;
1246      Vector df, dg;
1247      if (node.Symbol is Constant || node.Symbol is Variable) {
1248        z = nodeValues.NodeValue(node);
1249        dz = Vector.CreateNew(nodeValues.NodeGradient(node)); // original gradient vectors are never changed by evaluation
1250      } else if (node.Symbol is Addition) {
1251
1252        Assert(node.SubtreeCount == 2);
1253        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1254        InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1255        z = f + g;
1256        dz = df.Add(dg);
1257      } else if (node.Symbol is Multiplication) {
1258
1259        Assert(node.SubtreeCount == 2);
1260        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1261        InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1262        z = f * g;
1263        dz = df.Scale(g).Add(dg.Scale(f));  // f'*g + f*g'
1264
1265      } else if (node.Symbol is Subtraction) {
1266
1267        Assert(node.SubtreeCount <= 2);
1268        if (node.SubtreeCount == 1) {
1269          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1270          z = -f;
1271          dz = df.Scale(-1.0);
1272        } else {
1273          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1274          InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1275          z = f - g;
1276          dz = df.Subtract(dg);
1277        }
1278
1279      } else if (node.Symbol is Division) {
1280
1281        Assert(node.SubtreeCount <= 2);
1282        if (node.SubtreeCount == 1) {
1283          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1284          // protected division
1285          if (f.IsAlmost(0.0)) {
1286            z = 0;
1287            dz = Vector.Zero;
1288          } else {
1289            z = 1.0 / f;
1290            dz = df.Scale(-z * z); // d(1/f)/df = -1/f²
1291          }
1292        } else {
1293          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1294          InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1295
1296          // protected division
1297          if (g.IsAlmost(0.0)) {
1298            z = 0;
1299            dz = Vector.Zero;
1300          } else {
1301            var inv_g = 1.0 / g;
1302            z = f * inv_g;
1303
1304            dz = dg.Scale(-f * inv_g * inv_g).Add(df.Scale(inv_g));
1305          }
1306        }
1307
1308      } else if (node.Symbol is Sine) {
1309
1310        Assert(node.SubtreeCount == 1);
1311        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1312        z = Math.Sin(f);
1313        dz = df.Scale(Math.Cos(f));
1314
1315      } else if (node.Symbol is Cosine) {
1316
1317        Assert(node.SubtreeCount == 1);
1318        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1319        z = Math.Cos(f);
1320        dz = df.Scale(-Math.Sin(f));
1321      } else if (node.Symbol is Square) {
1322
1323        Assert(node.SubtreeCount == 1);
1324        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1325        z = f * f;
1326        dz = df.Scale(2.0 * f);
1327      } else if (node.Symbol is Exponential) {
1328
1329        Assert(node.SubtreeCount == 1);
1330        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1331        z = Math.Exp(f);
1332        dz = df.Scale(Math.Exp(f));
1333      } else if (node.Symbol is Logarithm) {
1334
1335        Assert(node.SubtreeCount == 1);
1336        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1337        z = Math.Log(f);
1338        dz = df.Scale(1.0 / f);
1339      } else {
1340        throw new NotSupportedException("unsupported symbol");
1341      }
1342    }
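    // This overload of InterpretRec implements forward-mode automatic differentiation: each call
    // returns the value z of a subtree together with its gradient vector dz with respect to all
    // parameters. It is equivalent to evaluating with dual numbers, e.g. for multiplication:
    //   (f, df) · (g, dg) = (f·g, df·g + f·dg)
    // so a single bottom-up pass yields both the prediction and the gradient needed for
    // parameter optimization.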
1343
1344    #endregion
1345
1346    #region events
1347    /*
1348     * Dependencies between parameters:
1349     *
1350     * ProblemData
1351     *    |
1352     *    V
1353     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
1354     *               |   |                 |                   |
1355     *               V   V                 |                   |
1356     *             Grammar <---------------+-------------------
1357     *                |
1358     *                V
1359     *            Encoding
1360     */
1361    private void RegisterEventHandlers() {
1362      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
1363      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
1364
1365      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
1366      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1367
1368      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
1369      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1370
1371      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
1372
1373      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
1374    }
1375
1376    private void NumLatentVariablesChanged(object sender, EventArgs e) {
1377      UpdateGrammarAndEncoding();
1378    }
1379
1380    private void MaximumLengthChanged(object sender, EventArgs e) {
1381      UpdateGrammarAndEncoding();
1382    }
1383
1384    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
1385      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1386    }
1387
1388    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1389      UpdateGrammarAndEncoding();
1390    }
1391
1392    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
1393      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1394    }
1395
1396    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1397      UpdateGrammarAndEncoding();
1398    }
1399
1400    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
1401      ProblemDataParameter.Value.Changed += ProblemData_Changed;
1402      OnProblemDataChanged();
1403      OnReset();
1404    }
1405
1406    private void ProblemData_Changed(object sender, EventArgs e) {
1407      OnProblemDataChanged();
1408      OnReset();
1409    }
1410
1411    private void OnProblemDataChanged() {
1412      UpdateTargetVariables();        // implicitly updates other dependent parameters
1413      var handler = ProblemDataChanged;
1414      if (handler != null) handler(this, EventArgs.Empty);
1415    }
1416
1417    #endregion
1418
1419    #region helper
1420
1421    private void InitAllParameters() {
1422      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
1423    }
1424
1425    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
1426      var l = new CheckedItemList<StringValue>();
1427      l.Add(new StringValue("Addition").AsReadOnly());
1428      l.Add(new StringValue("Multiplication").AsReadOnly());
1429      l.Add(new StringValue("Division").AsReadOnly());
1430      l.Add(new StringValue("Subtraction").AsReadOnly());
1431      l.Add(new StringValue("Sine").AsReadOnly());
1432      l.Add(new StringValue("Cosine").AsReadOnly());
1433      l.Add(new StringValue("Square").AsReadOnly());
1434      return l.AsReadOnly();
1435    }
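    // Logarithm and Exponential are deliberately absent from the default function set; the
    // corresponding grammar symbols are also disabled in CreateGrammar below ("not supported yet").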
1436
1437    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
1438      // return n.Symbol.Name[0] == 'θ';
1439      return n is ConstantTreeNode;
1440    }
1441    private static double GetConstantValue(ISymbolicExpressionTreeNode n) {
1442      return ((ConstantTreeNode)n).Value;
1443    }
1444    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
1445      return n.Symbol.Name[0] == 'λ';
1446    }
1447    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
1448      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
1449    }
1450    private static string GetVariableName(ISymbolicExpressionTreeNode n) {
1451      return ((VariableTreeNode)n).VariableName;
1452    }
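    // These predicates rely on the naming convention established in UpdateGrammarAndEncoding:
    // latent variables are generated as "λ1", "λ2", ..., so IsLatentVariableNode can dispatch
    // on the first character of the symbol name.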
1453
1454    private void UpdateTargetVariables() {
1455      var currentlySelectedVariables = TargetVariables.CheckedItems
1456        .OrderBy(i => i.Index)
1457        .Select(i => i.Value.Value)
1458        .ToArray();
1459
1460      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
1461      // re-check the variables that were selected before the problem data changed
1462      foreach (var item in newVariablesList) {
1463        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
1464      }
1469      TargetVariablesParameter.Value = newVariablesList;
1470    }
1471
1472    private void UpdateGrammarAndEncoding() {
1473      var encoding = new MultiEncoding();
1474      var g = CreateGrammar();
1475      foreach (var targetVar in TargetVariables.CheckedItems) {
1476        var e = new SymbolicExpressionTreeEncoding(targetVar.Value.Value + "_tree", g, MaximumLength, MaximumLength);
1477        var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
1478        var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1479        // make sure our multi-manipulator is the only manipulator
1480        e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1481
1482        // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
1483        var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
1484        foreach (var xover in subtreeCrossovers) {
1485          xover.CrossoverProbability.Value = 0.3;
1486        }
1487
1488        encoding = encoding.Add(e); // only limit by length
1489      }
1490      for (int i = 1; i <= NumberOfLatentVariables; i++) {
1491        var e = new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength);
1492        var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
1493        var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1494        // make sure our multi-manipulator is the only manipulator
1495        e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1496
1497        // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
1498        var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
1499        foreach (var xover in subtreeCrossovers) {
1500          xover.CrossoverProbability.Value = 0.3;
1501        }
1502
1503        encoding = encoding.Add(e);
1504      }
1505      Encoding = encoding;
1506    }
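    // Design note: with a MultiEncoding an individual consists of one tree per target variable
    // (plus one per latent variable). Setting CrossoverProbability = 0.3 on each SubtreeCrossover
    // makes it unlikely that all component trees are crossed in the same operation, so crossover
    // tends to change only a subset of the equations at a time.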
1507
1508    private ISymbolicExpressionGrammar CreateGrammar() {
1509      var grammar = new TypeCoherentExpressionGrammar();
1510      grammar.StartGrammarManipulation();
1511
1512      var problemData = ProblemData;
1513      var ds = problemData.Dataset;
1514      grammar.MaximumFunctionArguments = 0;
1515      grammar.MaximumFunctionDefinitions = 0;
1516      var allowedVariables = problemData.AllowedInputVariables.Concat(TargetVariables.CheckedItems.Select(chk => chk.Value.Value));
1517      foreach (var varSymbol in grammar.Symbols.OfType<HeuristicLab.Problems.DataAnalysis.Symbolic.VariableBase>()) {
1518        if (!varSymbol.Fixed) {
1519          varSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<double>(x));
1520          varSymbol.VariableNames = allowedVariables.Where(x => ds.VariableHasType<double>(x));
1521        }
1522      }
1523      foreach (var factorSymbol in grammar.Symbols.OfType<BinaryFactorVariable>()) {
1524        if (!factorSymbol.Fixed) {
1525          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
1526          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
1527          factorSymbol.VariableValues = factorSymbol.VariableNames
1528            .ToDictionary(varName => varName, varName => ds.GetStringValues(varName).Distinct().ToList());
1529        }
1530      }
1531      foreach (var factorSymbol in grammar.Symbols.OfType<FactorVariable>()) {
1532        if (!factorSymbol.Fixed) {
1533          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
1534          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
1535          factorSymbol.VariableValues = factorSymbol.VariableNames
1536            .ToDictionary(varName => varName,
1537            varName => ds.GetStringValues(varName).Distinct()
1538            .Select((n, i) => Tuple.Create(n, i))
1539            .ToDictionary(tup => tup.Item1, tup => tup.Item2));
1540        }
1541      }
1542
1543      grammar.ConfigureAsDefaultRegressionGrammar();
1544      grammar.GetSymbol("Logarithm").Enabled = false; // not supported yet
1545      grammar.GetSymbol("Exponential").Enabled = false; // not supported yet
1546
1547      // configure initialization of constants
1548      var constSy = (Constant)grammar.GetSymbol("Constant");
1549      // max and min are only relevant for initialization
1550      constSy.MaxValue = +1.0e-1; // small initial values for constant opt
1551      constSy.MinValue = -1.0e-1;
1552      constSy.MultiplicativeManipulatorSigma = 1.0; // allow large jumps for manipulation
1553      constSy.ManipulatorMu = 0.0;
1554      constSy.ManipulatorSigma = 1.0; // allow large jumps
1555
1556      // configure initialization of variables
1557      var varSy = (Variable)grammar.GetSymbol("Variable");
1558      // fix variable weights to 1.0
1559      varSy.WeightMu = 1.0;
1560      varSy.WeightSigma = 0.0;
1561      varSy.WeightManipulatorMu = 0.0;
1562      varSy.WeightManipulatorSigma = 0.0;
1563      varSy.MultiplicativeWeightManipulatorSigma = 0.0;
1564
1565      foreach (var f in FunctionSet) {
1566        grammar.GetSymbol(f.Value).Enabled = FunctionSet.ItemChecked(f);
1567      }
1568
1569      grammar.FinishedGrammarManipulation();
1570      return grammar;
1571      // // whenever ProblemData is changed we create a new grammar with the necessary symbols
1572      // var g = new SimpleSymbolicExpressionGrammar();
1573      // var unaryFunc = new string[] { "sin", "cos", "sqr" };
1574      // var binaryFunc = new string[] { "+", "-", "*", "%" };
1575      // foreach (var func in unaryFunc) {
1576      //   if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
1577      // }
1578      // foreach (var func in binaryFunc) {
1579      //   if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
1580      // }
1581      //
1582      // foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
1583      //   g.AddTerminalSymbol(variableName);
1584      //
1585      // // generate symbols for numeric parameters for which the value is optimized using AutoDiff
1586      // // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
1587      // var numericConstantsFactor = 2.0;
1588      // for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
1589      //   g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
1590      // }
1591      //
1592      // // generate symbols for latent variables
1593      // for (int i = 1; i <= NumberOfLatentVariables; i++) {
1594      //   g.AddTerminalSymbol("λ" + i); // numeric parameter for which the value is optimized using AutoDiff
1595      // }
1596      //
1597      // return g;
1598    }
1599    #endregion
1600
1601
1602    #region Import & Export
1603    public void Load(IRegressionProblemData data) {
1604      Name = data.Name;
1605      Description = data.Description;
1606      ProblemData = data;
1607    }
1608
1609    public IRegressionProblemData Export() {
1610      return ProblemData;
1611    }
1612    #endregion
1613
1614
1615    // TODO: for integration we only need a subset of the data that is needed for optimization
1616
1617    public class OptimizationData {
1618      public readonly ISymbolicExpressionTree[] trees;
1619      public readonly string[] targetVariables;
1620      public readonly IRegressionProblemData problemData;
1621      public readonly double[][] targetValues;
1622      public readonly double[] inverseStandardDeviation;
1623      public readonly IntRange[] episodes;
1624      public readonly int numericIntegrationSteps;
1625      public readonly string[] latentVariables;
1626      public readonly string odeSolver;
1627      public readonly NodeValueLookup nodeValueLookup;
1628      public readonly int[] rows;
1629      internal readonly string[] variables;
1630
1631      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, string[] inputVariables,
1632        IRegressionProblemData problemData,
1633        double[][] targetValues,
1634        IntRange[] episodes,
1635        int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
1636        this.trees = trees;
1637        this.targetVariables = targetVars;
1638        this.problemData = problemData;
1639        this.targetValues = targetValues;
1640        this.variables = inputVariables;
1641        if (targetValues != null) {
1642          this.inverseStandardDeviation = new double[targetValues.Length];
1643          for (int i = 0; i < targetValues.Length; i++) {
1644            // calculate the standard deviation for each episode separately and average them
1645            var epStartIdx = 0;
1646            var stdevs = new List<double>();
1647            foreach (var ep in episodes) {
1648              var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
1649              stdevs.Add(epValues.StandardDeviation());
1650              epStartIdx += ep.Size;
1651            }
1652            inverseStandardDeviation[i] = 1.0 / stdevs.Average();
1653          }
1654        } else
1655          this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
1656        this.episodes = episodes;
1657        this.numericIntegrationSteps = numericIntegrationSteps;
1658        this.latentVariables = latentVariables;
1659        this.odeSolver = odeSolver;
1660        this.nodeValueLookup = new NodeValueLookup(trees);
1661        this.rows = episodes.SelectMany(ep => Enumerable.Range(ep.Start, ep.Size)).ToArray();
1662      }
1663    }
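    // The averaged per-episode standard deviations give each target variable a natural scale;
    // storing the inverse allows residuals to be normalized by a single multiplication so that
    // targets of different magnitude can contribute comparably.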
1664
1665    public class NodeValueLookup {
1666      private readonly Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> node2val = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1667      private readonly Dictionary<string, List<ISymbolicExpressionTreeNode>> name2nodes = new Dictionary<string, List<ISymbolicExpressionTreeNode>>();
1668      private readonly ConstantTreeNode[] constantNodes;
1669      private readonly Vector[] constantGradientVectors;
1670
1671      // private readonly Dictionary<int, ISymbolicExpressionTreeNode> paramIdx2node = new Dictionary<int, ISymbolicExpressionTreeNode>();
1672
1673      public double NodeValue(ISymbolicExpressionTreeNode node) => node2val[node].Item1;
1674      public Vector NodeGradient(ISymbolicExpressionTreeNode node) => node2val[node].Item2;
1675
1676      public NodeValueLookup(ISymbolicExpressionTree[] trees) {
1677
1678        this.constantNodes = trees.SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>()).ToArray();
1679        constantGradientVectors = new Vector[constantNodes.Length];
1680        for (int paramIdx = 0; paramIdx < constantNodes.Length; paramIdx++) {
1681          constantGradientVectors[paramIdx] = Vector.CreateIndicator(length: constantNodes.Length, idx: paramIdx);
1682
1683          var node = constantNodes[paramIdx];
1684          node2val[node] = Tuple.Create(node.Value, constantGradientVectors[paramIdx]);
1685        }
1686
1687        foreach (var tree in trees) {
1688          foreach (var node in tree.IterateNodesPrefix().Where(IsVariableNode)) {
1689            var varName = GetVariableName(node);
1690            if (!name2nodes.TryGetValue(varName, out List<ISymbolicExpressionTreeNode> nodes)) {
1691              nodes = new List<ISymbolicExpressionTreeNode>();
1692              name2nodes.Add(varName, nodes);
1693            }
1694            nodes.Add(node);
1695            SetVariableValue(varName, 0.0);  // this value is updated in the prediction loop
1696          }
1697        }
1698      }
1699
1700      public int ParameterCount => constantNodes.Length;
1701
1702      public void SetVariableValue(string variableName, double val) {
1703        SetVariableValue(variableName, val, Vector.Zero);
1704      }
1705      public Tuple<double, Vector> GetVariableValue(string variableName) {
1706        return node2val[name2nodes[variableName].First()];
1707      }
1708      public void SetVariableValue(string variableName, double val, Vector dVal) {
1709        if (name2nodes.TryGetValue(variableName, out List<ISymbolicExpressionTreeNode> nodes)) {
1710          nodes.ForEach(n => node2val[n] = Tuple.Create(val, dVal));
1711        } else {
1712          var fakeNode = new VariableTreeNode(new Variable());
1713          fakeNode.Weight = 1.0;
1714          fakeNode.VariableName = variableName;
1715          var newNodeList = new List<ISymbolicExpressionTreeNode>();
1716          newNodeList.Add(fakeNode);
1717          name2nodes.Add(variableName, newNodeList);
1718          node2val[fakeNode] = Tuple.Create(val, dVal);
1719        }
1720      }
1721
1722      internal void UpdateParamValues(double[] x) {
1723        for (int i = 0; i < x.Length; i++) {
1724          constantNodes[i].Value = x[i];
1725          node2val[constantNodes[i]] = Tuple.Create(x[i], constantGradientVectors[i]);
1726        }
1727      }
1728    }
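    // Typical usage of NodeValueLookup (sketch, illustration only; "expr" and "y1" are
    // hypothetical placeholders):
    //   var lookup = new NodeValueLookup(trees);
    //   lookup.UpdateParamValues(x);          // push the current parameter vector x
    //   lookup.SetVariableValue("y1", 1.5);   // set a variable value (gradient defaults to zero)
    //   double z; Vector dz;
    //   InterpretRec(expr, lookup, out z, out dz); // value and gradient of one tree
    // The constant nodes double as the parameter vector: ParameterCount constants are indexed in
    // prefix order over all trees, and each receives an indicator gradient vector.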
1729  }
1730}