
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16999

Last change on this file: r16999, checked in by gkronber, 5 years ago

#2925: Added optimization of weights for variables and added an integration method which uses CVODES to integrate over the whole episode (without input variables)

File size: 107.4 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;
using HEAL.Attic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
using System.Runtime.InteropServices;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableType("065C6A61-773A-42C9-9DE5-61A5D1D823EB")]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<Problem> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumPretuningParameterOptimizationIterationsParameterName = "Max. pre-tuning parameter optimization iterations";
    private const string MaximumOdeParameterOptimizationIterationsParameterName = "Max. ODE parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string TestEpisodesParameterName = "Test episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }

    public IFixedValueParameter<IntValue> MaximumPretuningParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumPretuningParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumOdeParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumOdeParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TestEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TestEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> PretuningErrorWeight {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["Pretuning NMSE weight"]; }
    }
    public IFixedValueParameter<DoubleValue> OdeErrorWeight {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["ODE NMSE weight"]; }
    }
    public IFixedValueParameter<DoubleValue> NumericDifferencesSmoothingParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["Numeric differences smoothing"]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumPretuningParameterOptimizationIterations {
      get { return MaximumPretuningParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int MaximumOdeParameterOptimizationIterations {
      get { return MaximumOdeParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IList<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public IList<IntRange> TestEpisodes {
      get { return TestEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }
    public double NumericDifferencesSmoothing {
      get { return NumericDifferencesSmoothingParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }
    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }
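
    // Note on the objective (a summary of what the evaluation functions below compute):
    // the "NMSE" minimized here is the mean squared error of each target scaled by the
    // inverse variance of that target,
    //   NMSE = (1/n) * sum_i ((y_i - pred_i) / sigma_y)^2 = MSE / Var(y),
    // so a model that always predicts the mean of y scores 1. The residual scaling by
    // optimizationData.inverseStandardDeviation in the evaluation functions implements this.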

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(StorableConstructorFlag _) : base(_) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      int iters = 100;
      if (Parameters.ContainsKey("Max. parameter optimization iterations")) {
        iters = ((IFixedValueParameter<IntValue>)Parameters["Max. parameter optimization iterations"]).Value.Value;
      }
      if (!Parameters.ContainsKey(MaximumPretuningParameterOptimizationIterationsParameterName)) {
        Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence of the optimization scheme. Default = 100", new IntValue(iters)));
      }
      if (!Parameters.ContainsKey(MaximumOdeParameterOptimizationIterationsParameterName)) {
        Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence of the optimization scheme. Default = 100", new IntValue(iters)));
      }

      if (!Parameters.ContainsKey("Pretuning NMSE weight"))
        Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
      if (!Parameters.ContainsKey("ODE NMSE weight"))
        Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));

      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence of the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence of the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps worsen the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TestEpisodesParameterName, "A list of ranges that should be used for validation, each range represents an independent episode. This overrides the TestSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("Numeric differences smoothing", "Determines the amount of smoothing for the numeric differences which are calculated for pre-tuning. Values from -8 to 8 are reasonable. Use a very low value if the data contains no noise. Default: 2.", new DoubleValue(2.0)));

      var solversStr = new string[] { "HeuristicLab", "CVODES", "CVODES (full)" };
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: optimization of starting values for latent variables in CVODES solver
      // TODO: allow to specify the name for the time variable in the dataset and allow variable step-sizes
    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (latentVariables.Any()) throw new NotSupportedException("latent variables are not supported"); // not sure if everything still works in combination with latent variables
      if (OptimizeParametersForEpisodes) {
        throw new NotImplementedException();
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          // double[] optTheta;
          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations);
          // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        // when no training episodes are specified we implicitly use the training partition from the problemData
        var trainingEpisodes = TrainingEpisodes;
        if (!trainingEpisodes.Any()) {
          trainingEpisodes = new List<IntRange>();
          trainingEpisodes.Add((IntRange)ProblemData.TrainingPartition.Clone());
        }
        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, trainingEpisodes, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations,
          PretuningErrorWeight.Value.Value, OdeErrorWeight.Value.Value, NumericDifferencesSmoothing);
        return nmse;
      }
    }

    public static double OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxPretuningParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver,
      int maxOdeParameterOptIterations,
      double pretuningErrorWeight = 0.5,
      double odeErrorWeight = 0.5,
      double numericDifferencesSmoothing = 2
      ) {
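
      // Two-stage parameter fitting (summarizing the steps below): (1) pre-tune the
      // parameters of each tree separately by fitting f(x,y) to numerically approximated
      // derivatives of the targets, (2) refine all parameters jointly by integrating the
      // full ODE system. The returned fitness is the weighted sum
      //   nmse = pretuningErrorWeight * NMSE_pretuning + odeErrorWeight * NMSE_ode.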

      // extract constants from trees (without trees for latent variables)
      var targetVariableTrees = trees.Take(targetVars.Length).ToArray();
      var latentVariableTrees = trees.Skip(targetVars.Length).ToArray();
      // var constantNodes = targetVariableTrees.Select(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().ToArray()).ToArray();
      // var initialTheta = constantNodes.Select(nodes => nodes.Select(n => n.Value).ToArray()).ToArray();
      var constantNodes = targetVariableTrees.Select(
        t => t.IterateNodesPrefix()
        .Where(n => n.SubtreeCount == 0) // select leaves
        .ToArray()).ToArray();
      var initialTheta = constantNodes.Select(
        a => a.Select(
          n => {
            if (n is VariableTreeNode varTreeNode) {
              return varTreeNode.Weight;
            } else if (n is ConstantTreeNode constTreeNode) {
              return constTreeNode.Value;
            } else throw new InvalidProgramException();
          }).ToArray()).ToArray();

      // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
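      // i.e. for each target variable y_k we solve the least-squares problem
      //   min_theta sum_i ( dy_k/dt(t_i) - f_k(x(t_i), y(t_i); theta) )^2
      // where dy_k/dt(t_i) are the smoothed numeric differences computed in PreTuneParameters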
      double[] pretunedParameters = initialTheta.SelectMany(v => v).ToArray();
      double nmse = 0;
      if (pretuningErrorWeight > 0 || maxPretuningParameterOptIterations > -1) {
        nmse += pretuningErrorWeight * PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes,
          maxPretuningParameterOptIterations, numericDifferencesSmoothing,
          initialTheta, out pretunedParameters);
      }

      // extend parameter vector to include parameters for latent variable trees
      pretunedParameters = pretunedParameters
        .Concat(latentVariableTrees
        .SelectMany(t => t.IterateNodesPrefix()
        .Where(n => n.SubtreeCount == 0)
        .Select(n => {
          if (n is VariableTreeNode varTreeNode) {
            return varTreeNode.Weight;
          } else if (n is ConstantTreeNode constTreeNode) {
            return constTreeNode.Value;
          } else throw new InvalidProgramException();
        })))
        .ToArray();

      double[] optTheta = pretunedParameters;
      if (odeErrorWeight > 0 || maxOdeParameterOptIterations > -1) {
        // optimize parameters using integration of f(x,y) to calculate y(t)
        nmse += odeErrorWeight * OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxOdeParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
          out optTheta);
      }
      // var optTheta = pretunedParameters;

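      // Penalty for degenerate fits: residuals are scaled to NMSE, so a constant mean
      // predictor scores about 1 per tree and row; 100 * #trees * #rows is therefore a
      // generous upper bound used as the worst-case fitness for NaN/Inf results.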
      if (double.IsNaN(nmse) ||
        double.IsInfinity(nmse) ||
        nmse > 100 * trees.Length * episodes.Sum(ep => ep.Size))
        return 100 * trees.Length * episodes.Sum(ep => ep.Size);

      // update tree nodes with optimized values
      var paramIdx = 0;
      for (var treeIdx = 0; treeIdx < constantNodes.Length; treeIdx++) {
        for (int i = 0; i < constantNodes[treeIdx].Length; i++) {
          if (constantNodes[treeIdx][i] is VariableTreeNode varTreeNode) {
            varTreeNode.Weight = optTheta[paramIdx++];
          } else if (constantNodes[treeIdx][i] is ConstantTreeNode constTreeNode) {
            constTreeNode.Value = optTheta[paramIdx++];
          }
        }
      }
      return nmse;
    }

    private static double PreTuneParameters(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      double numericDifferencesSmoothing, // for smoothing of numeric differences
      double[][] initialTheta,
      out double[] optTheta) {
      var thetas = new List<double>();
      double nmse = 0.0;
      var maxTreeNmse = 100 * episodes.Sum(ep => ep.Size);

      var targetTrees = trees.Take(targetVars.Length).ToArray();
      var latentTrees = trees.Skip(targetVars.Length).ToArray(); // trees for latent variables follow the target-variable trees

      // first calculate values of latent variables by integration
      if (latentVariables.Length > 0) {
        var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
        var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "NONE");

        var fi = new double[myState.rows.Length * targetVars.Length];
        var jac = new double[myState.rows.Length * targetVars.Length, myState.nodeValueLookup.ParameterCount];
        var latentValues = new double[myState.rows.Length, latentVariables.Length];
        Integrate(myState, fi, jac, latentValues);

        // add integrated latent variables to dataset
        var modifiedDataset = ((Dataset)problemData.Dataset).ToModifiable();
        foreach (var variable in latentVariables) {
          modifiedDataset.AddVariable(variable, Enumerable.Repeat(0.0, modifiedDataset.Rows).ToList()); // empty column
        }
        int predIdx = 0;
        foreach (var ep in episodes) {
          for (int r = ep.Start; r < ep.End; r++) {
            for (int latVarIdx = 0; latVarIdx < latentVariables.Length; latVarIdx++) {
              modifiedDataset.SetVariableValue(latentValues[predIdx, latVarIdx], latentVariables[latVarIdx], r);
            }
            predIdx++;
          }
        }

        problemData = new RegressionProblemData(modifiedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      }
      // NOTE: the order of values in the parameter vector matches the prefix order of the constant nodes in the trees
      for (int treeIdx = 0; treeIdx < targetTrees.Length; treeIdx++) {
        var t = targetTrees[treeIdx];

        // calculate the numerically approximated derivatives of the target values which serve as pretuning targets
        var targetValuesDiff = new List<double>();

        // TODO: smooth only once
        foreach (var ep in episodes) {
          var episodeRows = Enumerable.Range(ep.Start, ep.Size);
          var targetValues = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], episodeRows).ToArray();
          targetValuesDiff.AddRange(CalculateDifferences(targetValues, numericDifferencesSmoothing));
        }
        var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End));

        // data for input variables is assumed to be known
        // input variables in pretuning are all target variables and all variable names that occur in the tree
        var inputVariables = targetVars.Concat(t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName)).Distinct();

        var myState = new OptimizationData(new[] { t },
          targetVars,
          inputVariables.ToArray(),
          problemData, new[] { targetValuesDiff.ToArray() }, adjustedEpisodes.ToArray(), -99, latentVariables, string.Empty); // TODO
        var paramCount = myState.nodeValueLookup.ParameterCount;

        optTheta = initialTheta[treeIdx];
        if (initialTheta[treeIdx].Length > 0 && maxParameterOptIterations > -1) {
          try {
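            // Bounded Levenberg-Marquardt least squares via ALGLIB: minlmcreatevj sets up a
            // solver driven by the analytic objective vector and Jacobian callbacks below,
            // minlmsetcond with zero tolerances runs until the iteration limit is reached,
            // and minlmsetbc constrains every parameter to the box [-1000, 1000].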
            alglib.minlmstate state;
            alglib.minlmreport report;
            var p = new double[initialTheta[treeIdx].Length];
            var lowerBounds = Enumerable.Repeat(-1000.0, p.Length).ToArray();
            var upperBounds = Enumerable.Repeat(1000.0, p.Length).ToArray();
            Array.Copy(initialTheta[treeIdx], p, p.Length);
            alglib.minlmcreatevj(targetValuesDiff.Count, p, out state);
            alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
            alglib.minlmsetbc(state, lowerBounds, upperBounds);
#if DEBUG
            //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
            alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

            alglib.minlmresults(state, out optTheta, out report);
            if (report.terminationtype < 0) {
#if DEBUG
              if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation failed!");
#endif
              optTheta = initialTheta[treeIdx];
            }
          } catch (alglib.alglibexception) {
            optTheta = initialTheta[treeIdx];
          }
        }
        var tree_nmse = EvaluateMSE(optTheta, myState);
        if (double.IsNaN(tree_nmse) || double.IsInfinity(tree_nmse) || tree_nmse > maxTreeNmse) {
          nmse += maxTreeNmse;
          thetas.AddRange(initialTheta[treeIdx]);
        } else {
          nmse += tree_nmse;
          thetas.AddRange(optTheta);
        }
      } // foreach tree
      optTheta = thetas.ToArray();

      return nmse;
    }

    // similar to above but this time we integrate and optimize all parameters for all targets concurrently
    private static double OptimizeParameters(ISymbolicExpressionTree[] trees, IRegressionProblemData problemData, string[] targetVars, string[] latentVariables,
      IEnumerable<IntRange> episodes, int maxParameterOptIterations, double[] initialTheta, int numericIntegrationSteps, string odeSolver, out double[] optTheta) {
      var rowsForDataExtraction = episodes.SelectMany(e => Enumerable.Range(e.Start, e.Size)).ToArray();
      var targetValues = new double[targetVars.Length][];
      for (int treeIdx = 0; treeIdx < targetVars.Length; treeIdx++) {
        var t = trees[treeIdx];

        targetValues[treeIdx] = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], rowsForDataExtraction).ToArray();
      }

      // data for input variables is assumed to be known
      // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
      var inputVariables = trees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
        .Except(targetVars)
        .Except(latentVariables)
        .Distinct();

      var myState = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
      optTheta = initialTheta;

      if (initialTheta.Length > 0 && maxParameterOptIterations > -1) {
        var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
        var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
        try {
          alglib.minlmstate state;
          alglib.minlmreport report;
          alglib.minlmcreatevj(rowsForDataExtraction.Length * trees.Length, initialTheta, out state);
          alglib.minlmsetbc(state, lowerBounds, upperBounds);
          alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
#if DEBUG
          //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
          alglib.minlmoptimize(state, IntegrateAndEvaluateObjectiveVector, IntegrateAndEvaluateObjectiveVectorAndJacobian, null, myState);

          alglib.minlmresults(state, out optTheta, out report);

          if (report.terminationtype < 0) {
#if DEBUG
            if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation failed!");
#endif
            // there was a problem: reset theta and evaluate for initial values
            optTheta = initialTheta;
          }
        } catch (alglib.alglibexception) {
          optTheta = initialTheta;
        }
      }
      var nmse = EvaluateIntegratedMSE(optTheta, myState);
      var maxNmse = 100 * targetValues.Length * rowsForDataExtraction.Length;
      if (double.IsNaN(nmse) || double.IsInfinity(nmse) || nmse > maxNmse) nmse = maxNmse;
      return nmse;
    }


    // helper
    public static double EvaluateMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count()];
      EvaluateObjectiveVector(x, fi, optimizationData);
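      // fi contains residuals scaled by the inverse standard deviation of the target,
      // so the mean of the squared entries is a normalized mean squared error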
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      var rows = optimizationData.rows;
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      var variables = optimizationData.variables;

      nodeValueLookup.UpdateParamValues(x);

      int outputIdx = 0;
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          // if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
          //   nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          // } else {
          nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          // }
        }
        // interpret all trees
        for (int treeIdx = 0; treeIdx < optimizationData.trees.Length; treeIdx++) {
          var tree = optimizationData.trees[treeIdx];
          var pred = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup);
          var y = optimizationData.targetValues[treeIdx][trainIdx];
          fi[outputIdx++] = (y - pred) * optimizationData.inverseStandardDeviation[treeIdx];
        }
      }
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      // extract variable values from dataset
      var variableValues = new Dictionary<string, Tuple<double, Vector>>();
      var problemData = optimizationData.problemData;
      var ds = problemData.Dataset;
      var rows = optimizationData.rows;
      var variables = optimizationData.variables;

      var nodeValueLookup = optimizationData.nodeValueLookup;
      nodeValueLookup.UpdateParamValues(x);

      int termIdx = 0;

      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          // if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
          //   nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          // } else {
          nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          // }
        }

        var calculatedVariables = optimizationData.targetVariables;

        var trees = optimizationData.trees;
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var targetVarName = calculatedVariables[i];

          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup, out f, out g);

          var y = optimizationData.targetValues[i][trainIdx];
          fi[termIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale of NMSE
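          // d/dtheta_j of the residual (y - f(theta)) is -df/dtheta_j, hence the negated gradient: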
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[termIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];

          termIdx++;
        }
      }
    }

    // helper
    public static double EvaluateIntegratedMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count() * optimizationData.targetVariables.Length];
      IntegrateAndEvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { IntegrateAndEvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      var rows = optimizationData.rows.ToArray();
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      int outputIdx = 0;

      nodeValueLookup.UpdateParamValues(x);

      Integrate(optimizationData, fi, jac, null);
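      // after Integrate, fi holds the integrated predictions; the loop below converts
      // them in place to scaled residuals (and rescales the Jacobian accordingly)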
      var trees = optimizationData.trees;

      // update result with error
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        for (int i = 0; i < optimizationData.targetVariables.Length; i++) {
          var tree = trees[i];
          var y = optimizationData.targetValues[i][trainIdx];
          fi[outputIdx] = (y - fi[outputIdx]) * optimizationData.inverseStandardDeviation[i]; // scale for normalized squared error
          if (jac != null) for (int j = 0; j < x.Length; j++) jac[outputIdx, j] = -jac[outputIdx, j] * optimizationData.inverseStandardDeviation[i];
          outputIdx++;
        }
      }
    }

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("SNMSE values")) {
        var dt = new DataTable("SNMSE values");
        dt.Rows.Add(new DataRow("ODE SNMSE"));
        dt.Rows.Add(new DataRow("Fitness"));
        results.Add(new Result("SNMSE values", dt));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }

      // when no training episodes are specified we implicitly use the training partition from the problemData
      var trainingEpisodes = TrainingEpisodes;
      if (!trainingEpisodes.Any()) {
        trainingEpisodes = new List<IntRange>();
        trainingEpisodes.Add((IntRange)ProblemData.TrainingPartition.Clone());
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value)
        .OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        throw new NotSupportedException();
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();

        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        // data for input variables is assumed to be known
        // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
        var inputVariables = trees
          .SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
          .Except(targetVars)
          .Except(latentVariables)
          .Distinct();

        var optimizationData = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, trainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var numParams = optimizationData.nodeValueLookup.ParameterCount;

        var fi = new double[optimizationData.rows.Length * targetVars.Length];
        var jac = new double[optimizationData.rows.Length * targetVars.Length, numParams];
        var latentValues = new double[optimizationData.rows.Length, latentVariables.Length];
        Integrate(optimizationData, fi, jac, latentValues);

        // for target values and latent variables
        var trainingRows = optimizationData.rows;
        double trainingSNMSE = 0.0;
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var idx = Enumerable.Range(0, trainingRows.Length).Select(i => i * targetVars.Length + colIdx);
            var pred = idx.Select(i => fi[i]);
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, pred.ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            // again calculate the integrated error (regardless of how fitness is determined)
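            // SNMSE = (1/K) * sum_k MSE_k / Var(y_k) over the K target variables
            // (each summand below is already divided by targetVars.Length)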
            trainingSNMSE += actualValuesRow.Values.Zip(predictedValuesRow.Values, (a, p) => Math.Pow(a - p, 2)).Average() / actualValuesRow.Values.Variance() / targetVars.Length;

            for (int paramIdx = 0; paramIdx < numParams; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", idx.Select(i => jac[i, paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var idx = Enumerable.Range(0, trainingRows.Length);
            var pred = idx.Select(i => latentValues[i, colIdx - targetVars.Length]);
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, pred.ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        results.AddOrUpdateResult("ODE SNMSE", new DoubleValue(trainingSNMSE));
        var odeSNMSETable = (DataTable)results["SNMSE values"].Value;
        odeSNMSETable.Rows["ODE SNMSE"].Values.Add(trainingSNMSE);
        odeSNMSETable.Rows["Fitness"].Values.Add(bestIndividualAndQuality.Item2);

        // var errorTable = new DataTable("Squared error and gradient");
        // var seRow = new DataRow("Squared error");
        // var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        // errorTable.Rows.Add(seRow);
        // foreach (var gRow in gradientRows) {
        //   gRow.VisualProperties.SecondYAxis = true;
        //   errorTable.Rows.Add(gRow);
        // }
        // var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        // int r = 0;

        // foreach (var y_pred in fi) {
        //   // calculate objective function gradient
        //   double f_i = 0.0;
        //   Vector g_i = Vector.CreateNew(new double[numParams]);
        //   for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
        //     var y_pred_f = y_pred[colIdx].Item1;
        //     var y = targetValues[colIdx][r];
        //
        //     var res = (y - y_pred_f) * optimizationData.inverseStandardDeviation[colIdx];
        //     var ressq = res * res;
        //     f_i += ressq;
        //     g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res));
        //   }
        //   seRow.Values.Add(f_i);
        //   for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
        //   r++;
        // }
        // results["Squared error and gradient"].Value = errorTable;

        // only if there is a non-empty test partition
        if (ProblemData.TestIndices.Any()) {
          // TODO: DRY for training and test

          var testList = new ItemList<DataTable>();
          var testRows = ProblemData.TestIndices.ToArray();
          var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
          var testPrediction = Integrate(testOptimizationData).ToArray();

          for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
            // is target variable
            if (colIdx < targetVars.Length) {
              var targetVar = targetVars[colIdx];
              var testDataTable = new DataTable(targetVar + " prediction (test)");
              var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
              var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
              testDataTable.Rows.Add(actualValuesRow);
              testDataTable.Rows.Add(predictedValuesRow);
              testList.Add(testDataTable);

            } else {
              // var latentVar = latentVariables[colIdx - targetVars.Length];
              // var testDataTable = new DataTable(latentVar + " prediction (test)");
              // var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
              // var emptyRow = new DataRow(latentVar);
              // testDataTable.Rows.Add(emptyRow);
              // testDataTable.Rows.Add(predictedValuesRow);
              // testList.Add(testDataTable);
            }
          }

          results["Prediction (training)"].Value = trainingList.AsReadOnly();
          results["Prediction (test)"].Value = testList.AsReadOnly();

        }

        #region simplification of models
        // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var clonedTrees = new List<ISymbolicExpressionTree>();
        for (int idx = 0; idx < trees.Length; idx++) {
          clonedTrees.Add((ISymbolicExpressionTree)trees[idx].Clone());
        }
        var ds = problemData.Dataset;
        var newProblemData = new RegressionProblemData((IDataset)ds.Clone(), problemData.AllowedInputVariables, problemData.TargetVariable);
        results["Solution"].Value = new Solution(clonedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   trainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);

        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(tree);
          models.Add(simplifiedTreeVar);
        }

        results["Models"].Value = models;
        #endregion

        #region produce classical solutions to allow visualization with PDP
        for (int treeIdx = 0; treeIdx < targetVars.Length; treeIdx++) {
          var t = (ISymbolicExpressionTree)trees[treeIdx].Clone();
          var name = targetVars.Concat(latentVariables).ElementAt(treeIdx); // whatever
          var model = new SymbolicRegressionModel(name + "_diff", t, new SymbolicDataAnalysisExpressionTreeLinearInterpreter());
          var solutionDataset = ((Dataset)problemData.Dataset).ToModifiable();
          solutionDataset.Name = ((Dataset)problemData.Dataset).Name;
          solutionDataset.Description = ((Dataset)problemData.Dataset).Description;

          var absValues = solutionDataset.GetDoubleValues(name).ToArray();

          var diffValues = new double[absValues.Length];
          foreach (var ep in TrainingEpisodes.Concat(TestEpisodes)) {
            var y = solutionDataset.GetDoubleValues(name, Enumerable.Range(ep.Start, ep.End - ep.Start)).ToArray();
            var yd = CalculateDifferences(y, NumericDifferencesSmoothing).ToArray();
            for (int r = ep.Start; r < ep.End; r++) {
              diffValues[r] = yd[r - ep.Start];
            }
          }

          solutionDataset.AddVariable(name + "_diff", diffValues);
          var solutionProblemData = new RegressionProblemData(solutionDataset, problemData.AllowedInputVariables, name + "_diff");
          solutionProblemData.Name = problemData.Name;
          solutionProblemData.Description = problemData.Description;

          solutionProblemData.TrainingPartition.Start = TrainingEpisodes.Select(ep => ep.Start).Min();
          solutionProblemData.TrainingPartition.End = TrainingEpisodes.Select(ep => ep.End).Max(); // assumes training episodes are sequential without gaps
          if (TestEpisodes.Any()) {
            solutionProblemData.TestPartition.Start = TestEpisodes.Select(ep => ep.Start).Min();
            solutionProblemData.TestPartition.End = TestEpisodes.Select(ep => ep.End).Max();
          } else {
            solutionProblemData.TestPartition.Start = problemData.TestPartition.Start;
            solutionProblemData.TestPartition.End = problemData.TestPartition.End;
          }
          var solution = model.CreateRegressionSolution(solutionProblemData);
          results.AddOrUpdateResult("Solution " + name, solution);
        }
        #endregion
      }
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes

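    // Forward sensitivity analysis in a nutshell (the math behind the code below): for the ODE
    //   dy/dt = f(y, theta),  y(t0) = y0
    // the sensitivities S(t) = dy(t)/dtheta satisfy the matrix ODE
    //   dS/dt = (df/dy) * S + df/dtheta,  S(t0) = dy0/dtheta
    // which is integrated forward in time alongside y(t); the Vector gradients carried
    // through InterpretRec propagate these derivatives through each integration step.
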
    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData) {
      var nTargets = optimizationData.targetVariables.Length;
      var n = optimizationData.rows.Length * optimizationData.targetVariables.Length;
      var d = optimizationData.nodeValueLookup.ParameterCount;
      double[] fi = new double[n];
      double[,] jac = new double[n, d];
      Integrate(optimizationData, fi, jac, null);
      for (int i = 0; i < optimizationData.rows.Length; i++) {
        var res = new Tuple<double, Vector>[nTargets];
        for (int j = 0; j < nTargets; j++) {
          res[j] = Tuple.Create(fi[i * nTargets + j], Vector.CreateFromMatrixRow(jac, i * nTargets + j));
        }
        yield return res;
      }
    }

    public static void Integrate(OptimizationData optimizationData, double[] fi, double[,] jac, double[,] latentValues) {
      var trees = optimizationData.trees;
      var dataset = optimizationData.problemData.Dataset;
      var inputVariables = optimizationData.variables;
      var targetVariables = optimizationData.targetVariables;
      var latentVariables = optimizationData.latentVariables;
      var episodes = optimizationData.episodes;
      var odeSolver = optimizationData.odeSolver;
      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;
      var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding
981      var nodeValues = optimizationData.nodeValueLookup;
982
983      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var outputRowIdx = 0;
      var episodeIdx = 0;
      // row/column indices into latentValues; declared here so that rows continue across episodes
      var latentValueRowIdx = 0;
      var latentValueColIdx = 0;
986      foreach (var episode in optimizationData.episodes) {
987        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start).ToArray();
988
989        var t0 = rows.First();
990
991        // initialize values for inputs and targets from dataset
992        foreach (var varName in inputVariables) {
993          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
994          // if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
995          //   nodeValues.SetVariableValue(varName, value, Vector.Zero);
996          // } else {
997          var y0 = dataset.GetDoubleValue(varName, t0);
998          nodeValues.SetVariableValue(varName, y0, Vector.Zero);
999          //}
1000        }
1001        foreach (var varName in targetVariables) {
1002          var y0 = dataset.GetDoubleValue(varName, t0);
1003          nodeValues.SetVariableValue(varName, y0, Vector.Zero);
1004
1005          // output starting value
1006          fi[outputRowIdx] = y0;
          if (jac != null) Vector.Zero.CopyTo(jac, outputRowIdx);
1008
1009          outputRowIdx++;
1010        }
1011
1014        foreach (var varName in latentVariables) {
1015          var y0 = 0.0; // assume we start at zero
1016          nodeValues.SetVariableValue(varName, y0, Vector.Zero);
1017
1018          if (latentValues != null) {
1019            latentValues[latentValueRowIdx, latentValueColIdx++] = y0;
1020          }
1021        }
1022        latentValueColIdx = 0; latentValueRowIdx++;
1023
        { // NOTE: the code below no longer works (initial values of latent variables were optimized separately for each episode); kept for reference
1025          // if (latentVariables.Length > 0) throw new NotImplementedException();
1026          //
1027          // // add value entries for latent variables which are also integrated
1028          // // initial values are at the end of the parameter vector
1029          // // separate initial values for each episode
1030          // var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
1031          // foreach (var latentVar in latentVariables) {
1032          //   var arr = new double[parameterValues.Length]; // backing array
1033          //   arr[initialValueIdx] = 1.0;
1034          //   var g = new Vector(arr);
1035          //   nodeValues.SetVariableValue(latentVar, parameterValues[initialValueIdx], g); // we don't have observations for latent variables therefore we optimize the initial value for each episode
1036          //   initialValueIdx++;
1037          // }
1038        }
1039
        // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
1041        if (odeSolver == "CVODES (full)") {
1042          IntegrateCVODES(trees, calculatedVariables, nodeValues, rows, fi, jac);
1043        } else {
1044
1045          foreach (var t in rows.Skip(1)) {
1046            if (odeSolver == "HeuristicLab")
1047              IntegrateHL(trees, calculatedVariables, nodeValues, numericIntegrationSteps); // integrator updates nodeValues
1048            else if (odeSolver == "CVODES")
1049              IntegrateCVODES(trees, calculatedVariables, nodeValues);
1050            else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
1052
1053            // update output for target variables (TODO: if we want to visualize the latent variables then we need to provide a separate output)
1054            for (int i = 0; i < targetVariables.Length; i++) {
1055              var targetVar = targetVariables[i];
1056              var yt = nodeValues.GetVariableValue(targetVar);
1057
1058              // fill up remaining rows with last valid value if there are invalid values
1059              if (double.IsNaN(yt.Item1) || double.IsInfinity(yt.Item1)) {
1060                for (; outputRowIdx < fi.Length; outputRowIdx++) {
1061                  var prevIdx = outputRowIdx - targetVariables.Length;
1062                  fi[outputRowIdx] = fi[prevIdx]; // current <- prev
1063                  if (jac != null) for (int j = 0; j < jac.GetLength(1); j++) jac[outputRowIdx, j] = jac[prevIdx, j];
1064                }
1065                return;
              }
1067
1068              fi[outputRowIdx] = yt.Item1;
1069              var g = yt.Item2;
              if (jac != null) g.CopyTo(jac, outputRowIdx);
1071              outputRowIdx++;
1072            }
1073            if (latentValues != null) {
1074              foreach (var latentVariable in latentVariables) {
1075                var lt = nodeValues.GetVariableValue(latentVariable).Item1;
1076                latentValues[latentValueRowIdx, latentValueColIdx++] = lt;
1077              }
1078              latentValueRowIdx++; latentValueColIdx = 0;
1079            }
1080
1081            // update for next time step (only the inputs)
1082            foreach (var varName in inputVariables) {
1083              // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
1084              // if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
1085              //   // value is unchanged
1086              // } else {
1087              nodeValues.SetVariableValue(varName, dataset.GetDoubleValue(varName, t), Vector.Zero);
1088              // }
1089            }
1090          }
1091        }
1092        episodeIdx++;
1093      }
1094    }
1095
1096    #region CVODES
1097
1098
    /// <summary>
    ///  Uses CVODES to solve the ODE for a single unit time step. Forward sensitivities are used to calculate the gradient for parameter optimization.
    /// </summary>
    /// <param name="trees">Each equation of the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="nodeValues">Current values and gradients of all variables and parameters; updated in place with the result of the integration step</param>
1105    private static void IntegrateCVODES(
1106      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
1107      string[] calculatedVariables, // names of elements of y
1108      NodeValueLookup nodeValues
1109      ) {
1110
1111      // the RHS of the ODE
1112      // dy/dt = f(y_t,x_t,p)
1113      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, nodeValues);
1114      // the Jacobian ∂f/∂y
1115      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, nodeValues);
1116
1117      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
1118      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, nodeValues);
1119
1120      // setup solver
1121      int numberOfEquations = trees.Length;
1122      IntPtr y = IntPtr.Zero;
1123      IntPtr cvode_mem = IntPtr.Zero;
1124      IntPtr A = IntPtr.Zero;
1125      IntPtr yS0 = IntPtr.Zero;
1126      IntPtr linearSolver = IntPtr.Zero;
1127      var ns = nodeValues.ParameterCount; // number of parameters
1128
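      // CVODES call sequence (cf. the CVODES user guide): create the state vector, create and
      // initialize the solver (CVodeCreate/CVodeInit), set tolerances, attach a dense linear solver
      // with Jacobian, initialize the sensitivity module (CVodeSensInit) and call CVode to integrate.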
1129      try {
1130        y = CVODES.N_VNew_Serial(numberOfEquations);
1131        // init y to current values of variables
1132        // y must be initialized before calling CVodeInit
1133        for (int i = 0; i < calculatedVariables.Length; i++) {
1134          CVODES.NV_Set_Ith_S(y, i, nodeValues.GetVariableValue(calculatedVariables[i]).Item1);
1135        }
1136
1137        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);
1138
1139        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
1140        Assert(CVODES.CV_SUCCESS == flag);
1141
1142        flag = CVODES.CVodeSetErrHandlerFn(cvode_mem, errorFunction, IntPtr.Zero);
1143        Assert(CVODES.CV_SUCCESS == flag);
1144
1145
1146        double relTol = 1.0e-2;
1147        double absTol = 1.0;
1148        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
1149        Assert(CVODES.CV_SUCCESS == flag);
1150
1151        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
1152        Assert(A != IntPtr.Zero);
1153
1154        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
1155        Assert(linearSolver != IntPtr.Zero);
1156
1157        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
1158        Assert(CVODES.CV_SUCCESS == flag);
1159
1160        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
1161        Assert(CVODES.CV_SUCCESS == flag);
1162
1163        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
1164        unsafe {
1165          // set to initial sensitivities supplied by caller
1166          for (int pIdx = 0; pIdx < ns; pIdx++) {
1167            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
1168            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1169              CVODES.NV_Set_Ith_S(yS0_i, varIdx, nodeValues.GetVariableValue(calculatedVariables[varIdx]).Item2[pIdx]); // TODO: perf
1170            }
1171          }
1172        }
1173
1174        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
1175        Assert(CVODES.CV_SUCCESS == flag);
1176
1177        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
1178        Assert(CVODES.CV_SUCCESS == flag);
1179
        // make one forward integration step from t = 0 to t = 1
        double tout = 0.0; // receives the time actually reached by the solver
1182        flag = CVODES.CVode(cvode_mem, 1.0, y, ref tout, CVODES.CV_NORMAL);
1183        if (flag == CVODES.CV_SUCCESS) {
1184          Assert(1.0 == tout);
1185
1186          // get sensitivities
1187          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
1188          Assert(CVODES.CV_SUCCESS == flag);
1189
1190          // update variableValues based on integration results
1191          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1192            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
1193            var gArr = new double[ns];
1194            for (var pIdx = 0; pIdx < ns; pIdx++) {
1195              unsafe {
1196                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
1197                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
1198              }
1199            }
1200            nodeValues.SetVariableValue(calculatedVariables[varIdx], yi, new Vector(gArr));
1201          }
1202        } else {
          throw new InvalidOperationException("CVODES failed to integrate the unit time step");
1204        }
1205
1206        // cleanup all allocated objects
1207      } finally {
1208        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
1209        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
1210        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
1211        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
1212        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
1213      }
1214    }
1215
    /// <summary>
    ///  Uses CVODES to solve the ODE over a whole episode. Forward sensitivities are used to calculate the gradient for parameter optimization.
    /// </summary>
    /// <param name="trees">Each equation of the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="rows">The dataset rows (time steps) to integrate over</param>
    /// <param name="fi">Output buffer for the predicted values</param>
    /// <param name="jac">Output buffer for the gradients; pass null to skip the sensitivity calculation</param>
1222    private static void IntegrateCVODES(
1223      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
1224      string[] calculatedVariables, // names of elements of y
1225      NodeValueLookup nodeValues,
1226      IEnumerable<int> rows,
1227      double[] fi,
1228      double[,] jac
1229      ) {
1230
1231      // the RHS of the ODE
1232      // dy/dt = f(y_t,x_t,p)
1233      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, nodeValues);
1234
1235      var calcSens = jac != null;
1236      // the Jacobian ∂f/∂y
1237      CVODES.CVDlsJacFunc jacF = CreateJac(trees, calculatedVariables, nodeValues);
1238
1239      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
1240      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, nodeValues);
1241
1242      // setup solver
1243      int numberOfEquations = trees.Length;
1244      IntPtr y = IntPtr.Zero;
1245      IntPtr cvode_mem = IntPtr.Zero;
1246      IntPtr A = IntPtr.Zero;
1247      IntPtr yS0 = IntPtr.Zero;
1248      IntPtr linearSolver = IntPtr.Zero;
1249      var ns = nodeValues.ParameterCount; // number of parameters
1250
1251      try {
1252        y = CVODES.N_VNew_Serial(numberOfEquations);
1253        // init y to current values of variables
1254        // y must be initialized before calling CVodeInit
1255        for (int i = 0; i < calculatedVariables.Length; i++) {
1256          CVODES.NV_Set_Ith_S(y, i, nodeValues.GetVariableValue(calculatedVariables[i]).Item1);
1257        }
1258
1259        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);
1260
1261        var flag = CVODES.CVodeInit(cvode_mem, f, rows.First(), y);
1262        Assert(CVODES.CV_SUCCESS == flag);
1263
1264        flag = CVODES.CVodeSetErrHandlerFn(cvode_mem, errorFunction, IntPtr.Zero);
1265        Assert(CVODES.CV_SUCCESS == flag);
1266        double relTol = 1.0e-2;
1267        double absTol = 1.0;
1268        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
1269        Assert(CVODES.CV_SUCCESS == flag);
1270
1271        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
1272        Assert(A != IntPtr.Zero);
1273
1274        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
1275        Assert(linearSolver != IntPtr.Zero);
1276
1277        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
1278        Assert(CVODES.CV_SUCCESS == flag);
1279
1280        flag = CVODES.CVDlsSetJacFn(cvode_mem, jacF);
1281        Assert(CVODES.CV_SUCCESS == flag);
1282
1283        if (calcSens) {
1284
1285          yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
1286          unsafe {
1287            // set to initial sensitivities supplied by caller
1288            for (int pIdx = 0; pIdx < ns; pIdx++) {
1289              var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
1290              for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1291                CVODES.NV_Set_Ith_S(yS0_i, varIdx, nodeValues.GetVariableValue(calculatedVariables[varIdx]).Item2[pIdx]); // TODO: perf
1292              }
1293            }
1294          }
1295
1296          flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
1297          Assert(CVODES.CV_SUCCESS == flag);
1298
1299          flag = CVODES.CVodeSensEEtolerances(cvode_mem);
1300          Assert(CVODES.CV_SUCCESS == flag);
1301        }
1302        // integrate
1303        int outputIdx = calculatedVariables.Length; // values at t0 do not need to be set.
1304        foreach (var tout in rows.Skip(1)) {
1305          double tret = 0;
1306          flag = CVODES.CVode(cvode_mem, tout, y, ref tret, CVODES.CV_NORMAL);
1307          if (flag == CVODES.CV_SUCCESS) {
1308            // Assert(1.0 == tout);
1309            if (calcSens) {
1310              // get sensitivities
1311              flag = CVODES.CVodeGetSens(cvode_mem, ref tret, yS0);
1312              Assert(CVODES.CV_SUCCESS == flag);
1313            }
1314            // update variableValues based on integration results
1315            for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1316              var yi = CVODES.NV_Get_Ith_S(y, varIdx);
1317              fi[outputIdx] = yi;
1318              if (calcSens) {
1319                // var gArr = new double[ns];
1320                for (var pIdx = 0; pIdx < ns; pIdx++) {
1321                  unsafe {
1322                    var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
1323                    jac[outputIdx, pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
1324                  }
1325                }
1326              }
1327              outputIdx++;
1328            }
1329
1330          } else {
1331            // fill up remaining values
1332            while (outputIdx < fi.Length) {
1333              fi[outputIdx] = fi[outputIdx - calculatedVariables.Length];
1334              if (calcSens) {
1335                for (var pIdx = 0; pIdx < ns; pIdx++) {
1336                  jac[outputIdx, pIdx] = jac[outputIdx - calculatedVariables.Length, pIdx];
1337                }
1338              }
1339              outputIdx++;
1340            }
1341            return;
1342          }
1343        }
1344
1345        // cleanup all allocated objects
1346      } finally {
1347        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
1348        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
1349        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
1350        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
1351        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
1352      }
1353    }
1354
    // CVODES error handler: warnings and errors are intentionally swallowed here because integration
    // failures are detected via the return flags of the CVODES calls.
    private static void errorFunction(int errorCode, IntPtr module, IntPtr function, IntPtr msg, IntPtr ehdata) {
1356      var moduleStr = Marshal.PtrToStringAnsi(module);
1357      var functionStr = Marshal.PtrToStringAnsi(function);
1358      var msgStr = Marshal.PtrToStringAnsi(msg);
1359      string type = errorCode == 0 ? "Warning" : "Error";
1360      // throw new InvalidProgramException($"{type}: {msgStr} Module: {moduleStr} Function: {functionStr}");
1361    }
1362
1363    private static CVODES.CVRhsFunc CreateOdeRhs(
1364      ISymbolicExpressionTree[] trees,
1365      string[] calculatedVariables,
1366      NodeValueLookup nodeValues) {
1367      // we don't need to calculate a gradient here
1368      return (double t,
1369              IntPtr y, // N_Vector, current value of y (input)
              IntPtr ydot, // N_Vector, calculated value of y' (output)
1371              IntPtr user_data // optional user data, (unused here)
1372              ) => {
1373                for (int i = 0; i < calculatedVariables.Length; i++) {
1374                  var y_i = CVODES.NV_Get_Ith_S(y, (long)i);
1375                  nodeValues.SetVariableValue(calculatedVariables[i], y_i);
1376                }
1377                for (int i = 0; i < trees.Length; i++) {
1378                  var tree = trees[i];
1379                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
1380                  CVODES.NV_Set_Ith_S(ydot, i, res_i);
1381                }
1382                return 0;
1383              };
1384    }
1385
1386    private static CVODES.CVDlsJacFunc CreateJac(
1387      ISymbolicExpressionTree[] trees,
1388      string[] calculatedVariables,
1389      NodeValueLookup nodeValues) {
1390
1391      return (
1392        double t, // current time (input)
1393        IntPtr y, // N_Vector, current value of y (input)
1394        IntPtr fy, // N_Vector, current value of f (input)
        IntPtr Jac, // SUNMatrix ∂f/∂y (output, row i contains the ∂f_i/∂y vector)
1396        IntPtr user_data, // optional (unused here)
1397        IntPtr tmp1, // N_Vector, optional (unused here)
1398        IntPtr tmp2, // N_Vector, optional (unused here)
1399        IntPtr tmp3 // N_Vector, optional (unused here)
1400      ) => {
1401        // int pIdx = 0;
1402        // foreach (var tree in trees) {
1403        //   foreach (var n in tree.IterateNodesPrefix()) {
1404        //     if (IsConstantNode(n)) {
1405        //       nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
1406        //       pIdx++;
1407        //     } else if (n.SubtreeCount == 0) {
1408        //       // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
1409        //       var varName = n.Symbol.Name;
1410        //       var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
1411        //       if (varIdx < 0) throw new InvalidProgramException();
1412        //
1413        //       var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1414        //       var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
1415        //       gArr[varIdx] = 1.0;
1416        //       var g = new Vector(gArr);
1417        //       nodeValues.Add(n, Tuple.Create(y_i, g));
1418        //     }
1419        //   }
1420        // }
        // the Jacobian ∂f/∂y requires gradients w.r.t. the state variables y (not the parameters);
        // as in CreateSensitivityRhs we use a lookup with indicator gradient vectors for y
        var tmpNodeValues = new NodeValueLookup(trees, variableGradient: true);
        for (int i = 0; i < calculatedVariables.Length; i++) {
          var y_i = CVODES.NV_Get_Ith_S(y, (long)i);
          tmpNodeValues.SetVariableValue(calculatedVariables[i], y_i, Vector.CreateIndicator(calculatedVariables.Length, i));
        }
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), tmpNodeValues, out double z, out Vector dz);
          for (int j = 0; j < calculatedVariables.Length; j++) {
            CVODES.SUNDenseMatrix_Set(Jac, i, j, dz[j]); // dz[j] = ∂f_i/∂y_j
          }
        }
1432        return 0; // on success
1433      };
1434    }
1435
1436
1437    // to calculate sensitivities RHS for all equations at once
1438    // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store in ySdot.
1439    // Index i refers to parameters, dimensionality of matrix and vectors is number of equations
1440    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, NodeValueLookup nodeValues) {
1441      return (
1442              int Ns, // number of parameters
1443              double t, // current time
1444              IntPtr y, // N_Vector y(t) (input)
1445              IntPtr ydot, // N_Vector dy/dt(t) (input)
1446              IntPtr yS, // N_Vector*, one vector for each parameter (input)
1447              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
1448              IntPtr user_data, // optional (unused here)
1449              IntPtr tmp1, // N_Vector, optional (unused here)
1450              IntPtr tmp2 // N_Vector, optional (unused here)
1451        ) => {
1452
1453          var tmpNodeValues = new NodeValueLookup(trees, variableGradient: true); // for df / dy calculation
1454
1455          // update variableValues based on integration results
1456          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1457            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
1458            var gArr = new double[Ns];
1459            for (var pIdx = 0; pIdx < Ns; pIdx++) {
1460              unsafe {
1461                var yS_pi = *((IntPtr*)yS.ToPointer() + pIdx);
1462                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS_pi, varIdx);
1463              }
1464            }
1465            nodeValues.SetVariableValue(calculatedVariables[varIdx], yi, new Vector(gArr));
1466            tmpNodeValues.SetVariableValue(calculatedVariables[varIdx], yi, Vector.CreateIndicator(calculatedVariables.Length, varIdx));
1467          }
1468
1469          for (int pIdx = 0; pIdx < Ns; pIdx++) {
1470            unsafe {
1471              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1472              CVODES.N_VConst_Serial(0.0, sDot_pi);
1473            }
1474          }
1475
1476
1477          for (int i = 0; i < trees.Length; i++) {
1478            var tree = trees[i];
1479
1480            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i
1481
1482            // 1. interpret tree to calculate (∂f/∂y)
1483            // we need a different nodeValue object for (∂f/∂y)
1484            InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), tmpNodeValues, out double z1, out Vector df_dy);
1485
1486            // 2. interpret tree to calculate ∂f/∂p_i
1487            InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out double z, out Vector df_dp);
1488
1489            for (int pIdx = 0; pIdx < Ns; pIdx++) {
1490              unsafe {
1491                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1492                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);
1493
1495                // (∂f/∂y)s_i(t)
1496                var p = 0.0;
1497                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
1498                  p += df_dy[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
1499                }
1500                // + ∂f/∂p_i
1501                CVODES.NV_Set_Ith_S(sDot_pi, i, p + df_dp[pIdx]);
1502              }
1503            }
1504
1505          }
1506          return 0; // on success
1507        };
1508    }
1509
1510    #endregion
1511
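    // Fixed-step explicit (forward) Euler integration over one unit time interval:
    //   y(t+h) = y(t) + h * f(y(t))  with  h = 1 / numericIntegrationSteps.
    // The parameter gradients are propagated with the same update:
    //   dy(t+h)/dp = dy(t)/dp + h * df(y(t))/dp.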
1512    private static void IntegrateHL(
1513      ISymbolicExpressionTree[] trees,
1514      string[] calculatedVariables, // names of integrated variables
1515      NodeValueLookup nodeValues,
1516      int numericIntegrationSteps) {
1517
1518
1519      double[] deltaF = new double[calculatedVariables.Length];
1520      Vector[] deltaG = new Vector[calculatedVariables.Length];
1521
1522      double h = 1.0 / numericIntegrationSteps;
1523      for (int step = 0; step < numericIntegrationSteps; step++) {
1524
1525        // evaluate all trees
1526        for (int i = 0; i < trees.Length; i++) {
1527          var tree = trees[i];
1528
1529          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
1530          double f; Vector g;
1531          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
1532          deltaF[i] = f;
1533          deltaG[i] = g;
1534        }
1535
        // update variableValues for the next step using explicit (forward) Euler integration
1537        for (int i = 0; i < trees.Length; i++) {
1538          var varName = calculatedVariables[i];
1539          var oldVal = nodeValues.GetVariableValue(varName);
1540          nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2.Add(deltaG[i].Scale(h)));
1541        }
1542      }
1543    }
1544
1545    // TODO: use an existing interpreter implementation instead
1546    private static double InterpretRec(ISymbolicExpressionTreeNode node, NodeValueLookup nodeValues) {
1547      if (node is ConstantTreeNode constTreeNode) {
1548        return nodeValues.ConstantNodeValue(constTreeNode);
1549      } else if (node is VariableTreeNode varTreeNode) {
1550        return nodeValues.VariableNodeValue(varTreeNode);
1551      } else if (node.Symbol is Addition) {
1552        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1553        for (int i = 1; i < node.SubtreeCount; i++) {
1554          f += InterpretRec(node.GetSubtree(i), nodeValues);
1555        }
1556        return f;
1557      } else if (node.Symbol is Multiplication) {
1558        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1559        for (int i = 1; i < node.SubtreeCount; i++) {
1560          f *= InterpretRec(node.GetSubtree(i), nodeValues);
1561        }
1562        return f;
1563      } else if (node.Symbol is Subtraction) {
1564        if (node.SubtreeCount == 1) {
1565          return -InterpretRec(node.GetSubtree(0), nodeValues);
1566        } else {
1567          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1568          for (int i = 1; i < node.SubtreeCount; i++) {
1569            f -= InterpretRec(node.GetSubtree(i), nodeValues);
1570          }
1571          return f;
1572        }
1573      } else if (node.Symbol is Division) {
1574        if (node.SubtreeCount == 1) {
1575          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1576          // protected division
1577          if (f.IsAlmost(0.0)) {
1578            return 0;
1579          } else {
1580            return 1.0 / f;
1581          }
1582        } else {
1583          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1584          for (int i = 1; i < node.SubtreeCount; i++) {
1585            var g = InterpretRec(node.GetSubtree(i), nodeValues);
1586            // protected division
1587            if (g.IsAlmost(0.0)) {
1588              return 0;
1589            } else {
1590              f /= g;
1591            }
1592          }
1593          return f;
1594        }
1595      } else if (node.Symbol is Sine) {
1596        Assert(node.SubtreeCount == 1);
1597
1598        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1599        return Math.Sin(f);
1600      } else if (node.Symbol is Cosine) {
1601        Assert(node.SubtreeCount == 1);
1602
1603        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1604        return Math.Cos(f);
1605      } else if (node.Symbol is Square) {
1606        Assert(node.SubtreeCount == 1);
1607
1608        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1609        return f * f;
1610      } else if (node.Symbol is Exponential) {
1611        Assert(node.SubtreeCount == 1);
1612
1613        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1614        return Math.Exp(f);
1615      } else if (node.Symbol is Logarithm) {
1616        Assert(node.SubtreeCount == 1);
1617
1618        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1619        return Math.Log(f);
1620      } else if (node.Symbol is HyperbolicTangent) {
1621        Assert(node.SubtreeCount == 1);
1622
1623        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1624        return Math.Tanh(f);
1625      } else if (node.Symbol is AnalyticQuotient) {
1626        Assert(node.SubtreeCount == 2);
1627
1628        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1629        var g = InterpretRec(node.GetSubtree(1), nodeValues);
1630        return f / Math.Sqrt(1 + g * g);
1631      } else throw new NotSupportedException("unsupported symbol");
1632    }
1633
1634    private static void Assert(bool cond) {
1635#if DEBUG
1636      if (!cond) throw new InvalidOperationException("Assertion failed");
1637#endif
1638    }
1639
1640    private static void InterpretRec(
1641      ISymbolicExpressionTreeNode node,
1642       NodeValueLookup nodeValues,      // contains value and gradient vector for a node (variables and constants only)
1643      out double z,
1644      out Vector dz
1645      ) {
1646      double f, g;
1647      Vector df, dg;
1648      if (node is ConstantTreeNode constTreeNode) {
1649        var val = nodeValues.ConstantNodeValueAndGradient(constTreeNode);
1650        z = val.Item1;
1651        dz = val.Item2;
1652      } else if (node is VariableTreeNode varTreeNode) {
1653        var val = nodeValues.VariableNodeValueAndGradient(varTreeNode);
1654        z = val.Item1;
1655        dz = val.Item2;
1656      } else if (node.Symbol is Addition) {
1657        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1658        for (int i = 1; i < node.SubtreeCount; i++) {
1659          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1660          f = f + g;
1661          df = df.Add(dg);
1662        }
1663        z = f;
1664        dz = df;
1665      } else if (node.Symbol is Multiplication) {
1666        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1667        for (int i = 1; i < node.SubtreeCount; i++) {
1668          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
          df = df.Scale(g).Add(dg.Scale(f)); // (f*g)' = f'*g + f*g' (gradient must be updated before f is overwritten)
          f = f * g;
1671        }
1672        z = f;
1673        dz = df;
1674      } else if (node.Symbol is Subtraction) {
1675        if (node.SubtreeCount == 1) {
1676          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1677          z = -f;
1678          dz = df.Scale(-1.0);
1679        } else {
1680          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1681          for (int i = 1; i < node.SubtreeCount; i++) {
1682            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1683            f = f - g;
1684            df = df.Subtract(dg);
1685          }
1686          z = f;
1687          dz = df;
1688        }
1689      } else if (node.Symbol is Division) {
1690        if (node.SubtreeCount == 1) {
1691          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1692          // protected division
1693          if (f.IsAlmost(0.0)) {
1694            z = 0;
1695            dz = Vector.Zero;
1696          } else {
1697            z = 1.0 / f;
1698            dz = df.Scale(-1 * z * z);
1699          }
1700        } else {
1701          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1702          for (int i = 1; i < node.SubtreeCount; i++) {
1703            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1704            // protected division
1705            if (g.IsAlmost(0.0)) {
1706              z = 0;
1707              dz = Vector.Zero;
1708              return;
1709            } else {
1710              var inv_g = 1.0 / g;
              df = df.Scale(inv_g).Add(dg.Scale(-f * inv_g * inv_g)); // (f/g)' = f'/g - f*g'/g² (gradient must be updated before f is overwritten)
              f = f * inv_g;
1713            }
1714          }
1715          z = f;
1716          dz = df;
1717        }
1718      } else if (node.Symbol is Sine) {
1719        Assert(node.SubtreeCount == 1);
1720        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1721        z = Math.Sin(f);
1722        dz = df.Scale(Math.Cos(f));
1723      } else if (node.Symbol is Cosine) {
1724        Assert(node.SubtreeCount == 1);
1725        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1726        z = Math.Cos(f);
1727        dz = df.Scale(-Math.Sin(f));
1728      } else if (node.Symbol is Square) {
1729        Assert(node.SubtreeCount == 1);
1730        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1731        z = f * f;
1732        dz = df.Scale(2.0 * f);
1733      } else if (node.Symbol is Exponential) {
1734        Assert(node.SubtreeCount == 1);
1735        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1736        z = Math.Exp(f);
1737        dz = df.Scale(Math.Exp(f));
1738      } else if (node.Symbol is Logarithm) {
1739        Assert(node.SubtreeCount == 1);
1740        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1741        z = Math.Log(f);
1742        dz = df.Scale(1.0 / f);
1743      } else if (node.Symbol is HyperbolicTangent) {
1744        Assert(node.SubtreeCount == 1);
1745        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1746        z = Math.Tanh(f);
1747        dz = df.Scale(1 - z * z); // tanh(f(x))' = f(x)'sech²(f(x)) = f(x)'(1 - tanh²(f(x)))
1748      } else if (node.Symbol is AnalyticQuotient) {
1749        Assert(node.SubtreeCount == 2);
1750        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1751        InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1752        z = f / Math.Sqrt(1 + g * g);
1753        var denom = 1.0 / Math.Pow(1 + g * g, 1.5);
1754        dz = df.Scale(1 + g * g).Subtract(dg.Scale(f * g)).Scale(denom);
1755      } else {
1756        throw new NotSupportedException("unsupported symbol");
1757      }
1758    }
1759
1760    #endregion
1761
1762    #region events
1763    /*
1764     * Dependencies between parameters:
1765     *
1766     * ProblemData
1767     *    |                                                                         
1768     *    V                                                                         
1769     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables     
1770     *               |   |                 |                   |                     
1771     *               V   V                 |                   |                     
1772     *             Grammar <---------------+-------------------                     
1773     *                |                                                             
1774     *                V                                                             
1775     *            Encoding                                                           
1776     */
1777    private void RegisterEventHandlers() {
1778      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
1779      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
1780
1781      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
1782      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1783
1784      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
1785      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1786
1787      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
1788
1789      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
1790    }
1791
1792    private void NumLatentVariablesChanged(object sender, EventArgs e) {
1793      UpdateGrammarAndEncoding();
1794    }
1795
1796    private void MaximumLengthChanged(object sender, EventArgs e) {
1797      UpdateGrammarAndEncoding();
1798    }
1799
    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
      UpdateGrammarAndEncoding(); // keep the grammar in sync when the function set object itself is replaced
    }
1803
1804    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1805      UpdateGrammarAndEncoding();
1806    }
1807
1808    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
1809      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1810      UpdateGrammarAndEncoding();
1811    }
1812
1813    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1814      UpdateGrammarAndEncoding();
1815    }
1816
1817    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
1818      ProblemDataParameter.Value.Changed += ProblemData_Changed;
1819      OnProblemDataChanged();
1820      OnReset();
1821    }
1822
1823    private void ProblemData_Changed(object sender, EventArgs e) {
1824      OnProblemDataChanged();
1825      OnReset();
1826    }
1827
1828    private void OnProblemDataChanged() {
1829      UpdateTargetVariables();        // implicitly updates other dependent parameters
1830      var handler = ProblemDataChanged;
1831      if (handler != null) handler(this, EventArgs.Empty);
1832    }
1833
1834    #endregion
1835
1836    #region  helper
1837
    private static double[] CalculateDifferences(double[] targetValues, double numericDifferencesSmoothing) {
      // currently uses Savitzky-Golay filtering; the smoothing parameter is only used by the
      // alternative penalized-splines implementation below
      return CalculateDifferencesSavitzkyGolay(targetValues);
    }
1841
1842    private static double[] CalculateDifferencesPenalizedSplines(double[] targetValues, double numericDifferencesSmoothing) {
1843      var x = Enumerable.Range(0, targetValues.Length).Select(i => (double)i).ToArray();
1844      alglib.spline1dfitpenalized(x, targetValues, targetValues.Length / 2, numericDifferencesSmoothing,
1845        out int info, out alglib.spline1dinterpolant s, out alglib.spline1dfitreport rep);
1846      if (info <= 0) throw new ArgumentException("There was a problem while smoothing numeric differences. Try to use a different smoothing parameter value.");
1847
1848      double[] dy = new double[x.Length];
1849      for (int i = 0; i < x.Length; i++) {
1850        double xi = x[i];
1851        alglib.spline1ddiff(s, xi, out double y, out double dyi, out double d2y);
1852        dy[i] = dyi;
1853      }
1854      return dy;
1855    }
1856
1857    private static readonly double[] sgCoeffMiddle = SavitzkyGolayCoefficients(3, 3, 1, 3);
1858    private static readonly double[] sgCoeffStart = SavitzkyGolayCoefficients(0, 3, 1, 3);
1859    private static readonly double[] sgCoeffEnd = SavitzkyGolayCoefficients(3, 0, 1, 3);
    private static double[] CalculateDifferencesSavitzkyGolay(double[] y) {
1861      double[] dy = new double[y.Length];
1862      for (int i = 3; i < y.Length - 3; i++) {
1863        for (int j = -3; j <= 3; j++) {
1864          dy[i] += y[i + j] * sgCoeffMiddle[j + 3];
1865        }
1866      }
1867
1868      // start
1869      for (int i = 0; i < 3; i++) {
1870        for (int j = 0; j <= 3; j++) {
1871          dy[i] += y[i + j] * sgCoeffStart[j];
1872        }
1873      }
1874
1875      // end
1876      for (int i = y.Length - 3; i < y.Length; i++) {
1877        for (int j = -3; j <= 0; j++) {
1878          dy[i] += y[i + j] * sgCoeffEnd[j + 3];
1879        }
1880      }
1881
1882      return dy;
1883    }
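    // Worked example: for the quadratic sequence y_i = i² (y = 0, 1, 4, 9, 16, 25, ...) the filter
    // reproduces the exact derivative dy_i = 2i, because the underlying cubic fit is exact for
    // polynomials up to degree three; interior points use a symmetric 7-point window, the first and
    // last three points use one-sided 4-point windows.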
1884
    /// <summary>
    /// Calculates the coefficients for Savitzky-Golay filtering (Numerical Recipes, p. 769). One important change is that the coefficients are returned in normal order instead of wraparound order.
    /// </summary>
    /// <param name="nl">number of samples to the left</param>
    /// <param name="nr">number of samples to the right</param>
    /// <param name="ld">order of the derivative (smoothing = 0)</param>
    /// <param name="order">order of the polynomial to fit</param>
    /// <returns>the coefficients for the convolution, in normal order (t-nl, ..., t-1, t, t+1, ..., t+nr)</returns>
1893    private static double[] SavitzkyGolayCoefficients(int nl, int nr, int ld, int order) {
1894      int np = nl + nr + 1;
1895
1896      int j, k, imj, ipj, kk, mm;
1897      double fac = 0;
1898      double sum = 0;
1899      if (nl < 0 || nr < 0 || ld > order || nl + nr < order) throw new ArgumentException();
1900
1901      double[,] a = new double[order + 1, order + 1];
1902      double[] b = new double[order + 1];
1903      var c = new double[np];
1904
1905      for (ipj = 0; ipj <= (order << 1); ipj++) {
1906        sum = (ipj > 0 ? 0.0 : 1.0);
1907        for (k = 1; k <= nr; k++) sum += Math.Pow((double)k, (double)ipj);
1908        for (k = 1; k <= nl; k++) sum += Math.Pow((double)-k, (double)ipj);
1909        mm = Math.Min(ipj, 2 * order - ipj);
1910        for (imj = -mm; imj <= mm; imj += 2)
1911          a[(ipj + imj) / 2, (ipj - imj) / 2] = sum;
1912      }
1913      for (j = 0; j < order + 1; j++) b[j] = 0;
1914      b[ld] = 1.0;
1915      alglib.densesolverreport rep;
1916      int info;
1917      double[] x = new double[b.Length];
1918      alglib.rmatrixsolve(a, b.Length, b, out info, out rep, out x);
1919
1920      for (kk = 0; kk < np; kk++) c[kk] = 0.0;
1921      for (k = -nl; k <= nr; k++) {
1922        sum = x[0];
1923        fac = 1.0;
1924        for (mm = 1; mm <= order; mm++) sum += x[mm] * (fac *= k);
1925        kk = k + nl;
1926        c[kk] = sum;
1927      }
1928      return c;
1929    }
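    // For example, SavitzkyGolayCoefficients(3, 3, 1, 3) (sgCoeffMiddle above) yields the 7-tap kernel
    // that evaluates the first derivative of a least-squares cubic through seven equidistant samples
    // at the center point.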
1930
1931
1932    private void InitAllParameters() {
1933      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
1934    }
1935
1936    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
1937      var l = new CheckedItemList<StringValue>();
1938      l.Add(new StringValue("Addition").AsReadOnly());
1939      l.Add(new StringValue("Multiplication").AsReadOnly());
1940      l.Add(new StringValue("Division").AsReadOnly());
1941      l.Add(new StringValue("Subtraction").AsReadOnly());
1942      l.Add(new StringValue("Sine").AsReadOnly());
1943      l.Add(new StringValue("Cosine").AsReadOnly());
1944      l.Add(new StringValue("Square").AsReadOnly());
1945      l.Add(new StringValue("Logarithm").AsReadOnly());
1946      l.Add(new StringValue("Exponential").AsReadOnly());
1947      l.Add(new StringValue("HyperbolicTangent").AsReadOnly());
1948      l.Add(new StringValue("AnalyticQuotient").AsReadOnly());
1949      return l.AsReadOnly();
1950    }
1951
1952    // private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
1953    //   return n.Symbol.Name[0] == 'λ';
1954    // }
1955
1956    private void UpdateTargetVariables() {
1957      var currentlySelectedVariables = TargetVariables.CheckedItems
1958        .OrderBy(i => i.Index)
1959        .Select(i => i.Value.Value)
1960        .ToArray();
1961
1962      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      // preserve the checked state of the previously selected variables
      foreach (var item in newVariablesList) {
        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
      }
1971      TargetVariablesParameter.Value = newVariablesList;
1972    }
1973
1974    private void UpdateGrammarAndEncoding() {
1975      var encoding = new MultiEncoding();
1976      var g = CreateGrammar();
1977      foreach (var targetVar in TargetVariables.CheckedItems) {
1978        var e = new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength);
1979        var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
1980        var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1981        // make sure our multi-manipulator is the only manipulator
1982        e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1983
1984        // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
1985        var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
1986        foreach (var xover in subtreeCrossovers) {
1987          xover.CrossoverProbability.Value = 0.3;
1988        }
1989
1990        encoding = encoding.Add(e); // only limit by length
1991      }
1992      for (int i = 1; i <= NumberOfLatentVariables; i++) {
1993        var e = new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength);
1994        var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
1995        var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1996        // make sure our multi-manipulator is the only manipulator
1997        e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1998
1999        // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
2000        var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
2001        foreach (var xover in subtreeCrossovers) {
2002          xover.CrossoverProbability.Value = 0.3;
2003        }
2004
2005        encoding = encoding.Add(e);
2006      }
2007      Encoding = encoding;
2008    }
2009
2010    private ISymbolicExpressionGrammar CreateGrammar() {
2011      var grammar = new TypeCoherentExpressionGrammar();
2012      grammar.StartGrammarManipulation();
2013
2014      var problemData = ProblemData;
2015      var ds = problemData.Dataset;
2016      grammar.MaximumFunctionArguments = 0;
2017      grammar.MaximumFunctionDefinitions = 0;
2018      var allowedVariables = problemData.AllowedInputVariables.Concat(TargetVariables.CheckedItems.Select(chk => chk.Value.Value));
2019      foreach (var varSymbol in grammar.Symbols.OfType<HeuristicLab.Problems.DataAnalysis.Symbolic.VariableBase>()) {
2020        if (!varSymbol.Fixed) {
2021          varSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<double>(x));
2022          varSymbol.VariableNames = allowedVariables.Where(x => ds.VariableHasType<double>(x));
2023        }
2024      }
2025      foreach (var factorSymbol in grammar.Symbols.OfType<BinaryFactorVariable>()) {
2026        if (!factorSymbol.Fixed) {
2027          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
2028          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
2029          factorSymbol.VariableValues = factorSymbol.VariableNames
2030            .ToDictionary(varName => varName, varName => ds.GetStringValues(varName).Distinct().ToList());
2031        }
2032      }
2033      foreach (var factorSymbol in grammar.Symbols.OfType<FactorVariable>()) {
2034        if (!factorSymbol.Fixed) {
2035          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
2036          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
2037          factorSymbol.VariableValues = factorSymbol.VariableNames
2038            .ToDictionary(varName => varName,
2039            varName => ds.GetStringValues(varName).Distinct()
2040            .Select((n, i) => Tuple.Create(n, i))
2041            .ToDictionary(tup => tup.Item1, tup => tup.Item2));
2042        }
2043      }
2044
2045      grammar.ConfigureAsDefaultRegressionGrammar();
2046
2047      // configure initialization of constants
2048      var constSy = (Constant)grammar.GetSymbol("Constant");
2049      // max and min are only relevant for initialization
2050      constSy.MaxValue = +1.0e-1; // small initial values for constant opt
2051      constSy.MinValue = -1.0e-1;
2052      constSy.MultiplicativeManipulatorSigma = 1.0; // allow large jumps for manipulation
2053      constSy.ManipulatorMu = 0.0;
2054      constSy.ManipulatorSigma = 1.0; // allow large jumps
2055
2056      // configure initialization of variables
2057      var varSy = (Variable)grammar.GetSymbol("Variable");
2058      // init variables to a small value and allow manipulation
2059      varSy.WeightMu = 0.0;
2060      varSy.WeightSigma = 1e-1;
2061      varSy.WeightManipulatorMu = 0.0;
2062      varSy.WeightManipulatorSigma = 1.0;
2063      varSy.MultiplicativeWeightManipulatorSigma = 1.0;
2064
2065      foreach (var f in FunctionSet) {
2066        grammar.GetSymbol(f.Value).Enabled = FunctionSet.ItemChecked(f);
2067      }
2068
2069      grammar.FinishedGrammarManipulation();
2070      return grammar;
2071
2072    }
2073    #endregion
2074
2075
2076    #region Import
2077    public void Load(Problem problem) {
2078      // transfer parameter values from problem parameter
2079      this.ProblemData = problem.ProblemData;
2080      this.TrainingEpisodesParameter.Value = problem.TrainingEpisodesParameter.Value;
2081      this.TargetVariablesParameter.Value = problem.TargetVariablesParameter.Value;
2082      this.Name = problem.Name;
2083      this.Description = problem.Description;
2084    }
2085    #endregion
2086
2087
    // TODO: for integration we only need a subset of the data that is needed for optimization
2089
2090    public class OptimizationData {
2091      public readonly ISymbolicExpressionTree[] trees;
2092      public readonly string[] targetVariables;
2093      public readonly IRegressionProblemData problemData;
2094      public readonly double[][] targetValues;
2095      public readonly double[] inverseStandardDeviation;
2096      public readonly IntRange[] episodes;
2097      public readonly int numericIntegrationSteps;
2098      public readonly string[] latentVariables;
2099      public readonly string odeSolver;
2100      public readonly NodeValueLookup nodeValueLookup;
2101      public readonly int[] rows;
2102      internal readonly string[] variables;
2103
2104      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, string[] inputVariables,
2105        IRegressionProblemData problemData,
2106        double[][] targetValues,
2107        IntRange[] episodes,
2108        int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
2109        this.trees = trees;
2110        this.targetVariables = targetVars;
2111        this.problemData = problemData;
2112        this.targetValues = targetValues;
2113        this.variables = inputVariables;
2114        if (targetValues != null) {
2115          this.inverseStandardDeviation = new double[targetValues.Length];
2116          for (int i = 0; i < targetValues.Length; i++) {
            // calculate the standard deviation for each episode separately and average over the episodes
2118            var epStartIdx = 0;
2119            var stdevs = new List<double>();
2120            foreach (var ep in episodes) {
2121              var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
2122              stdevs.Add(epValues.StandardDeviation());
2123              epStartIdx += ep.Size;
2124            }
2125            inverseStandardDeviation[i] = 1.0 / stdevs.Average();
2126          }
2127        } else
2128          this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
2129        this.episodes = episodes;
2130        this.numericIntegrationSteps = numericIntegrationSteps;
2131        this.latentVariables = latentVariables;
2132        this.odeSolver = odeSolver;
2133        this.nodeValueLookup = new NodeValueLookup(trees);
2134        this.rows = episodes.SelectMany(ep => Enumerable.Range(ep.Start, ep.Size)).ToArray();
2135      }
2136    }
2137
2138    public class NodeValueLookup {
2139      private readonly Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> node2val = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
2140      private readonly ISymbolicExpressionTreeNode[] leafNodes;
2141      private readonly Vector[] constantGradientVectors;
2142      private readonly Dictionary<string, Tuple<double, Vector>> variableValues = new Dictionary<string, Tuple<double, Vector>>();
2143
2144      // accessors for current values of constant and variable nodes. For variable nodes we also need to account for the variable weight
2145      public double ConstantNodeValue(ConstantTreeNode node) => node2val[node].Item1;
2146      public Tuple<double, Vector> ConstantNodeValueAndGradient(ConstantTreeNode node) { var v = node2val[node]; return Tuple.Create(v.Item1, Vector.CreateNew(v.Item2)); }
2147      public double VariableNodeValue(VariableTreeNode node) => variableValues[node.VariableName].Item1 * node2val[node].Item1;
2148      public Tuple<double, Vector> VariableNodeValueAndGradient(VariableTreeNode node) {
2149        // (f*g)' = (f'*g)+(g'*f)       
2150        var g = node2val[node];
2151        var f = variableValues[node.VariableName];
2152
2153        return Tuple.Create(
2154          g.Item1 * f.Item1,
2155          Vector.CreateNew(f.Item2).Scale(g.Item1).Add(Vector.CreateNew(g.Item2).Scale(f.Item1)));
2156      }
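      // e.g. for a variable node with weight w and current variable value x the node value is w*x and,
      // by the product rule, the gradient is x*dw + w*dx, where dw is the indicator vector of the
      // weight parameter and dx is the gradient of the variable value.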
2157
2158      public NodeValueLookup(ISymbolicExpressionTree[] trees, bool variableGradient = false) {
2159        this.leafNodes = trees.SelectMany(t => t.IterateNodesPrefix().Where(n => n.SubtreeCount==0)).ToArray();
2160        if (!variableGradient) {
2161          constantGradientVectors = new Vector[leafNodes.Length];
2162          for (int paramIdx = 0; paramIdx < leafNodes.Length; paramIdx++) {
2163            constantGradientVectors[paramIdx] = Vector.CreateIndicator(length: leafNodes.Length, idx: paramIdx);
2164
2165            var node = leafNodes[paramIdx];
2166            if (node is ConstantTreeNode constTreeNode) {
2167              node2val[node] = Tuple.Create(constTreeNode.Value, constantGradientVectors[paramIdx]);
2168            } else if (node is VariableTreeNode varTreeNode) {
2169              node2val[node] = Tuple.Create(varTreeNode.Weight, constantGradientVectors[paramIdx]);
2170            } else throw new InvalidProgramException();
2171          }
2172        } else {
          // variableGradient = true means gradients are calculated w.r.t. the state variables instead of w.r.t. the parameters
2174          for (int paramIdx = 0; paramIdx < leafNodes.Length; paramIdx++) {
2175            var node = leafNodes[paramIdx];
2176            if (node is ConstantTreeNode constTreeNode) {
2177              node2val[node] = Tuple.Create(constTreeNode.Value, Vector.Zero);
2178            } else if (node is VariableTreeNode varTreeNode) {
2179              node2val[node] = Tuple.Create(varTreeNode.Weight, Vector.Zero);
2180            } else throw new InvalidProgramException();
2181          }
2182        }
2183      }
2184
2185      public int ParameterCount => leafNodes.Length;
2186
2187      public void SetVariableValue(string variableName, double val) {
2188        SetVariableValue(variableName, val, Vector.Zero);
2189      }
2190      /// <summary>
2191      /// returns the current value for variable variableName
2192      /// </summary>
2193      /// <param name="variableName"></param>
2194      /// <returns></returns>
2195      public Tuple<double, Vector> GetVariableValue(string variableName) {
2196        return variableValues[variableName];
2197      }
2198
2199      /// <summary>
2200      /// sets the current value for variable variableName
2201      /// </summary>
2202      /// <param name="variableName"></param>
2203      /// <param name="val"></param>
2204      /// <param name="dVal"></param>
2205      public void SetVariableValue(string variableName, double val, Vector dVal) {
2206        variableValues[variableName] = Tuple.Create(val, dVal);
2207        // if (name2nodes.TryGetValue(variableName, out List<ISymbolicExpressionTreeNode> nodes)) {
2208        //   nodes.ForEach(n => node2val[n] = Tuple.Create(val, dVal));
2209        // } else {
2210        //   var fakeNode = new VariableTreeNode(new Variable());
2211        //   fakeNode.Weight = 1.0;
2212        //   fakeNode.VariableName = variableName;
2213        //   var newNodeList = new List<ISymbolicExpressionTreeNode>();
2214        //   newNodeList.Add(fakeNode);
2215        //   name2nodes.Add(variableName, newNodeList);
2216        //   node2val[fakeNode] = Tuple.Create(val, dVal);
2217        // }
2218      }
2219
2220      internal void UpdateParamValues(double[] x) {
2221        for (int i = 0; i < x.Length; i++) {
2222          node2val[leafNodes[i]] = Tuple.Create(x[i], constantGradientVectors[i]);
2223        }
2224      }
2225    }
2226  }
2227}