
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16785

Last change on this file was 16785, checked in by gkronber, 5 years ago:

#2925: fixed a bug in AQ evaluation

#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;
using HEAL.Attic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableType("065C6A61-773A-42C9-9DE5-61A5D1D823EB")]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }

    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(StorableConstructorFlag _) : base(_) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference from target variables is that there is no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps worsen the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));

      var solversStr = new string[] { "HeuristicLab" /* , "CVODES" */};
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: use training range as default training episode
      // TODO: optimization of starting values for latent variables in CVODES solver
      // TODO: allow to specify the name for the time variable in the dataset and allow variable step-sizes
    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        throw new NotImplementedException();
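        // NOTE: everything below in this branch is currently unreachable because of the
        // exception above; it is kept as a sketch of per-episode parameter optimization.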
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          // double[] optTheta;
          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
          // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        // double[] optTheta;
        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
        // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
        return nmse;
      }
    }

    public static double OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver) {

      // extract constants from trees (without trees for latent variables)
      var targetVariableTrees = trees.Take(targetVars.Length).ToArray();
      var latentVariableTrees = trees.Skip(targetVars.Length).ToArray();
      var constantNodes = targetVariableTrees.Select(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().ToArray()).ToArray();
      var initialTheta = constantNodes.Select(nodes => nodes.Select(n => n.Value).ToArray()).ToArray();

      // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
      double nmse = PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxParameterOptIterations,
        initialTheta, out double[] pretunedParameters);

      // extend parameter vector to include parameters for latent variable trees
      pretunedParameters = pretunedParameters
        .Concat(latentVariableTrees
        .SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().Select(n => n.Value)))
        .ToArray();

      // optimize parameters using integration of f(x,y) to calculate y(t)
      nmse = OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
        out double[] optTheta);
      // var optTheta = pretunedParameters;

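      // guard against degenerate fits: NaN/infinite or absurdly large NMSE values are
      // mapped to a fixed worst-case penalty (100 per tree and per row); note that in
      // this case we return without writing the optimized constants back into the trees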
      if (double.IsNaN(nmse) ||
        double.IsInfinity(nmse) ||
        nmse > 100 * trees.Length * episodes.Sum(ep => ep.Size))
        return 100 * trees.Length * episodes.Sum(ep => ep.Size);

      // update tree nodes with optimized values
      var paramIdx = 0;
      for (var treeIdx = 0; treeIdx < constantNodes.Length; treeIdx++) {
        for (int i = 0; i < constantNodes[treeIdx].Length; i++)
          constantNodes[treeIdx][i].Value = optTheta[paramIdx++];
      }
      return nmse;
    }

    private static double PreTuneParameters(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      double[][] initialTheta,
      out double[] optTheta) {
      var thetas = new List<double>();
      double nmse = 0.0;
      var maxTreeNmse = 100 * episodes.Sum(ep => ep.Size);

      var targetTrees = trees.Take(targetVars.Length).ToArray();
      var latentTrees = trees.Skip(targetVars.Length).ToArray(); // the latent variable trees follow the target variable trees (cf. OptimizeForEpisodes)

      {
        // first calculate values of latent variables by integration
        var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
        var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "HeuristicLab");

        var fi = new double[myState.rows.Length * targetVars.Length];
        var jac = new double[myState.rows.Length * targetVars.Length, myState.nodeValueLookup.ParameterCount];
        var latentValues = new double[myState.rows.Length, latentVariables.Length];
        Integrate(myState, fi, jac, latentValues);

        // add integrated latent variables to dataset
        var modifiedDataset = ((Dataset)problemData.Dataset).ToModifiable();
        foreach (var variable in latentVariables) {
          modifiedDataset.AddVariable(variable, Enumerable.Repeat(0.0, modifiedDataset.Rows).ToList()); // empty column
        }
        int predIdx = 0;
        foreach (var ep in episodes) {
          for (int r = ep.Start; r < ep.End; r++) {
            for (int latVarIdx = 0; latVarIdx < latentVariables.Length; latVarIdx++) {
              modifiedDataset.SetVariableValue(latentValues[predIdx, latVarIdx], latentVariables[latVarIdx], r);
            }
            predIdx++;
          }
        }

        problemData = new RegressionProblemData(modifiedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      }
      // NOTE: the order of values in the parameter vector matches the prefix order of constant nodes in the trees
      for (int treeIdx = 0; treeIdx < targetTrees.Length; treeIdx++) {
        var t = targetTrees[treeIdx];

        var targetValuesDiff = new List<double>();
        foreach (var ep in episodes) {
          var episodeRows = Enumerable.Range(ep.Start, ep.Size);
          var targetValues = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], episodeRows).ToArray();
          targetValuesDiff.AddRange(targetValues.Skip(1).Zip(targetValues, (t1, t0) => t1 - t0)); // TODO: smoothing or multi-pole
        }
        var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End - 1)); // because we lose the last row in the differencing step

        // data for input variables is assumed to be known
        // input variables in pretuning are all target variables and all variable names that occur in the tree
        var inputVariables = targetVars.Concat(t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName)).Distinct();

        var myState = new OptimizationData(new[] { t },
          targetVars,
          inputVariables.ToArray(),
          problemData, new[] { targetValuesDiff.ToArray() }, adjustedEpisodes.ToArray(), -99, latentVariables, string.Empty); // TODO
        var paramCount = myState.nodeValueLookup.ParameterCount;

        optTheta = new double[0];
        if (initialTheta[treeIdx].Length > 0) {
          try {
            alglib.minlmstate state;
            alglib.minlmreport report;
            var p = new double[initialTheta[treeIdx].Length];
            var lowerBounds = Enumerable.Repeat(-1000.0, p.Length).ToArray();
            var upperBounds = Enumerable.Repeat(1000.0, p.Length).ToArray();
            Array.Copy(initialTheta[treeIdx], p, p.Length);
            alglib.minlmcreatevj(targetValuesDiff.Count, p, out state);
            alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
            alglib.minlmsetbc(state, lowerBounds, upperBounds);
#if DEBUG
            //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
            alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

            alglib.minlmresults(state, out optTheta, out report);
            if (report.terminationtype < 0) {
#if DEBUG
              if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
#endif
              optTheta = initialTheta[treeIdx];
            }
          } catch (alglib.alglibexception) {
            optTheta = initialTheta[treeIdx];
          }
        }
        var tree_nmse = EvaluateMSE(optTheta, myState);
        if (double.IsNaN(tree_nmse) || double.IsInfinity(tree_nmse) || tree_nmse > maxTreeNmse) {
          nmse += maxTreeNmse;
          thetas.AddRange(initialTheta[treeIdx]);
        } else {
          nmse += tree_nmse;
          thetas.AddRange(optTheta);
        }
      } // foreach tree
      optTheta = thetas.ToArray();

      return nmse;
    }

    // similar to above but this time we integrate and optimize all parameters for all targets concurrently
    private static double OptimizeParameters(ISymbolicExpressionTree[] trees, IRegressionProblemData problemData, string[] targetVars, string[] latentVariables,
      IEnumerable<IntRange> episodes, int maxParameterOptIterations, double[] initialTheta, int numericIntegrationSteps, string odeSolver, out double[] optTheta) {
      var rowsForDataExtraction = episodes.SelectMany(e => Enumerable.Range(e.Start, e.Size)).ToArray();
      var targetValues = new double[targetVars.Length][];
      for (int treeIdx = 0; treeIdx < targetVars.Length; treeIdx++) {
        var t = trees[treeIdx];

        targetValues[treeIdx] = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], rowsForDataExtraction).ToArray();
      }

      // data for input variables is assumed to be known
      // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
      var inputVariables = trees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
        .Except(targetVars)
        .Except(latentVariables)
        .Distinct();

      var myState = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
      optTheta = initialTheta;

      if (initialTheta.Length > 0) {
        var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
        var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
        try {
          alglib.minlmstate state;
          alglib.minlmreport report;
          alglib.minlmcreatevj(rowsForDataExtraction.Length * trees.Length, initialTheta, out state);
          alglib.minlmsetbc(state, lowerBounds, upperBounds);
          alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
#if DEBUG
          //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
          alglib.minlmoptimize(state, IntegrateAndEvaluateObjectiveVector, IntegrateAndEvaluateObjectiveVectorAndJacobian, null, myState);

          alglib.minlmresults(state, out optTheta, out report);

          if (report.terminationtype < 0) {
#if DEBUG
            if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
#endif
            // there was a problem: reset theta and evaluate for initial values
            optTheta = initialTheta;
          }
        } catch (alglib.alglibexception) {
          optTheta = initialTheta;
        }
      }
      var nmse = EvaluateIntegratedMSE(optTheta, myState);
      var maxNmse = 100 * targetValues.Length * rowsForDataExtraction.Length;
      if (double.IsNaN(nmse) || double.IsInfinity(nmse) || nmse > maxNmse) nmse = maxNmse;
      return nmse;
    }

    // helper
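    // NOTE: fi is sized for a single tree here; EvaluateObjectiveVector fills one entry
    // per row and per tree, and within this file the helper is only used from
    // PreTuneParameters, where the OptimizationData contains exactly one tree.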
    public static double EvaluateMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count()];
      EvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      var rows = optimizationData.rows;
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      var variables = optimizationData.variables;

      nodeValueLookup.UpdateParamValues(x);

      int outputIdx = 0;
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          } else {
            nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          }
        }
        // interpret all trees
        for (int treeIdx = 0; treeIdx < optimizationData.trees.Length; treeIdx++) {
          var tree = optimizationData.trees[treeIdx];
          var pred = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup);
          var y = optimizationData.targetValues[treeIdx][trainIdx];
          fi[outputIdx++] = (y - pred) * optimizationData.inverseStandardDeviation[treeIdx];
        }
      }
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      // extract variable values from dataset
      var variableValues = new Dictionary<string, Tuple<double, Vector>>();
      var problemData = optimizationData.problemData;
      var ds = problemData.Dataset;
      var rows = optimizationData.rows;
      var variables = optimizationData.variables;

      var nodeValueLookup = optimizationData.nodeValueLookup;
      nodeValueLookup.UpdateParamValues(x);

      int termIdx = 0;

      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          } else {
            nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          }
        }

        var calculatedVariables = optimizationData.targetVariables;

        var trees = optimizationData.trees;
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var targetVarName = calculatedVariables[i];

          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup, out f, out g);

          var y = optimizationData.targetValues[i][trainIdx];
          fi[termIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale of NMSE
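          // the residual is scaled by s = 1/σ(target); since r = (y - f)·s, its derivative w.r.t. θ_j is -g_j·s (next line)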
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[termIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];

          termIdx++;
        }
      }
    }

    // helper
    public static double EvaluateIntegratedMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count() * optimizationData.targetVariables.Length];
      IntegrateAndEvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { IntegrateAndEvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      var rows = optimizationData.rows.ToArray();
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      int outputIdx = 0;

      nodeValueLookup.UpdateParamValues(x);

      Integrate(optimizationData, fi, jac, null);
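      // fi now contains the integrated predictions for all rows and target variables and
      // jac (if non-null) the corresponding sensitivities ∂ŷ/∂θ; the loop below converts
      // both in place into scaled residuals (y - ŷ)/σ and their gradients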
      var trees = optimizationData.trees;

      // update result with error
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        for (int i = 0; i < optimizationData.targetVariables.Length; i++) {
          var tree = trees[i];
          var y = optimizationData.targetValues[i][trainIdx];
          fi[outputIdx] = (y - fi[outputIdx]) * optimizationData.inverseStandardDeviation[i];  // scale for normalized squared error
          if (jac != null) for (int j = 0; j < x.Length; j++) jac[outputIdx, j] = -jac[outputIdx, j] * optimizationData.inverseStandardDeviation[i];
          outputIdx++;
        }
      }
    }

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
      if (!results.ContainsKey("Squared error and gradient")) {
        results.Add(new Result("Squared error and gradient", typeof(DataTable)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        throw new NotSupportedException();
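        // NOTE: as in Evaluate, the per-episode branch is disabled; the code below is
        // unreachable until per-episode analysis is supported again.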
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();

        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        // data for input variables is assumed to be known
        // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
        var inputVariables = trees
          .SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
          .Except(targetVars)
          .Except(latentVariables)
          .Distinct();

        var optimizationData = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var numParams = optimizationData.nodeValueLookup.ParameterCount;

        var fi = new double[optimizationData.rows.Length * targetVars.Length];
        var jac = new double[optimizationData.rows.Length * targetVars.Length, numParams];
        var latentValues = new double[optimizationData.rows.Length, latentVariables.Length];
        Integrate(optimizationData, fi, jac, latentValues);

        // for target values and latent variables
        var trainingRows = optimizationData.rows;
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var idx = Enumerable.Range(0, trainingRows.Length).Select(i => i * targetVars.Length + colIdx);
            var pred = idx.Select(i => fi[i]);
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, pred.ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            for (int paramIdx = 0; paramIdx < numParams; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", idx.Select(i => jac[i, paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var idx = Enumerable.Range(0, trainingRows.Length);
            var pred = idx.Select(i => latentValues[i, colIdx - targetVars.Length]);
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, pred.ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        var errorTable = new DataTable("Squared error and gradient");
        var seRow = new DataRow("Squared error");
        var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        errorTable.Rows.Add(seRow);
        foreach (var gRow in gradientRows) {
          gRow.VisualProperties.SecondYAxis = true;
          errorTable.Rows.Add(gRow);
        }
        var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        int r = 0;

        // foreach (var y_pred in trainingPrediction) {
        //   // calculate objective function gradient
        //   double f_i = 0.0;
        //   Vector g_i = Vector.CreateNew(new double[numParams]);
        //   for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
        //     var y_pred_f = y_pred[colIdx].Item1;
        //     var y = targetValues[colIdx][r];
        //
        //     var res = (y - y_pred_f) * optimizationData.inverseStandardDeviation[colIdx];
        //     var ressq = res * res;
        //     f_i += ressq;
        //     g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res));
        //   }
        //   seRow.Values.Add(f_i);
        //   for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
        //   r++;
        // }
        // results["Squared error and gradient"].Value = errorTable;

        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
        var testPrediction = Integrate(testOptimizationData).ToArray();

        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var testDataTable = new DataTable(targetVar + " prediction (test)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            testDataTable.Rows.Add(actualValuesRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);

          } else {
            // var latentVar = latentVariables[colIdx - targetVars.Length];
            // var testDataTable = new DataTable(latentVar + " prediction (test)");
            // var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            // var emptyRow = new DataRow(latentVar);
            // testDataTable.Rows.Add(emptyRow);
            // testDataTable.Rows.Add(predictedValuesRow);
            // testList.Add(testDataTable);
          }
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();

        #region simplification of models
        // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var clonedTrees = new List<ISymbolicExpressionTree>();
        for (int idx = 0; idx < trees.Length; idx++) {
          clonedTrees.Add((ISymbolicExpressionTree)trees[idx].Clone());
        }
        var ds = problemData.Dataset;
        var newProblemData = new RegressionProblemData((IDataset)ds.Clone(), problemData.AllowedInputVariables, problemData.TargetVariable);
        results["Solution"].Value = new Solution(clonedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   TrainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);

        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(tree);
          models.Add(simplifiedTreeVar);
        }

        results["Models"].Value = models;
        #endregion

        #region produce classical solutions to allow visualization with PDP
        for (int treeIdx = 0; treeIdx < trees.Length; treeIdx++) {
          var t = (ISymbolicExpressionTree)trees[treeIdx].Clone();
          var name = targetVars.Concat(latentVariables).ElementAt(treeIdx); // whatever
          var model = new SymbolicRegressionModel(name + "_diff", t, new SymbolicDataAnalysisExpressionTreeLinearInterpreter());
          var solutionDataset = ((Dataset)problemData.Dataset).ToModifiable();
          if (treeIdx < targetVars.Length) {
            var absValues = solutionDataset.GetDoubleValues(name).ToArray();
            solutionDataset.AddVariable(name + "_diff", absValues.Skip(1).Zip(absValues, (v1, v0) => v1 - v0).Concat(new double[] { 0.0 }).ToList());
          }
          var solutionProblemData = new RegressionProblemData(solutionDataset, problemData.AllowedInputVariables, name + "_diff");
          var solution = model.CreateRegressionSolution(solutionProblemData);
          results.AddOrUpdateResult("Solution " + name, solution);
        }
        #endregion
      }
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
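
    // In short: for y'(t) = f(y(t), θ) the forward sensitivities s_i(t) = ∂y(t)/∂θ_i
    // satisfy their own ODE,
    //   s_i'(t) = ∂f/∂y(y(t), θ) · s_i(t) + ∂f/∂θ_i(y(t), θ),
    // which is integrated forward in time alongside y(t). In the HeuristicLab solver
    // used below, InterpretRec evaluates f together with its gradient (as a Vector),
    // so each integration step can update the state value and its sensitivities
    // simultaneously.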

    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData) {
      var nTargets = optimizationData.targetVariables.Length;
      var n = optimizationData.rows.Length * optimizationData.targetVariables.Length;
      var d = optimizationData.nodeValueLookup.ParameterCount;
      double[] fi = new double[n];
      double[,] jac = new double[n, d];
      Integrate(optimizationData, fi, jac, null);
      for (int i = 0; i < optimizationData.rows.Length; i++) {
        var res = new Tuple<double, Vector>[nTargets];
        for (int j = 0; j < nTargets; j++) {
          res[j] = Tuple.Create(fi[i * nTargets + j], Vector.CreateFromMatrixRow(jac, i * nTargets + j));
        }
        yield return res;
      }
    }

    public static void Integrate(OptimizationData optimizationData, double[] fi, double[,] jac, double[,] latentValues) {
      var trees = optimizationData.trees;
      var dataset = optimizationData.problemData.Dataset;
      var inputVariables = optimizationData.variables;
      var targetVariables = optimizationData.targetVariables;
      var latentVariables = optimizationData.latentVariables;
      var episodes = optimizationData.episodes;
      var odeSolver = optimizationData.odeSolver;
      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;
      var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding

      var nodeValues = optimizationData.nodeValueLookup;

      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var outputRowIdx = 0;
      var episodeIdx = 0;
      foreach (var episode in optimizationData.episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start).ToArray();

        var t0 = rows.First();

        // initialize values for inputs and targets from dataset
        foreach (var varName in inputVariables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValues.SetVariableValue(varName, value, Vector.Zero);
          } else {
            var y0 = dataset.GetDoubleValue(varName, t0);
            nodeValues.SetVariableValue(varName, y0, Vector.Zero);
          }
        }
        foreach (var varName in targetVariables) {
          var y0 = dataset.GetDoubleValue(varName, t0);
          nodeValues.SetVariableValue(varName, y0, Vector.Zero);

          // output starting value
          fi[outputRowIdx] = y0;
          Vector.Zero.CopyTo(jac, outputRowIdx);

          outputRowIdx++;
        }

        var latentValueRowIdx = 0;
        var latentValueColIdx = 0;
        foreach (var varName in latentVariables) {
          var y0 = 0.0; // assume we start at zero
          nodeValues.SetVariableValue(varName, y0, Vector.Zero);

          if (latentValues != null) {
            latentValues[latentValueRowIdx, latentValueColIdx++] = y0;
          }
        }
        latentValueColIdx = 0; latentValueRowIdx++;

        { // CODE BELOW DOESN'T WORK ANYMORE
          // if (latentVariables.Length > 0) throw new NotImplementedException();
          //
          // // add value entries for latent variables which are also integrated
          // // initial values are at the end of the parameter vector
          // // separate initial values for each episode
          // var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
          // foreach (var latentVar in latentVariables) {
          //   var arr = new double[parameterValues.Length]; // backing array
          //   arr[initialValueIdx] = 1.0;
          //   var g = new Vector(arr);
          //   nodeValues.SetVariableValue(latentVar, parameterValues[initialValueIdx], g); // we don't have observations for latent variables therefore we optimize the initial value for each episode
          //   initialValueIdx++;
          // }
        }

        var prevT = t0; // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
        foreach (var t in rows.Skip(1)) {
          if (odeSolver == "HeuristicLab")
            IntegrateHL(trees, calculatedVariables, nodeValues, numericIntegrationSteps); // integrator updates nodeValues
          else if (odeSolver == "CVODES")
            throw new NotImplementedException();
          // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;

          // update output for target variables (TODO: if we want to visualize the latent variables then we need to provide a separate output)
          for (int i = 0; i < targetVariables.Length; i++) {
            var targetVar = targetVariables[i];
            var yt = nodeValues.GetVariableValue(targetVar);

            // fill up remaining rows with last valid value if there are invalid values
            if (double.IsNaN(yt.Item1) || double.IsInfinity(yt.Item1)) {
              for (; outputRowIdx < fi.Length; outputRowIdx++) {
                var prevIdx = outputRowIdx - targetVariables.Length;
                fi[outputRowIdx] = fi[prevIdx]; // current <- prev
                if (jac != null) for (int j = 0; j < jac.GetLength(1); j++) jac[outputRowIdx, j] = jac[prevIdx, j];
              }
              return;
            }

            fi[outputRowIdx] = yt.Item1;
            var g = yt.Item2;
            g.CopyTo(jac, outputRowIdx);
            outputRowIdx++;
          }
          if (latentValues != null) {
            foreach (var latentVariable in latentVariables) {
              var lt = nodeValues.GetVariableValue(latentVariable).Item1;
              latentValues[latentValueRowIdx, latentValueColIdx++] = lt;
            }
            latentValueRowIdx++; latentValueColIdx = 0;
          }

          // update for next time step (only the inputs)
          foreach (var varName in inputVariables) {
            // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
            if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
              // value is unchanged
            } else {
              nodeValues.SetVariableValue(varName, dataset.GetDoubleValue(varName, t), Vector.Zero);
            }
          }
        }
        episodeIdx++;
      }
    }

    #region CVODES

    /*
    /// <summary>
    ///  Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
    /// </summary>
    /// <param name="trees">Each equation in the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="variableValues">The start values of the calculated variables as well as their sensitivities over parameters</param>
    /// <param name="parameterValues">The current parameter values</param>
    /// <param name="t">The time t up to which we need to integrate.</param>
    private static void IntegrateCVODES(
      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
      string[] calculatedVariables, // names of elements of y
      Dictionary<string, Tuple<double, Vector>> variableValues,  //  y (input and output) input: y(t0), output: y(t0+t)
959      double[] parameterValues, // p
960      double t // duration t for which we want to integrate
961      ) {

      // the RHS of the ODE
      // dy/dt = f(y_t,x_t,p)
      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, parameterValues);
      // the Jacobian ∂f/∂y
      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, parameterValues);

      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, parameterValues);

      // setup solver
      int numberOfEquations = trees.Length;
      IntPtr y = IntPtr.Zero;
      IntPtr cvode_mem = IntPtr.Zero;
      IntPtr A = IntPtr.Zero;
      IntPtr yS0 = IntPtr.Zero;
      IntPtr linearSolver = IntPtr.Zero;
      var ns = parameterValues.Length; // number of parameters

      try {
        y = CVODES.N_VNew_Serial(numberOfEquations);
        // init y to current values of variables
        // y must be initialized before calling CVodeInit
        for (int i = 0; i < calculatedVariables.Length; i++) {
          CVODES.NV_Set_Ith_S(y, i, variableValues[calculatedVariables[i]].Item1);
        }

        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);

        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
        Assert(CVODES.CV_SUCCESS == flag);

        double relTol = 1.0e-2;
        double absTol = 1.0;
        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
        Assert(CVODES.CV_SUCCESS == flag);

        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
        Assert(A != IntPtr.Zero);

        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
        Assert(linearSolver != IntPtr.Zero);

        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
        Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
        Assert(CVODES.CV_SUCCESS == flag);

        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
        unsafe {
          // set to initial sensitivities supplied by caller
          for (int pIdx = 0; pIdx < ns; pIdx++) {
            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
              CVODES.NV_Set_Ith_S(yS0_i, varIdx, variableValues[calculatedVariables[varIdx]].Item2[pIdx]);
            }
          }
        }

        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
        Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
        Assert(CVODES.CV_SUCCESS == flag);

        // make one forward integration step
        double tout = 0.0; // output: the time actually reached by the solver
        flag = CVODES.CVode(cvode_mem, t, y, ref tout, CVODES.CV_NORMAL);
        if (flag == CVODES.CV_SUCCESS) {
          Assert(t == tout);

          // get sensitivities
          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
          Assert(CVODES.CV_SUCCESS == flag);

          // update variableValues based on integration results
          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
            var gArr = new double[parameterValues.Length];
            for (var pIdx = 0; pIdx < parameterValues.Length; pIdx++) {
              unsafe {
                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
              }
            }
            variableValues[calculatedVariables[varIdx]] = Tuple.Create(yi, new Vector(gArr));
          }
        } else {
          variableValues.Clear();   // signal failure to the caller by returning no values
        }
      } finally {
        // cleanup all allocated objects
        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
      }
    }

    private static CVODES.CVRhsFunc CreateOdeRhs(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {
      // we don't need to calculate a gradient here
      return (double t,
              IntPtr y, // N_Vector, current value of y (input)
              IntPtr ydot, // N_Vector, calculated value of y' (output)
              IntPtr user_data // optional user data (unused here)
              ) => {
                // TODO: perf
                var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();

                int pIdx = 0;
                foreach (var tree in trees) {
                  foreach (var n in tree.IterateNodesPrefix()) {
                    if (IsConstantNode(n)) {
                      nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we do not need a gradient
                      pIdx++;
                    } else if (n.SubtreeCount == 0) {
                      // for variables and latent variables get the current value from the state vector y
                      var varName = n.Symbol.Name;
                      var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                      if (varIdx < 0) throw new InvalidProgramException();
                      var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                      nodeValues.Add(n, Tuple.Create(y_i, Vector.Zero)); // no gradient needed
                    }
                  }
                }
                for (int i = 0; i < trees.Length; i++) {
                  var tree = trees[i];
                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
                  CVODES.NV_Set_Ith_S(ydot, i, res_i.Item1);
                }
                return 0;
              };
    }

    private static CVODES.CVDlsJacFunc CreateJac(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {

      return (
        double t, // current time (input)
        IntPtr y, // N_Vector, current value of y (input)
        IntPtr fy, // N_Vector, current value of f (input)
        IntPtr Jac, // SUNMatrix ∂f/∂y (output, row i contains the gradient ∂f_i/∂y)
        IntPtr user_data, // optional (unused here)
        IntPtr tmp1, // N_Vector, optional (unused here)
        IntPtr tmp2, // N_Vector, optional (unused here)
        IntPtr tmp3 // N_Vector, optional (unused here)
      ) => {
        // here we need to calculate partial derivatives for the calculated variables y
        var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
        int pIdx = 0;
        foreach (var tree in trees) {
          foreach (var n in tree.IterateNodesPrefix()) {
            if (IsConstantNode(n)) {
              nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
              pIdx++;
            } else if (n.SubtreeCount == 0) {
              // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
              var varName = n.Symbol.Name;
              var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
              if (varIdx < 0) throw new InvalidProgramException();

              var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
              var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
              gArr[varIdx] = 1.0;
              var g = new Vector(gArr);
              nodeValues.Add(n, Tuple.Create(y_i, g));
            }
          }
        }

        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
          var g = res.Item2;
          for (int j = 0; j < calculatedVariables.Length; j++) {
            CVODES.SUNDenseMatrix_Set(Jac, i, j, g[j]);
          }
        }
        return 0; // on success
      };
    }
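    // Worked example (illustrative only, not part of the implementation): for a
    // two-equation system with f_1 = y_2 and f_2 = -p_1*y_1 the dense Jacobian
    // filled above is
    //   ∂f/∂y = [  0    1 ]
    //           [ -p_1  0 ]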


    // calculates the sensitivity RHS for all equations at once:
    // it computes (∂f/∂y)s_i(t) + ∂f/∂p_i and stores the result in ySdot.
    // Index i refers to parameters; the dimensionality of the matrix and vectors is the number of equations.
    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, double[] parameterValues) {
      return (
              int Ns, // number of parameters
              double t, // current time
              IntPtr y, // N_Vector y(t) (input)
              IntPtr ydot, // N_Vector dy/dt(t) (input)
              IntPtr yS, // N_Vector*, one vector for each parameter (input)
              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
              IntPtr user_data, // optional (unused here)
              IntPtr tmp1, // N_Vector, optional (unused here)
              IntPtr tmp2 // N_Vector, optional (unused here)
        ) => {
          // here we need to calculate partial derivatives for the calculated variables y as well as for the parameters
          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
          var d = calculatedVariables.Length + parameterValues.Length; // dimensionality of gradient
          // first collect variable values
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsVariableNode(n)) {
                // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
                var varName = n.Symbol.Name;
                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                if (varIdx < 0) throw new InvalidProgramException();

                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                var gArr = new double[d]; // backing array
                gArr[varIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(y_i, g));
              }
            }
          }
          // then collect constants
          int pIdx = 0;
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsConstantNode(n)) {
                var gArr = new double[d];
                gArr[calculatedVariables.Length + pIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], g));
                pIdx++;
              }
            }
          }
          // gradient vector layout: [∂f/∂y_1, ..., ∂f/∂y_N, ∂f/∂p_1, ..., ∂f/∂p_K]

          for (pIdx = 0; pIdx < Ns; pIdx++) {
            unsafe {
              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
              CVODES.N_VConst_Serial(0.0, sDot_pi);
            }
          }

          for (int i = 0; i < trees.Length; i++) {
            var tree = trees[i];
            var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
            var g = res.Item2;

            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i
            for (pIdx = 0; pIdx < Ns; pIdx++) {
              unsafe {
                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);

                var v = CVODES.NV_Get_Ith_S(sDot_pi, i);
                // (∂f/∂y)s_i(t)
                var p = 0.0;
                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
                  p += g[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
                }
                // + ∂f/∂p_i
                CVODES.NV_Set_Ith_S(sDot_pi, i, v + p + g[calculatedVariables.Length + pIdx]);
              }
            }
          }
          return 0; // on success
        };
    }
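    // Worked example (illustrative only, not part of the implementation): for the
    // scalar ODE dy/dt = f(y,p) = p*y we have ∂f/∂y = p and ∂f/∂p = y, so the
    // single sensitivity s(t) = ∂y/∂p evolves according to ds/dt = (∂f/∂y)s + ∂f/∂p = p*s + y.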
    */
    #endregion

    private static void IntegrateHL(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables, // names of integrated variables
      NodeValueLookup nodeValues,
      int numericIntegrationSteps) {

      double[] deltaF = new double[calculatedVariables.Length];
      Vector[] deltaG = new Vector[calculatedVariables.Length];

      double h = 1.0 / numericIntegrationSteps;
      for (int step = 0; step < numericIntegrationSteps; step++) {

        // evaluate all trees
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];

          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
          deltaF[i] = f;
          deltaG[i] = g;
        }

        // update variableValues for the next step (explicit Euler integration)
        for (int i = 0; i < trees.Length; i++) {
          var varName = calculatedVariables[i];
          var oldVal = nodeValues.GetVariableValue(varName);
          nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2.Add(deltaG[i].Scale(h)));
        }
      }
    }
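    // Illustrative sketch (hypothetical helper, not used by this class): a single
    // explicit Euler step for a scalar ODE dy/dt = f(y), matching the per-variable
    // update rule applied in IntegrateHL above.
    private static double EulerStep(Func<double, double> f, double y, double h) {
      return y + h * f(y); // y(t+h) ≈ y(t) + h * f(y(t))
    }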

    // TODO: use an existing interpreter implementation instead
    private static double InterpretRec(ISymbolicExpressionTreeNode node, NodeValueLookup nodeValues) {
      if (node is ConstantTreeNode) {
        return ((ConstantTreeNode)node).Value;
      } else if (node is VariableTreeNode) {
        return nodeValues.NodeValue(node);
      } else if (node.Symbol is Addition) {
        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        for (int i = 1; i < node.SubtreeCount; i++) {
          f += InterpretRec(node.GetSubtree(i), nodeValues);
        }
        return f;
      } else if (node.Symbol is Multiplication) {
        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        for (int i = 1; i < node.SubtreeCount; i++) {
          f *= InterpretRec(node.GetSubtree(i), nodeValues);
        }
        return f;
      } else if (node.Symbol is Subtraction) {
        if (node.SubtreeCount == 1) {
          return -InterpretRec(node.GetSubtree(0), nodeValues);
        } else {
          var f = InterpretRec(node.GetSubtree(0), nodeValues);
          for (int i = 1; i < node.SubtreeCount; i++) {
            f -= InterpretRec(node.GetSubtree(i), nodeValues);
          }
          return f;
        }
      } else if (node.Symbol is Division) {
        if (node.SubtreeCount == 1) {
          var f = InterpretRec(node.GetSubtree(0), nodeValues);
          // protected division
          if (f.IsAlmost(0.0)) {
            return 0;
          } else {
            return 1.0 / f;
          }
        } else {
          var f = InterpretRec(node.GetSubtree(0), nodeValues);
          for (int i = 1; i < node.SubtreeCount; i++) {
            var g = InterpretRec(node.GetSubtree(i), nodeValues);
            // protected division
            if (g.IsAlmost(0.0)) {
              return 0;
            } else {
              f /= g;
            }
          }
          return f;
        }
      } else if (node.Symbol is Sine) {
        Assert(node.SubtreeCount == 1);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        return Math.Sin(f);
      } else if (node.Symbol is Cosine) {
        Assert(node.SubtreeCount == 1);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        return Math.Cos(f);
      } else if (node.Symbol is Square) {
        Assert(node.SubtreeCount == 1);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        return f * f;
      } else if (node.Symbol is Exponential) {
        Assert(node.SubtreeCount == 1);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        return Math.Exp(f);
      } else if (node.Symbol is Logarithm) {
        Assert(node.SubtreeCount == 1);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        return Math.Log(f);
      } else if (node.Symbol is HyperbolicTangent) {
        Assert(node.SubtreeCount == 1);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        return Math.Tanh(f);
      } else if (node.Symbol is AnalyticQuotient) {
        Assert(node.SubtreeCount == 2);

        var f = InterpretRec(node.GetSubtree(0), nodeValues);
        var g = InterpretRec(node.GetSubtree(1), nodeValues);
        return f / Math.Sqrt(1 + g * g);
      } else throw new NotSupportedException("unsupported symbol");
    }
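    // Note on the protected division above: a denominator that is (almost) zero yields 0
    // instead of ±Infinity or NaN, e.g. x / 0 evaluates to 0. This keeps intermediate
    // values finite during integration, at the cost of a discontinuity at such points.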

    private static void Assert(bool cond) {
#if DEBUG
      if (!cond) throw new InvalidOperationException("Assertion failed");
#endif
    }

    private static void InterpretRec(
      ISymbolicExpressionTreeNode node,
      NodeValueLookup nodeValues,      // contains value and gradient vector for a node (variables and constants only)
      out double z,
      out Vector dz
      ) {
      double f, g;
      Vector df, dg;
      if (node.Symbol is Constant || node.Symbol is Variable) {
        z = nodeValues.NodeValue(node);
        dz = Vector.CreateNew(nodeValues.NodeGradient(node)); // original gradient vectors are never changed by evaluation
      } else if (node.Symbol is Addition) {
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        for (int i = 1; i < node.SubtreeCount; i++) {
          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
          f = f + g;
          df = df.Add(dg);
        }
        z = f;
        dz = df;
      } else if (node.Symbol is Multiplication) {
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        for (int i = 1; i < node.SubtreeCount; i++) {
          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
          df = df.Scale(g).Add(dg.Scale(f));  // (f*g)' = f'*g + f*g'; df must be updated before f is overwritten
          f = f * g;
        }
        z = f;
        dz = df;
      } else if (node.Symbol is Subtraction) {
        if (node.SubtreeCount == 1) {
          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
          z = -f;
          dz = df.Scale(-1.0);
        } else {
          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
          for (int i = 1; i < node.SubtreeCount; i++) {
            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
            f = f - g;
            df = df.Subtract(dg);
          }
          z = f;
          dz = df;
        }
      } else if (node.Symbol is Division) {
        if (node.SubtreeCount == 1) {
          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
          // protected division
          if (f.IsAlmost(0.0)) {
            z = 0;
            dz = Vector.Zero;
          } else {
            z = 1.0 / f;
            dz = df.Scale(-1 * z * z);
          }
        } else {
          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
          for (int i = 1; i < node.SubtreeCount; i++) {
            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
            // protected division
            if (g.IsAlmost(0.0)) {
              z = 0;
              dz = Vector.Zero;
              return;
            } else {
              var inv_g = 1.0 / g;
              df = df.Scale(inv_g).Add(dg.Scale(-f * inv_g * inv_g)); // (f/g)' = f'/g - f*g'/g²; df must be updated before f is overwritten
              f = f * inv_g;
            }
          }
          z = f;
          dz = df;
        }
      } else if (node.Symbol is Sine) {
        Assert(node.SubtreeCount == 1);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        z = Math.Sin(f);
        dz = df.Scale(Math.Cos(f));
      } else if (node.Symbol is Cosine) {
        Assert(node.SubtreeCount == 1);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        z = Math.Cos(f);
        dz = df.Scale(-Math.Sin(f));
      } else if (node.Symbol is Square) {
        Assert(node.SubtreeCount == 1);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        z = f * f;
        dz = df.Scale(2.0 * f);
      } else if (node.Symbol is Exponential) {
        Assert(node.SubtreeCount == 1);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        z = Math.Exp(f);
        dz = df.Scale(Math.Exp(f));
      } else if (node.Symbol is Logarithm) {
        Assert(node.SubtreeCount == 1);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        z = Math.Log(f);
        dz = df.Scale(1.0 / f);
      } else if (node.Symbol is HyperbolicTangent) {
        Assert(node.SubtreeCount == 1);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        z = Math.Tanh(f);
        dz = df.Scale(1 - z * z); // (tanh f)' = f'·sech²(f) = f'·(1 - tanh²(f))
      } else if (node.Symbol is AnalyticQuotient) {
        Assert(node.SubtreeCount == 2);
        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
        InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
        z = f / Math.Sqrt(1 + g * g);
        var invDenom = 1.0 / Math.Pow(1 + g * g, 1.5); // 1 / (1+g²)^(3/2)
        dz = df.Scale(1 + g * g).Subtract(dg.Scale(f * g)).Scale(invDenom);
      } else {
        throw new NotSupportedException("unsupported symbol");
      }
    }
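    // Illustrative sketch (hypothetical helper, not used by this class): the forward-mode
    // differentiation performed by InterpretRec above, written out for the tiny expression
    // z = sin(c * x) with a single parameter c and an input variable x.
    private static void ForwardModeExample(double c, double x, out double z, out double dz_dc) {
      double f = c * x;         // multiplication node: value
      double df = x;            // d(c*x)/dc = x (the gradient of the input x w.r.t. c is zero)
      z = Math.Sin(f);          // sine node: value
      dz_dc = Math.Cos(f) * df; // chain rule, as in the Sine branch above
    }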

    #endregion

    #region events
    /*
     * Dependencies between parameters:
     *
     * ProblemData
     *    |
     *    V
     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
     *               |   |                 |                   |
     *               V   V                 |                   |
     *             Grammar <---------------+-------------------
     *                |
     *                V
     *            Encoding
     */
    private void RegisterEventHandlers() {
      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;

      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;

      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;

      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;

      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
    }

    private void NumLatentVariablesChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void MaximumLengthChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
    }

    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
      UpdateGrammarAndEncoding();
    }

    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
    }

    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
      UpdateGrammarAndEncoding();
    }

    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
      ProblemDataParameter.Value.Changed += ProblemData_Changed;
      OnProblemDataChanged();
      OnReset();
    }

    private void ProblemData_Changed(object sender, EventArgs e) {
      OnProblemDataChanged();
      OnReset();
    }

    private void OnProblemDataChanged() {
      UpdateTargetVariables();        // implicitly updates other dependent parameters
      var handler = ProblemDataChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    #endregion

    #region helper

    private static IEnumerable<T> EveryNth<T>(IEnumerable<T> xs, int step) {
      var e = xs.GetEnumerator();
      while (e.MoveNext()) {
        for (int i = 0; i < step; i++) {
          if (!e.MoveNext()) yield break;
        }
        yield return e.Current;
      }
    }
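    // Example: EveryNth(new[] { 0, 1, 2, 3, 4, 5 }, step: 2) yields 2 and 5, i.e. the
    // enumerator skips `step` elements before each yielded element, so the method
    // returns every (step+1)-th element starting at index `step`.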

    private void InitAllParameters() {
      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
    }

    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
      var l = new CheckedItemList<StringValue>();
      l.Add(new StringValue("Addition").AsReadOnly());
      l.Add(new StringValue("Multiplication").AsReadOnly());
      l.Add(new StringValue("Division").AsReadOnly());
      l.Add(new StringValue("Subtraction").AsReadOnly());
      l.Add(new StringValue("Sine").AsReadOnly());
      l.Add(new StringValue("Cosine").AsReadOnly());
      l.Add(new StringValue("Square").AsReadOnly());
      l.Add(new StringValue("Logarithm").AsReadOnly());
      l.Add(new StringValue("Exponential").AsReadOnly());
      l.Add(new StringValue("HyperbolicTangent").AsReadOnly());
      l.Add(new StringValue("AnalyticQuotient").AsReadOnly());
      return l.AsReadOnly();
    }

    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
      // return n.Symbol.Name[0] == 'θ';
      return n is ConstantTreeNode;
    }
    private static double GetConstantValue(ISymbolicExpressionTreeNode n) {
      return ((ConstantTreeNode)n).Value;
    }
    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name[0] == 'λ';
    }
    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
    }
    private static string GetVariableName(ISymbolicExpressionTreeNode n) {
      return ((VariableTreeNode)n).VariableName;
    }

    private void UpdateTargetVariables() {
      var currentlySelectedVariables = TargetVariables.CheckedItems
        .OrderBy(i => i.Index)
        .Select(i => i.Value.Value)
        .ToArray();

      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      // restore the checked state for all variables that were selected previously
      foreach (var item in newVariablesList) {
        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
      }
      TargetVariablesParameter.Value = newVariablesList;
    }

    private void UpdateGrammarAndEncoding() {
      var encoding = new MultiEncoding();
      var g = CreateGrammar();
      foreach (var targetVar in TargetVariables.CheckedItems) {
        encoding = encoding.Add(CreateTreeEncoding(targetVar + "_tree", g)); // only limit by length
      }
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        encoding = encoding.Add(CreateTreeEncoding("λ" + i + "_tree", g));
      }
      Encoding = encoding;
    }

    private SymbolicExpressionTreeEncoding CreateTreeEncoding(string name, ISymbolicExpressionGrammar grammar) {
      var e = new SymbolicExpressionTreeEncoding(name, grammar, MaximumLength, MaximumLength);

      // make sure our multi-manipulator is the only manipulator
      var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
      var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
      e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);

      // set the crossover probability to reduce the likelihood that multiple trees are crossed at the same time
      foreach (var xover in e.Operators.OfType<SubtreeCrossover>()) {
        xover.CrossoverProbability.Value = 0.3;
      }
      return e;
    }

    private ISymbolicExpressionGrammar CreateGrammar() {
      var grammar = new TypeCoherentExpressionGrammar();
      grammar.StartGrammarManipulation();

      var problemData = ProblemData;
      var ds = problemData.Dataset;
      grammar.MaximumFunctionArguments = 0;
      grammar.MaximumFunctionDefinitions = 0;
      var allowedVariables = problemData.AllowedInputVariables.Concat(TargetVariables.CheckedItems.Select(chk => chk.Value.Value));
      foreach (var varSymbol in grammar.Symbols.OfType<HeuristicLab.Problems.DataAnalysis.Symbolic.VariableBase>()) {
        if (!varSymbol.Fixed) {
          varSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<double>(x));
          varSymbol.VariableNames = allowedVariables.Where(x => ds.VariableHasType<double>(x));
        }
      }
      foreach (var factorSymbol in grammar.Symbols.OfType<BinaryFactorVariable>()) {
        if (!factorSymbol.Fixed) {
          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
          factorSymbol.VariableValues = factorSymbol.VariableNames
            .ToDictionary(varName => varName, varName => ds.GetStringValues(varName).Distinct().ToList());
        }
      }
      foreach (var factorSymbol in grammar.Symbols.OfType<FactorVariable>()) {
        if (!factorSymbol.Fixed) {
          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
          factorSymbol.VariableValues = factorSymbol.VariableNames
            .ToDictionary(varName => varName,
            varName => ds.GetStringValues(varName).Distinct()
            .Select((n, i) => Tuple.Create(n, i))
            .ToDictionary(tup => tup.Item1, tup => tup.Item2));
        }
      }

      grammar.ConfigureAsDefaultRegressionGrammar();
      grammar.GetSymbol("Logarithm").Enabled = false; // not supported yet
      grammar.GetSymbol("Exponential").Enabled = false; // not supported yet

      // configure initialization of constants
      var constSy = (Constant)grammar.GetSymbol("Constant");
      // max and min are only relevant for initialization
      constSy.MaxValue = +1.0e-1; // small initial values for constant opt
      constSy.MinValue = -1.0e-1;
      constSy.MultiplicativeManipulatorSigma = 1.0; // allow large jumps for manipulation
      constSy.ManipulatorMu = 0.0;
      constSy.ManipulatorSigma = 1.0; // allow large jumps

      // configure initialization of variables
      var varSy = (Variable)grammar.GetSymbol("Variable");
      // fix variable weights to 1.0
      varSy.WeightMu = 1.0;
      varSy.WeightSigma = 0.0;
      varSy.WeightManipulatorMu = 0.0;
      varSy.WeightManipulatorSigma = 0.0;
      varSy.MultiplicativeWeightManipulatorSigma = 0.0;

      foreach (var f in FunctionSet) {
        grammar.GetSymbol(f.Value).Enabled = FunctionSet.ItemChecked(f);
      }

      grammar.FinishedGrammarManipulation();
      return grammar;
      // // whenever ProblemData is changed we create a new grammar with the necessary symbols
      // var g = new SimpleSymbolicExpressionGrammar();
      // var unaryFunc = new string[] { "sin", "cos", "sqr" };
      // var binaryFunc = new string[] { "+", "-", "*", "%" };
      // foreach (var func in unaryFunc) {
      //   if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
      // }
      // foreach (var func in binaryFunc) {
      //   if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
      // }
      //
      // foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
      //   g.AddTerminalSymbol(variableName);
      //
      // // generate symbols for numeric parameters for which the value is optimized using AutoDiff
      // // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
      // var numericConstantsFactor = 2.0;
      // for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
      //   g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
      // }
      //
      // // generate symbols for latent variables
      // for (int i = 1; i <= NumberOfLatentVariables; i++) {
      //   g.AddTerminalSymbol("λ" + i); // numeric parameter for which the value is optimized using AutoDiff
      // }
      //
      // return g;
    }
    #endregion


    #region Import & Export
    public void Load(IRegressionProblemData data) {
      Name = data.Name;
      Description = data.Description;
      ProblemData = data;
    }

    public IRegressionProblemData Export() {
      return ProblemData;
    }
    #endregion


    // TODO: for integration we only need a subset of the data that is needed for optimization

    public class OptimizationData {
      public readonly ISymbolicExpressionTree[] trees;
      public readonly string[] targetVariables;
      public readonly IRegressionProblemData problemData;
      public readonly double[][] targetValues;
      public readonly double[] inverseStandardDeviation;
      public readonly IntRange[] episodes;
      public readonly int numericIntegrationSteps;
      public readonly string[] latentVariables;
      public readonly string odeSolver;
      public readonly NodeValueLookup nodeValueLookup;
      public readonly int[] rows;
      internal readonly string[] variables;

      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, string[] inputVariables,
        IRegressionProblemData problemData,
        double[][] targetValues,
        IntRange[] episodes,
        int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
        this.trees = trees;
        this.targetVariables = targetVars;
        this.problemData = problemData;
        this.targetValues = targetValues;
        this.variables = inputVariables;
        if (targetValues != null) {
          this.inverseStandardDeviation = new double[targetValues.Length];
          for (int i = 0; i < targetValues.Length; i++) {
            // calculate the standard deviation for each episode separately and average them
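            // Example: for two episodes with target stdevs 2.0 and 4.0 this gives
            // 1.0 / ((2.0 + 4.0) / 2) = 1/3, so targets with a larger spread are
            // down-weighted wherever inverseStandardDeviation is used as a scale factor.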
            var epStartIdx = 0;
            var stdevs = new List<double>();
            foreach (var ep in episodes) {
              var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
              stdevs.Add(epValues.StandardDeviation());
              epStartIdx += ep.Size;
            }
            inverseStandardDeviation[i] = 1.0 / stdevs.Average();
          }
        } else {
          this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
        }
        this.episodes = episodes;
        this.numericIntegrationSteps = numericIntegrationSteps;
        this.latentVariables = latentVariables;
        this.odeSolver = odeSolver;
        this.nodeValueLookup = new NodeValueLookup(trees);
        this.rows = episodes.SelectMany(ep => Enumerable.Range(ep.Start, ep.Size)).ToArray();
      }
    }

    public class NodeValueLookup {
      private readonly Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> node2val = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
      private readonly Dictionary<string, List<ISymbolicExpressionTreeNode>> name2nodes = new Dictionary<string, List<ISymbolicExpressionTreeNode>>();
      private readonly ConstantTreeNode[] constantNodes;
      private readonly Vector[] constantGradientVectors;

      // private readonly Dictionary<int, ISymbolicExpressionTreeNode> paramIdx2node = new Dictionary<int, ISymbolicExpressionTreeNode>();

      public double NodeValue(ISymbolicExpressionTreeNode node) => node2val[node].Item1;
      public Vector NodeGradient(ISymbolicExpressionTreeNode node) => node2val[node].Item2;

      public NodeValueLookup(ISymbolicExpressionTree[] trees) {
        this.constantNodes = trees.SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>()).ToArray();
        constantGradientVectors = new Vector[constantNodes.Length];
        for (int paramIdx = 0; paramIdx < constantNodes.Length; paramIdx++) {
          constantGradientVectors[paramIdx] = Vector.CreateIndicator(length: constantNodes.Length, idx: paramIdx);

          var node = constantNodes[paramIdx];
          node2val[node] = Tuple.Create(node.Value, constantGradientVectors[paramIdx]);
        }

        foreach (var tree in trees) {
          foreach (var node in tree.IterateNodesPrefix().Where(IsVariableNode)) {
            var varName = GetVariableName(node);
            if (!name2nodes.TryGetValue(varName, out List<ISymbolicExpressionTreeNode> nodes)) {
              nodes = new List<ISymbolicExpressionTreeNode>();
              name2nodes.Add(varName, nodes);
            }
            nodes.Add(node);
            SetVariableValue(varName, 0.0);  // this value is updated in the prediction loop
          }
        }
      }

      public int ParameterCount => constantNodes.Length;

      public void SetVariableValue(string variableName, double val) {
        SetVariableValue(variableName, val, Vector.Zero);
      }
      public Tuple<double, Vector> GetVariableValue(string variableName) {
        return node2val[name2nodes[variableName].First()];
      }
      public void SetVariableValue(string variableName, double val, Vector dVal) {
        if (name2nodes.TryGetValue(variableName, out List<ISymbolicExpressionTreeNode> nodes)) {
          nodes.ForEach(n => node2val[n] = Tuple.Create(val, dVal));
        } else {
          // the variable does not occur in any tree; register a surrogate node so the value can still be looked up
          var fakeNode = new VariableTreeNode(new Variable());
          fakeNode.Weight = 1.0;
          fakeNode.VariableName = variableName;
          var newNodeList = new List<ISymbolicExpressionTreeNode>();
          newNodeList.Add(fakeNode);
          name2nodes.Add(variableName, newNodeList);
          node2val[fakeNode] = Tuple.Create(val, dVal);
        }
      }

      internal void UpdateParamValues(double[] x) {
        for (int i = 0; i < x.Length; i++) {
          constantNodes[i].Value = x[i];
          node2val[constantNodes[i]] = Tuple.Create(x[i], constantGradientVectors[i]);
        }
      }
    }
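    // Illustrative sketch (hypothetical helper, not called anywhere in this class): how
    // NodeValueLookup and the gradient overload of InterpretRec fit together. Variable
    // values default to 0.0 until SetVariableValue is called; each gradient vector has
    // length ParameterCount.
    private static void EvaluateExample(ISymbolicExpressionTree[] trees, double[] p) {
      var nodeValues = new NodeValueLookup(trees);
      nodeValues.UpdateParamValues(p); // writes p into the constant nodes and their indicator gradients
      foreach (var tree in trees) {
        InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out double f, out Vector g);
        // f is the value of the RHS of this tree, g = df/dp
      }
    }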
  }
}