
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16976

Last change on this file since 16976 was 16976, checked in by gkronber, 5 years ago

#2925: Update data files to have equidistant time steps. Reactivated CVODES solver.

#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;
using HEAL.Attic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
using System.Runtime.InteropServices;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableType("065C6A61-773A-42C9-9DE5-61A5D1D823EB")]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<Problem> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumPretuningParameterOptimizationIterationsParameterName = "Max. pre-tuning parameter optimization iterations";
    private const string MaximumOdeParameterOptimizationIterationsParameterName = "Max. ODE parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string TestEpisodesParameterName = "Test episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }

    public IFixedValueParameter<IntValue> MaximumPretuningParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumPretuningParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumOdeParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumOdeParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TestEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TestEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> PretuningErrorWeight {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["Pretuning NMSE weight"]; }
    }
    public IFixedValueParameter<DoubleValue> OdeErrorWeight {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["ODE NMSE weight"]; }
    }
    public IFixedValueParameter<DoubleValue> NumericDifferencesSmoothingParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["Numeric differences smoothing"]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumPretuningParameterOptimizationIterations {
      get { return MaximumPretuningParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int MaximumOdeParameterOptimizationIterations {
      get { return MaximumOdeParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IList<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public IList<IntRange> TestEpisodes {
      get { return TestEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }
    public double NumericDifferencesSmoothing {
      get { return NumericDifferencesSmoothingParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(StorableConstructorFlag _) : base(_) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      int iters = 100;
      if (Parameters.ContainsKey("Max. parameter optimization iterations")) {
        iters = ((IFixedValueParameter<IntValue>)Parameters["Max. parameter optimization iterations"]).Value.Value;
      }
      if (!Parameters.ContainsKey(MaximumPretuningParameterOptimizationIterationsParameterName)) {
        Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(iters)));
      }
      if (!Parameters.ContainsKey(MaximumOdeParameterOptimizationIterationsParameterName)) {
        Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(iters)));
      }

      if (!Parameters.ContainsKey("Pretuning NMSE weight"))
        Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
      if (!Parameters.ContainsKey("ODE NMSE weight"))
        Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));

      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated over time and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps worsen the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training; each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TestEpisodesParameterName, "A list of ranges that should be used for validation; each range represents an independent episode. This overrides the TestSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>("Numeric differences smoothing", "Determines the amount of smoothing for the numeric differences which are calculated for pre-tuning. Values from -8 to 8 are reasonable. Use a very low value if the data contains no noise. Default: 2.", new DoubleValue(2.0)));

      var solversStr = new string[] { "HeuristicLab", "CVODES" };
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));
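      // Note: "CVODES" refers to the SUNDIALS ODE solver with built-in sensitivity analysis
      // (https://computation.llnl.gov/projects/sundials/cvodes), while "HeuristicLab" selects
      // the simple fixed-step integrator implemented below (IntegrateHL).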

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: use training range as default training episode
      // TODO: optimization of starting values for latent variables in CVODES solver
      // TODO: allow to specify the name for the time variable in the dataset and allow variable step-sizes
    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        throw new NotImplementedException();
        // the code below is unreachable and only kept for reference
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          // double[] optTheta;
          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations);
          // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        // when no training episodes are specified we implicitly use the training partition from the problemData
        var trainingEpisodes = TrainingEpisodes;
        if (!trainingEpisodes.Any()) {
          trainingEpisodes = new List<IntRange>();
          trainingEpisodes.Add((IntRange)ProblemData.TrainingPartition.Clone());
        }
        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, trainingEpisodes, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations,
          PretuningErrorWeight.Value.Value, OdeErrorWeight.Value.Value, NumericDifferencesSmoothing);
        // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
        return nmse;
      }
    }

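    // Summary of the fitness calculation (as implemented below): the quality of an individual is the
    // weighted sum of two (scaled) NMSE values,
    //   quality = pretuningErrorWeight * NMSE_pretuning + odeErrorWeight * NMSE_ODE,
    // where NMSE_pretuning compares each tree f_i(x,y) against numerically calculated differences
    // dy_i/dt (PreTuneParameters) and NMSE_ODE compares the integrated trajectories y(t) against
    // the observed values (OptimizeParameters).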
    public static double OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxPretuningParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver,
      int maxOdeParameterOptIterations,
      double pretuningErrorWeight = 0.5,
      double odeErrorWeight = 0.5,
      double numericDifferencesSmoothing = 2
      ) {

      // extract constants from trees (without trees for latent variables)
      var targetVariableTrees = trees.Take(targetVars.Length).ToArray();
      var latentVariableTrees = trees.Skip(targetVars.Length).ToArray();
      var constantNodes = targetVariableTrees.Select(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().ToArray()).ToArray();
      var initialTheta = constantNodes.Select(nodes => nodes.Select(n => n.Value).ToArray()).ToArray();

      // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
      double[] pretunedParameters = initialTheta.SelectMany(v => v).ToArray();
      double nmse = 0;
      if (pretuningErrorWeight > 0 || maxPretuningParameterOptIterations > -1) {
        nmse += pretuningErrorWeight * PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes,
          maxPretuningParameterOptIterations, numericDifferencesSmoothing,
          initialTheta, out pretunedParameters);
      }

      // extend parameter vector to include parameters for latent variable trees
      pretunedParameters = pretunedParameters
        .Concat(latentVariableTrees
        .SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().Select(n => n.Value)))
        .ToArray();

      double[] optTheta = pretunedParameters;
      if (odeErrorWeight > 0 || maxOdeParameterOptIterations > -1) {
        // optimize parameters using integration of f(x,y) to calculate y(t)
        nmse += odeErrorWeight * OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxOdeParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
          out optTheta);
      }
      // var optTheta = pretunedParameters;

      if (double.IsNaN(nmse) ||
        double.IsInfinity(nmse) ||
        nmse > 100 * trees.Length * episodes.Sum(ep => ep.Size))
        return 100 * trees.Length * episodes.Sum(ep => ep.Size);

      // update tree nodes with optimized values
      var paramIdx = 0;
      for (var treeIdx = 0; treeIdx < constantNodes.Length; treeIdx++) {
        for (int i = 0; i < constantNodes[treeIdx].Length; i++)
          constantNodes[treeIdx][i].Value = optTheta[paramIdx++];
      }
      return nmse;
    }

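    // Pre-tuning fits the parameters of each tree separately: instead of integrating the coupled
    // ODE system, each f_i(x,y) is treated as a plain regression model whose targets are the
    // smoothed numeric differences dy_i/dt. Latent variables have no observed data; their
    // trajectories are first produced by integration and added to the dataset as extra columns
    // (see below).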
    private static double PreTuneParameters(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      double numericDifferencesSmoothing, // for smoothing of numeric differences
      double[][] initialTheta,
      out double[] optTheta) {
      var thetas = new List<double>();
      double nmse = 0.0;
      var maxTreeNmse = 100 * episodes.Sum(ep => ep.Size);

      var targetTrees = trees.Take(targetVars.Length).ToArray();
      var latentTrees = trees.Skip(targetVars.Length).ToArray(); // BUGFIX: was trees.Take(latentVariables.Length), which selected target trees instead of the latent variable trees

      // first calculate values of latent variables by integration
      if (latentVariables.Length > 0) {
        var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
        var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "HeuristicLab");

        var fi = new double[myState.rows.Length * targetVars.Length];
        var jac = new double[myState.rows.Length * targetVars.Length, myState.nodeValueLookup.ParameterCount];
        var latentValues = new double[myState.rows.Length, latentVariables.Length];
        Integrate(myState, fi, jac, latentValues);

        // add integrated latent variables to dataset
        var modifiedDataset = ((Dataset)problemData.Dataset).ToModifiable();
        foreach (var variable in latentVariables) {
          modifiedDataset.AddVariable(variable, Enumerable.Repeat(0.0, modifiedDataset.Rows).ToList()); // empty column
        }
        int predIdx = 0;
        foreach (var ep in episodes) {
          for (int r = ep.Start; r < ep.End; r++) {
            for (int latVarIdx = 0; latVarIdx < latentVariables.Length; latVarIdx++) {
              modifiedDataset.SetVariableValue(latentValues[predIdx, latVarIdx], latentVariables[latVarIdx], r);
            }
            predIdx++;
          }
        }

        problemData = new RegressionProblemData(modifiedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      }
      // NOTE: the order of values in parameter matches prefix order of constant nodes in trees
      for (int treeIdx = 0; treeIdx < targetTrees.Length; treeIdx++) {
        var t = targetTrees[treeIdx];

        // calculate numeric differences of the target values as surrogate for dy/dt
        var targetValuesDiff = new List<double>();

        // TODO: smooth only once
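        // CalculateDifferences (defined further down in this file) is assumed to approximate dy/dt
        // from the sampled values of each episode; numericDifferencesSmoothing controls how much
        // the series is smoothed before differencing (cf. the parameter description: values in
        // [-8, 8] are reasonable, low values for noise-free data).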
        foreach (var ep in episodes) {
          var episodeRows = Enumerable.Range(ep.Start, ep.Size);
          var targetValues = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], episodeRows).ToArray();
          targetValuesDiff.AddRange(CalculateDifferences(targetValues, numericDifferencesSmoothing));
        }
        var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End));

        // data for input variables is assumed to be known
        // input variables in pretuning are all target variables and all variable names that occur in the tree
        var inputVariables = targetVars.Concat(t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName)).Distinct();

        var myState = new OptimizationData(new[] { t },
          targetVars,
          inputVariables.ToArray(),
          problemData, new[] { targetValuesDiff.ToArray() }, adjustedEpisodes.ToArray(), -99, latentVariables, string.Empty); // TODO (numericIntegrationSteps and odeSolver are not used in pre-tuning, therefore -99 and string.Empty)
        var paramCount = myState.nodeValueLookup.ParameterCount;

        optTheta = initialTheta[treeIdx];
        if (initialTheta[treeIdx].Length > 0 && maxParameterOptIterations > -1) {
          try {
            alglib.minlmstate state;
            alglib.minlmreport report;
            var p = new double[initialTheta[treeIdx].Length];
            var lowerBounds = Enumerable.Repeat(-1000.0, p.Length).ToArray();
            var upperBounds = Enumerable.Repeat(1000.0, p.Length).ToArray();
            Array.Copy(initialTheta[treeIdx], p, p.Length);
            alglib.minlmcreatevj(targetValuesDiff.Count, p, out state);
            alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
            alglib.minlmsetbc(state, lowerBounds, upperBounds);
#if DEBUG
            //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
            alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

            alglib.minlmresults(state, out optTheta, out report);
            if (report.terminationtype < 0) {
#if DEBUG
              if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation failed!");
#endif
              optTheta = initialTheta[treeIdx];
            }
          } catch (alglib.alglibexception) {
            optTheta = initialTheta[treeIdx];
          }
        }
        var tree_nmse = EvaluateMSE(optTheta, myState);
        if (double.IsNaN(tree_nmse) || double.IsInfinity(tree_nmse) || tree_nmse > maxTreeNmse) {
          nmse += maxTreeNmse;
          thetas.AddRange(initialTheta[treeIdx]);
        } else {
          nmse += tree_nmse;
          thetas.AddRange(optTheta);
        }
      } // foreach tree
      optTheta = thetas.ToArray();

      return nmse;
    }

    // similar to above but this time we integrate and optimize all parameters for all targets concurrently
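    // Here the coupled system dy/dt = f(y, x; θ) is simulated with the selected ODE solver,
    // starting from the observed values at the beginning of each episode, and θ is adjusted with
    // Levenberg-Marquardt (alglib.minlm*, box constraints [-1000, 1000]) to minimize the scaled
    // residuals between simulated and observed trajectories.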
    private static double OptimizeParameters(ISymbolicExpressionTree[] trees, IRegressionProblemData problemData, string[] targetVars, string[] latentVariables,
      IEnumerable<IntRange> episodes, int maxParameterOptIterations, double[] initialTheta, int numericIntegrationSteps, string odeSolver, out double[] optTheta) {
      var rowsForDataExtraction = episodes.SelectMany(e => Enumerable.Range(e.Start, e.Size)).ToArray();
      var targetValues = new double[targetVars.Length][];
      for (int treeIdx = 0; treeIdx < targetVars.Length; treeIdx++) {
        var t = trees[treeIdx];

        targetValues[treeIdx] = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], rowsForDataExtraction).ToArray();
      }

      // data for input variables is assumed to be known
      // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
      var inputVariables = trees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
        .Except(targetVars)
        .Except(latentVariables)
        .Distinct();

      var myState = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
      optTheta = initialTheta;

      if (initialTheta.Length > 0 && maxParameterOptIterations > -1) {
        var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
        var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
        try {
          alglib.minlmstate state;
          alglib.minlmreport report;
          alglib.minlmcreatevj(rowsForDataExtraction.Length * trees.Length, initialTheta, out state);
          alglib.minlmsetbc(state, lowerBounds, upperBounds);
          alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
#if DEBUG
          //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
          alglib.minlmoptimize(state, IntegrateAndEvaluateObjectiveVector, IntegrateAndEvaluateObjectiveVectorAndJacobian, null, myState);

          alglib.minlmresults(state, out optTheta, out report);

          if (report.terminationtype < 0) {
#if DEBUG
            if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation failed!");
#endif
            // there was a problem: reset theta and evaluate for initial values
            optTheta = initialTheta;
          }
        } catch (alglib.alglibexception) {
          optTheta = initialTheta;
        }
      }
      var nmse = EvaluateIntegratedMSE(optTheta, myState);
      var maxNmse = 100 * targetValues.Length * rowsForDataExtraction.Length;
      if (double.IsNaN(nmse) || double.IsInfinity(nmse) || nmse > maxNmse) nmse = maxNmse;
      return nmse;
    }

    // helper
    public static double EvaluateMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count()];
      EvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
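    // Residuals are scaled with 1/σ of the respective target (inverseStandardDeviation), so the
    // mean of the squared entries of fi is the normalized mean squared error:
    //   fi[k] = (y - pred) / σ   =>   (1/n) Σ fi[k]² = MSE / σ² = NMSE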
    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      var rows = optimizationData.rows;
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      var variables = optimizationData.variables;

      nodeValueLookup.UpdateParamValues(x);

      int outputIdx = 0;
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          } else {
            nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          }
        }
        // interpret all trees
        for (int treeIdx = 0; treeIdx < optimizationData.trees.Length; treeIdx++) {
          var tree = optimizationData.trees[treeIdx];
          var pred = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup);
          var y = optimizationData.targetValues[treeIdx][trainIdx];
          fi[outputIdx++] = (y - pred) * optimizationData.inverseStandardDeviation[treeIdx];
        }
      }
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      // extract variable values from dataset
      var problemData = optimizationData.problemData;
      var ds = problemData.Dataset;
      var rows = optimizationData.rows;
      var variables = optimizationData.variables;

      var nodeValueLookup = optimizationData.nodeValueLookup;
      nodeValueLookup.UpdateParamValues(x);

      int termIdx = 0;

      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          } else {
            nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          }
        }

        var calculatedVariables = optimizationData.targetVariables;

        var trees = optimizationData.trees;
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var targetVarName = calculatedVariables[i];

          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup, out f, out g);

          var y = optimizationData.targetValues[i][trainIdx];
          fi[termIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale of NMSE
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[termIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];

          termIdx++;
        }
      }
    }
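    // Note: InterpretRec (further down in this file) evaluates a tree and propagates the gradient
    // w.r.t. the parameters in forward mode via the Vector type; since the residual is
    // r = (y - f) / σ, the corresponding Jacobian row is -g / σ (hence the sign above).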

    // helper
    public static double EvaluateIntegratedMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count() * optimizationData.targetVariables.Length];
      IntegrateAndEvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { IntegrateAndEvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      var rows = optimizationData.rows.ToArray();
      var nodeValueLookup = optimizationData.nodeValueLookup;
      int outputIdx = 0;

      nodeValueLookup.UpdateParamValues(x);

      Integrate(optimizationData, fi, jac, null);
      var trees = optimizationData.trees;

      // Integrate() has written the predictions into fi and the sensitivities into jac;
      // convert them in place to scaled residuals (and corresponding Jacobian entries)
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        for (int i = 0; i < optimizationData.targetVariables.Length; i++) {
          var tree = trees[i];
          var y = optimizationData.targetValues[i][trainIdx];
          fi[outputIdx] = (y - fi[outputIdx]) * optimizationData.inverseStandardDeviation[i];  // scale for normalized squared error
          if (jac != null) for (int j = 0; j < x.Length; j++) jac[outputIdx, j] = -jac[outputIdx, j] * optimizationData.inverseStandardDeviation[i];
          outputIdx++;
        }
      }
    }

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("SNMSE values")) {
        var dt = new DataTable("SNMSE values");
        dt.Rows.Add(new DataRow("ODE SNMSE"));
        dt.Rows.Add(new DataRow("Fitness"));
        results.Add(new Result("SNMSE values", dt));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }

      // when no training episodes are specified we implicitly use the training partition from the problemData
      var trainingEpisodes = TrainingEpisodes;
      if (!trainingEpisodes.Any()) {
        trainingEpisodes = new List<IntRange>();
        trainingEpisodes.Add((IntRange)ProblemData.TrainingPartition.Clone());
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        throw new NotSupportedException();
        // the code below is unreachable and only kept for reference
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();

        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        // data for input variables is assumed to be known
        // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
        var inputVariables = trees
          .SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
          .Except(targetVars)
          .Except(latentVariables)
          .Distinct();

        var optimizationData = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, trainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var numParams = optimizationData.nodeValueLookup.ParameterCount;

        var fi = new double[optimizationData.rows.Length * targetVars.Length];
        var jac = new double[optimizationData.rows.Length * targetVars.Length, numParams];
        var latentValues = new double[optimizationData.rows.Length, latentVariables.Length];
        Integrate(optimizationData, fi, jac, latentValues);
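        // fi now contains the simulated trajectories of the target variables (row-major:
        // rows × targets), jac the corresponding parameter sensitivities ∂y/∂θ, and latentValues
        // the simulated trajectories of the latent variables; all three are visualized below.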

        // for target values and latent variables
        var trainingRows = optimizationData.rows;
        double trainingSNMSE = 0.0;
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var idx = Enumerable.Range(0, trainingRows.Length).Select(i => i * targetVars.Length + colIdx);
            var pred = idx.Select(i => fi[i]);
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, pred.ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            // again calculate the integrated error (regardless how fitness is determined)
            // SNMSE: squared error normalized by the variance of the target, averaged over all targets
            trainingSNMSE += actualValuesRow.Values.Zip(predictedValuesRow.Values, (a, p) => Math.Pow(a - p, 2)).Average() / actualValuesRow.Values.Variance() / targetVars.Length;

            for (int paramIdx = 0; paramIdx < numParams; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", idx.Select(i => jac[i, paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var idx = Enumerable.Range(0, trainingRows.Length);
            var pred = idx.Select(i => latentValues[i, colIdx - targetVars.Length]);
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, pred.ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        results.AddOrUpdateResult("ODE SNMSE", new DoubleValue(trainingSNMSE));
        var odeSNMSETable = (DataTable)results["SNMSE values"].Value;
        odeSNMSETable.Rows["ODE SNMSE"].Values.Add(trainingSNMSE);
        odeSNMSETable.Rows["Fitness"].Values.Add(bestIndividualAndQuality.Item2);

        // var errorTable = new DataTable("Squared error and gradient");
        // var seRow = new DataRow("Squared error");
        // var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        // errorTable.Rows.Add(seRow);
        // foreach (var gRow in gradientRows) {
        //   gRow.VisualProperties.SecondYAxis = true;
        //   errorTable.Rows.Add(gRow);
        // }
        // var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        // int r = 0;

        // foreach (var y_pred in fi) {
        //   // calculate objective function gradient
        //   double f_i = 0.0;
        //   Vector g_i = Vector.CreateNew(new double[numParams]);
        //   for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
        //     var y_pred_f = y_pred[colIdx].Item1;
        //     var y = targetValues[colIdx][r];
        //
        //     var res = (y - y_pred_f) * optimizationData.inverseStandardDeviation[colIdx];
        //     var ressq = res * res;
        //     f_i += ressq;
        //     g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res));
        //   }
        //   seRow.Values.Add(f_i);
        //   for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
        //   r++;
        // }
        // results["Squared error and gradient"].Value = errorTable;

        // only if there is a non-empty test partition
        if (ProblemData.TestIndices.Any()) {
          // TODO: DRY for training and test

          var testList = new ItemList<DataTable>();
          var testRows = ProblemData.TestIndices.ToArray();
          var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
          var testPrediction = Integrate(testOptimizationData).ToArray();

          for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
            // is target variable
            if (colIdx < targetVars.Length) {
              var targetVar = targetVars[colIdx];
              var testDataTable = new DataTable(targetVar + " prediction (test)");
              var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
              var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
              testDataTable.Rows.Add(actualValuesRow);
              testDataTable.Rows.Add(predictedValuesRow);
              testList.Add(testDataTable);
            } else {
              // var latentVar = latentVariables[colIdx - targetVars.Length];
              // var testDataTable = new DataTable(latentVar + " prediction (test)");
              // var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
              // var emptyRow = new DataRow(latentVar);
              // testDataTable.Rows.Add(emptyRow);
              // testDataTable.Rows.Add(predictedValuesRow);
              // testList.Add(testDataTable);
            }
          }

          results["Prediction (training)"].Value = trainingList.AsReadOnly();
          results["Prediction (test)"].Value = testList.AsReadOnly();
        }

        #region simplification of models
        // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var clonedTrees = new List<ISymbolicExpressionTree>();
        for (int idx = 0; idx < trees.Length; idx++) {
          clonedTrees.Add((ISymbolicExpressionTree)trees[idx].Clone());
        }
        var ds = problemData.Dataset;
        var newProblemData = new RegressionProblemData((IDataset)ds.Clone(), problemData.AllowedInputVariables, problemData.TargetVariable);
        results["Solution"].Value = new Solution(clonedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   trainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);

        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(tree);
          models.Add(simplifiedTreeVar);
        }

        results["Models"].Value = models;
        #endregion

        #region produce classical solutions to allow visualization with PDP
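        // For each target variable a standard symbolic regression solution is created for the
        // differenced data (target: name + "_diff" ≈ dy/dt) so that the existing analysis views,
        // e.g. partial dependence plots (PDP), can be used to inspect each equation separately.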
        for (int treeIdx = 0; treeIdx < targetVars.Length; treeIdx++) {
          var t = (ISymbolicExpressionTree)trees[treeIdx].Clone();
          var name = targetVars.Concat(latentVariables).ElementAt(treeIdx); // name of the variable modelled by this tree
          var model = new SymbolicRegressionModel(name + "_diff", t, new SymbolicDataAnalysisExpressionTreeLinearInterpreter());
          var solutionDataset = ((Dataset)problemData.Dataset).ToModifiable();
          solutionDataset.Name = ((Dataset)problemData.Dataset).Name;
          solutionDataset.Description = ((Dataset)problemData.Dataset).Description;

          var absValues = solutionDataset.GetDoubleValues(name).ToArray();

          var diffValues = new double[absValues.Length];
          foreach (var ep in TrainingEpisodes.Concat(TestEpisodes)) {
            var y = solutionDataset.GetDoubleValues(name, Enumerable.Range(ep.Start, ep.End - ep.Start)).ToArray();
            var yd = CalculateDifferences(y, NumericDifferencesSmoothing).ToArray();
            for (int r = ep.Start; r < ep.End; r++) {
              diffValues[r] = yd[r - ep.Start];
            }
          }

          solutionDataset.AddVariable(name + "_diff", diffValues);
          var solutionProblemData = new RegressionProblemData(solutionDataset, problemData.AllowedInputVariables, name + "_diff");
          solutionProblemData.Name = problemData.Name;
          solutionProblemData.Description = problemData.Description;

          solutionProblemData.TrainingPartition.Start = TrainingEpisodes.Select(ep => ep.Start).Min();
          solutionProblemData.TrainingPartition.End = TrainingEpisodes.Select(ep => ep.End).Max(); // assumes training episodes are sequential without gaps
          if (TestEpisodes.Any()) {
            solutionProblemData.TestPartition.Start = TestEpisodes.Select(ep => ep.Start).Min();
            solutionProblemData.TestPartition.End = TestEpisodes.Select(ep => ep.End).Max();
          } else {
            solutionProblemData.TestPartition.Start = problemData.TestPartition.Start;
            solutionProblemData.TestPartition.End = problemData.TestPartition.End;
          }
          var solution = model.CreateRegressionSolution(solutionProblemData);
          results.AddOrUpdateResult("Solution " + name, solution);
        }
        #endregion
      }
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
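    // Sketch of the forward sensitivity approach used here: for dy/dt = f(y; θ) the sensitivities
    // S(t) = ∂y(t)/∂θ satisfy the auxiliary ODE
    //   dS/dt = (∂f/∂y)·S + ∂f/∂θ,   S(t0) = 0,
    // which is integrated alongside the states; in this implementation each quantity carries its
    // gradient (Tuple<double, Vector>), so the sensitivity equations are handled implicitly by the
    // auto-diff interpreter.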
925
926    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData) {
927      var nTargets = optimizationData.targetVariables.Length;
928      var n = optimizationData.rows.Length * optimizationData.targetVariables.Length;
929      var d = optimizationData.nodeValueLookup.ParameterCount;
930      double[] fi = new double[n];
931      double[,] jac = new double[n, d];
932      Integrate(optimizationData, fi, jac, null);
933      for (int i = 0; i < optimizationData.rows.Length; i++) {
934        var res = new Tuple<double, Vector>[nTargets];
935        for (int j = 0; j < nTargets; j++) {
936          res[j] = Tuple.Create(fi[i * nTargets + j], Vector.CreateFromMatrixRow(jac, i * nTargets + j));
937        }
938        yield return res;
939      }
940    }
941
942    public static void Integrate(OptimizationData optimizationData, double[] fi, double[,] jac, double[,] latentValues) {
943      var trees = optimizationData.trees;
944      var dataset = optimizationData.problemData.Dataset;
945      var inputVariables = optimizationData.variables;
946      var targetVariables = optimizationData.targetVariables;
947      var latentVariables = optimizationData.latentVariables;
948      var episodes = optimizationData.episodes;
949      var odeSolver = optimizationData.odeSolver;
950      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;
951      var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must conincide with the order of trees in the encoding
952
953
954
955      var nodeValues = optimizationData.nodeValueLookup;
956
957      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
958      var outputRowIdx = 0;
959      var episodeIdx = 0;
960      foreach (var episode in optimizationData.episodes) {
961        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start).ToArray();
962
963        var t0 = rows.First();
964
965        // initialize values for inputs and targets from dataset
966        foreach (var varName in inputVariables) {
967          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
968          if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
969            nodeValues.SetVariableValue(varName, value, Vector.Zero);
970          } else {
971            var y0 = dataset.GetDoubleValue(varName, t0);
972            nodeValues.SetVariableValue(varName, y0, Vector.Zero);
973          }
974        }
975        foreach (var varName in targetVariables) {
976          var y0 = dataset.GetDoubleValue(varName, t0);
977          nodeValues.SetVariableValue(varName, y0, Vector.Zero);
978
979          // output starting value
980          fi[outputRowIdx] = y0;
981          Vector.Zero.CopyTo(jac, outputRowIdx);
982
983          outputRowIdx++;
984        }
985
988        foreach (var varName in latentVariables) {
989          var y0 = 0.0; // assume we start at zero
990          nodeValues.SetVariableValue(varName, y0, Vector.Zero);
991
992          if (latentValues != null) {
993            latentValues[latentValueRowIdx, latentValueColIdx++] = y0;
994          }
995        }
996        latentValueColIdx = 0; latentValueRowIdx++;
997
998        { // CODE BELOW DOESN'T WORK ANYMORE
999          // if (latentVariables.Length > 0) throw new NotImplementedException();
1000          //
1001          // // add value entries for latent variables which are also integrated
1002          // // initial values are at the end of the parameter vector
1003          // // separate initial values for each episode
1004          // var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
1005          // foreach (var latentVar in latentVariables) {
1006          //   var arr = new double[parameterValues.Length]; // backing array
1007          //   arr[initialValueIdx] = 1.0;
1008          //   var g = new Vector(arr);
1009          //   nodeValues.SetVariableValue(latentVar, parameterValues[initialValueIdx], g); // we don't have observations for latent variables therefore we optimize the initial value for each episode
1010          //   initialValueIdx++;
1011          // }
1012        }
1013
1014        // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
1015        foreach (var t in rows.Skip(1)) {
1016          if (odeSolver == "HeuristicLab")
1017            IntegrateHL(trees, calculatedVariables, nodeValues, numericIntegrationSteps); // integrator updates nodeValues
1018          else if (odeSolver == "CVODES")
1019            IntegrateCVODES(trees, calculatedVariables, nodeValues);
1020          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
1022
1023          // update output for target variables (TODO: if we want to visualize the latent variables then we need to provide a separate output)
1024          for (int i = 0; i < targetVariables.Length; i++) {
1025            var targetVar = targetVariables[i];
1026            var yt = nodeValues.GetVariableValue(targetVar);
1027
1028            // fill up remaining rows with last valid value if there are invalid values
1029            if (double.IsNaN(yt.Item1) || double.IsInfinity(yt.Item1)) {
1030              for (; outputRowIdx < fi.Length; outputRowIdx++) {
1031                var prevIdx = outputRowIdx - targetVariables.Length;
1032                fi[outputRowIdx] = fi[prevIdx]; // current <- prev
1033                if (jac != null) for (int j = 0; j < jac.GetLength(1); j++) jac[outputRowIdx, j] = jac[prevIdx, j];
1034              }
1035              return;
1036            }
1037
1038            fi[outputRowIdx] = yt.Item1;
1039            var g = yt.Item2;
1040            g.CopyTo(jac, outputRowIdx);
1041            outputRowIdx++;
1042          }
1043          if (latentValues != null) {
1044            foreach (var latentVariable in latentVariables) {
1045              var lt = nodeValues.GetVariableValue(latentVariable).Item1;
1046              latentValues[latentValueRowIdx, latentValueColIdx++] = lt;
1047            }
1048            latentValueRowIdx++; latentValueColIdx = 0;
1049          }
1050
1051          // update for next time step (only the inputs)
1052          foreach (var varName in inputVariables) {
1053            // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
1054            if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
1055              // value is unchanged
1056            } else {
1057              nodeValues.SetVariableValue(varName, dataset.GetDoubleValue(varName, t), Vector.Zero);
1058            }
1059          }
1060        }
1061        episodeIdx++;
1062      }
1063    }
1064
1065    #region CVODES
1066
1067
1068    /// <summary>
1069    ///  Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
1070    /// </summary>
1071    /// <param name="trees">Each equation in the ODE represented as a tree</param>
1072    /// <param name="calculatedVariables">The names of the calculated variables</param>
1073    /// <param name="nodeValues">Lookup for the current variable values and gradients; updated in place with the integration results</param>
1074    private static void IntegrateCVODES(
1075      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
1076      string[] calculatedVariables, // names of elements of y
1077      NodeValueLookup nodeValues
1078      ) {
1079
1080      // the RHS of the ODE
1081      // dy/dt = f(y_t,x_t,p)
1082      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, nodeValues);
1083      // the Jacobian ∂f/∂y
1084      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, nodeValues);
1085
1086      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
1087      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, nodeValues);
1088
1089      // setup solver
1090      int numberOfEquations = trees.Length;
1091      IntPtr y = IntPtr.Zero;
1092      IntPtr cvode_mem = IntPtr.Zero;
1093      IntPtr A = IntPtr.Zero;
1094      IntPtr yS0 = IntPtr.Zero;
1095      IntPtr linearSolver = IntPtr.Zero;
1096      var ns = nodeValues.ParameterCount; // number of parameters
1097
1098      try {
1099        y = CVODES.N_VNew_Serial(numberOfEquations);
1100        // init y to current values of variables
1101        // y must be initialized before calling CVodeInit
1102        for (int i = 0; i < calculatedVariables.Length; i++) {
1103          CVODES.NV_Set_Ith_S(y, i, nodeValues.GetVariableValue(calculatedVariables[i]).Item1);
1104        }
1105
1106        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);
1107
1108        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
1109        Assert(CVODES.CV_SUCCESS == flag);
1110
1111        flag = CVODES.CVodeSetErrHandlerFn(cvode_mem, errorFunction, IntPtr.Zero);
1112        Assert(CVODES.CV_SUCCESS == flag);
1113
1114
1115        double relTol = 1.0e-2;
1116        double absTol = 1.0;
1117        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
1118        Assert(CVODES.CV_SUCCESS == flag);
1119
1120        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
1121        Assert(A != IntPtr.Zero);
1122
1123        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
1124        Assert(linearSolver != IntPtr.Zero);
1125
1126        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
1127        Assert(CVODES.CV_SUCCESS == flag);
1128
1129        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
1130        Assert(CVODES.CV_SUCCESS == flag);
1131
1132        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
1133        unsafe {
1134          // set to initial sensitivities supplied by caller
1135          for (int pIdx = 0; pIdx < ns; pIdx++) {
1136            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
1137            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1138              CVODES.NV_Set_Ith_S(yS0_i, varIdx, nodeValues.GetVariableValue(calculatedVariables[varIdx]).Item2[pIdx]); // TODO: perf
1139            }
1140          }
1141        }
1142
1143        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
1144        Assert(CVODES.CV_SUCCESS == flag);
1145
1146        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
1147        Assert(CVODES.CV_SUCCESS == flag);
1148
1149        // make one forward integration step
1150        double tout = 0.0; // set by CVode to the time actually reached by the solver
1151        flag = CVODES.CVode(cvode_mem, 1.0, y, ref tout, CVODES.CV_NORMAL);
1152        if (flag == CVODES.CV_SUCCESS) {
1153          Assert(1.0 == tout);
1154
1155          // get sensitivities
1156          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
1157          Assert(CVODES.CV_SUCCESS == flag);
1158
1159          // update variableValues based on integration results
1160          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1161            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
1162            var gArr = new double[ns];
1163            for (var pIdx = 0; pIdx < ns; pIdx++) {
1164              unsafe {
1165                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
1166                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
1167              }
1168            }
1169            nodeValues.SetVariableValue(calculatedVariables[varIdx], yi, new Vector(gArr));
1170          }
1171        } else {
1172          throw new InvalidOperationException();
1173        }
1174
1175        // cleanup all allocated objects
1176      } finally {
1177        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
1178        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
1179        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
1180        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
1181        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
1182      }
1183    }
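    // Editor's note: IntegrateCVODES always advances the system by exactly one
    // unit time step (CVode is called with a requested output time of 1.0). This
    // matches the equidistant-measurement assumption noted in Integrate();
    // IntegrateHL below does the same using n Euler sub-steps of size 1/n.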
1184
1185    private static void errorFunction(int errorCode, IntPtr module, IntPtr function, IntPtr msg, IntPtr ehdata) {
1186      var moduleStr = Marshal.PtrToStringAnsi(module);
1187      var functionStr = Marshal.PtrToStringAnsi(function);
1188      var msgStr = Marshal.PtrToStringAnsi(msg);
1189      string type = errorCode < 0 ? "Error" : "Warning"; // SUNDIALS uses negative codes for errors and positive codes for warnings
1190      throw new InvalidProgramException($"{type}: {msgStr} Module: {moduleStr} Function: {functionStr}");
1191    }
1192
1193    private static CVODES.CVRhsFunc CreateOdeRhs(
1194      ISymbolicExpressionTree[] trees,
1195      string[] calculatedVariables,
1196      NodeValueLookup nodeValues) {
1197      // we don't need to calculate a gradient here
1198      return (double t,
1199              IntPtr y, // N_Vector, current value of y (input)
1200              IntPtr ydot, // N_Vector, calculated value of y' (output)
1201              IntPtr user_data // optional user data, (unused here)
1202              ) => {
1203                for (int i = 0; i < calculatedVariables.Length; i++) {
1204                  var y_i = CVODES.NV_Get_Ith_S(y, (long)i);
1205                  nodeValues.SetVariableValue(calculatedVariables[i], y_i);
1206                }
1207                for (int i = 0; i < trees.Length; i++) {
1208                  var tree = trees[i];
1209                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
1210                  CVODES.NV_Set_Ith_S(ydot, i, res_i);
1211                }
1212                return 0;
1213              };
1214    }
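    // Editor's note: tree.Root.GetSubtree(0).GetSubtree(0) skips the program-root
    // and start-symbol nodes, so interpretation begins at the actual expression;
    // the same convention is used in IntegrateHL and CreateJac.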
1215
1216    private static CVODES.CVDlsJacFunc CreateJac(
1217      ISymbolicExpressionTree[] trees,
1218      string[] calculatedVariables,
1219      NodeValueLookup nodeValues) {
1220      var tmpNodeValues = new NodeValueLookup(trees, variableGradient: true); // for ∂f/∂y we need gradients over the state variables y instead of the parameters
1221      return (
1222        double t, // current time (input)
1223        IntPtr y, // N_Vector, current value of y (input)
1224        IntPtr fy, // N_Vector, current value of f (input)
1225        IntPtr Jac, // SUNMatrix ∂f/∂y (output, row i contains the ∂f_i/∂y vector)
1226        IntPtr user_data, // optional (unused here)
1227        IntPtr tmp1, // N_Vector, optional (unused here)
1228        IntPtr tmp2, // N_Vector, optional (unused here)
1229        IntPtr tmp3 // N_Vector, optional (unused here)
1230      ) => {
1231        // int pIdx = 0;
1232        // foreach (var tree in trees) {
1233        //   foreach (var n in tree.IterateNodesPrefix()) {
1234        //     if (IsConstantNode(n)) {
1235        //       nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
1236        //       pIdx++;
1237        //     } else if (n.SubtreeCount == 0) {
1238        //       // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
1239        //       var varName = n.Symbol.Name;
1240        //       var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
1241        //       if (varIdx < 0) throw new InvalidProgramException();
1242        //
1243        //       var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1244        //       var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
1245        //       gArr[varIdx] = 1.0;
1246        //       var g = new Vector(gArr);
1247        //       nodeValues.Add(n, Tuple.Create(y_i, g));
1248        //     }
1249        //   }
1250        // }
1251        for (int i = 0; i < calculatedVariables.Length; i++) {
1252          var y_i = CVODES.NV_Get_Ith_S(y, (long)i);
1253          tmpNodeValues.SetVariableValue(calculatedVariables[i], y_i, Vector.CreateIndicator(calculatedVariables.Length, i)); // indicator gradient over y (parameters have zero gradient here)
1254        }
1255        for (int i = 0; i < trees.Length; i++) {
1256          var tree = trees[i];
1257          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), tmpNodeValues, out double z, out Vector dz);
1258          for (int j = 0; j < calculatedVariables.Length; j++) {
1259            CVODES.SUNDenseMatrix_Set(Jac, i, j, dz[j]);
1260          }
1261        }
1262        return 0; // on success
1263      };
1264    }
1265
1266
1267    // to calculate sensitivities RHS for all equations at once
1268    // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store in ySdot.
1269    // Index i refers to parameters, dimensionality of matrix and vectors is number of equations
1270    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, NodeValueLookup nodeValues) {
1271      return (
1272              int Ns, // number of parameters
1273              double t, // current time
1274              IntPtr y, // N_Vector y(t) (input)
1275              IntPtr ydot, // N_Vector dy/dt(t) (input)
1276              IntPtr yS, // N_Vector*, one vector for each parameter (input)
1277              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
1278              IntPtr user_data, // optional (unused here)
1279              IntPtr tmp1, // N_Vector, optional (unused here)
1280              IntPtr tmp2 // N_Vector, optional (unused here)
1281        ) => {
1282
1283          var tmpNodeValues = new NodeValueLookup(trees, variableGradient: true); // for df / dy calculation
1284
1285          // update variableValues based on integration results
1286          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
1287            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
1288            var gArr = new double[Ns];
1289            for (var pIdx = 0; pIdx < Ns; pIdx++) {
1290              unsafe {
1291                var yS_pi = *((IntPtr*)yS.ToPointer() + pIdx);
1292                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS_pi, varIdx);
1293              }
1294            }
1295            nodeValues.SetVariableValue(calculatedVariables[varIdx], yi, new Vector(gArr));
1296            tmpNodeValues.SetVariableValue(calculatedVariables[varIdx], yi, Vector.CreateIndicator(calculatedVariables.Length, varIdx));
1297          }
1298
1299          for (int pIdx = 0; pIdx < Ns; pIdx++) {
1300            unsafe {
1301              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1302              CVODES.N_VConst_Serial(0.0, sDot_pi);
1303            }
1304          }
1305
1306
1307          for (int i = 0; i < trees.Length; i++) {
1308            var tree = trees[i];
1309
1310            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i
1311
1312            // 1. interpret tree to calculate (∂f/∂y)
1313            // we need a different nodeValue object for (∂f/∂y)
1314            InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), tmpNodeValues, out double z1, out Vector df_dy);
1315
1316            // 2. interpret tree to calculate ∂f/∂p_i
1317            InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out double z, out Vector df_dp);
1318
1319            for (int pIdx = 0; pIdx < Ns; pIdx++) {
1320              unsafe {
1321                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1322                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);
1323
1325                // (∂f/∂y)s_i(t)
1326                var p = 0.0;
1327                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
1328                  p += df_dy[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
1329                }
1330                // + ∂f/∂p_i
1331                CVODES.NV_Set_Ith_S(sDot_pi, i, p + df_dp[pIdx]);
1332              }
1333            }
1334
1335          }
1336          return 0; // on success
1337        };
1338    }
1339
1340    #endregion
1341
1342    private static void IntegrateHL(
1343      ISymbolicExpressionTree[] trees,
1344      string[] calculatedVariables, // names of integrated variables
1345      NodeValueLookup nodeValues,
1346      int numericIntegrationSteps) {
1347
1348
1349      double[] deltaF = new double[calculatedVariables.Length];
1350      Vector[] deltaG = new Vector[calculatedVariables.Length];
1351
1352      double h = 1.0 / numericIntegrationSteps;
1353      for (int step = 0; step < numericIntegrationSteps; step++) {
1354
1355        // evaluate all trees
1356        for (int i = 0; i < trees.Length; i++) {
1357          var tree = trees[i];
1358
1359          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
1360          double f; Vector g;
1361          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
1362          deltaF[i] = f;
1363          deltaG[i] = g;
1364        }
1365
1366        // update variableValues for next step (explicit Euler integration)
1367        for (int i = 0; i < trees.Length; i++) {
1368          var varName = calculatedVariables[i];
1369          var oldVal = nodeValues.GetVariableValue(varName);
1370          nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2.Add(deltaG[i].Scale(h)));
1371        }
1372      }
1373    }
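    // Editor's sketch: one call to IntegrateHL performs numericIntegrationSteps
    // explicit Euler steps of size h = 1/n and therefore advances the system by
    // one unit time step. E.g. for dy/dt = -y with y(0) = 1 and n = 10 this gives
    // y ≈ 0.9^10 ≈ 0.349 after one step, vs. the exact value exp(-1) ≈ 0.368.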
1374
1375    // TODO: use an existing interpreter implementation instead
1376    private static double InterpretRec(ISymbolicExpressionTreeNode node, NodeValueLookup nodeValues) {
1377      if (node is ConstantTreeNode) {
1378        return ((ConstantTreeNode)node).Value;
1379      } else if (node is VariableTreeNode) {
1380        return nodeValues.NodeValue(node);
1381      } else if (node.Symbol is Addition) {
1382        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1383        for (int i = 1; i < node.SubtreeCount; i++) {
1384          f += InterpretRec(node.GetSubtree(i), nodeValues);
1385        }
1386        return f;
1387      } else if (node.Symbol is Multiplication) {
1388        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1389        for (int i = 1; i < node.SubtreeCount; i++) {
1390          f *= InterpretRec(node.GetSubtree(i), nodeValues);
1391        }
1392        return f;
1393      } else if (node.Symbol is Subtraction) {
1394        if (node.SubtreeCount == 1) {
1395          return -InterpretRec(node.GetSubtree(0), nodeValues);
1396        } else {
1397          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1398          for (int i = 1; i < node.SubtreeCount; i++) {
1399            f -= InterpretRec(node.GetSubtree(i), nodeValues);
1400          }
1401          return f;
1402        }
1403      } else if (node.Symbol is Division) {
1404        if (node.SubtreeCount == 1) {
1405          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1406          // protected division
1407          if (f.IsAlmost(0.0)) {
1408            return 0;
1409          } else {
1410            return 1.0 / f;
1411          }
1412        } else {
1413          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1414          for (int i = 1; i < node.SubtreeCount; i++) {
1415            var g = InterpretRec(node.GetSubtree(i), nodeValues);
1416            // protected division
1417            if (g.IsAlmost(0.0)) {
1418              return 0;
1419            } else {
1420              f /= g;
1421            }
1422          }
1423          return f;
1424        }
1425      } else if (node.Symbol is Sine) {
1426        Assert(node.SubtreeCount == 1);
1427
1428        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1429        return Math.Sin(f);
1430      } else if (node.Symbol is Cosine) {
1431        Assert(node.SubtreeCount == 1);
1432
1433        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1434        return Math.Cos(f);
1435      } else if (node.Symbol is Square) {
1436        Assert(node.SubtreeCount == 1);
1437
1438        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1439        return f * f;
1440      } else if (node.Symbol is Exponential) {
1441        Assert(node.SubtreeCount == 1);
1442
1443        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1444        return Math.Exp(f);
1445      } else if (node.Symbol is Logarithm) {
1446        Assert(node.SubtreeCount == 1);
1447
1448        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1449        return Math.Log(f);
1450      } else if (node.Symbol is HyperbolicTangent) {
1451        Assert(node.SubtreeCount == 1);
1452
1453        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1454        return Math.Tanh(f);
1455      } else if (node.Symbol is AnalyticQuotient) {
1456        Assert(node.SubtreeCount == 2);
1457
1458        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1459        var g = InterpretRec(node.GetSubtree(1), nodeValues);
1460        return f / Math.Sqrt(1 + g * g);
1461      } else throw new NotSupportedException("unsupported symbol");
1462    }
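    // Editor's note: division is "protected": a (near-)zero denominator yields 0
    // instead of NaN or Infinity, e.g. interpreting the tree (/ 1.0 0.0) returns 0.
    // The gradient variant below applies the same convention and additionally
    // returns a zero gradient vector in that case.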
1463
1464    private static void Assert(bool cond) {
1465#if DEBUG
1466      if (!cond) throw new InvalidOperationException("Assertion failed");
1467#endif
1468    }
1469
1470    private static void InterpretRec(
1471      ISymbolicExpressionTreeNode node,
1472      NodeValueLookup nodeValues,  // contains value and gradient vector for a node (variables and constants only)
1473      out double z,
1474      out Vector dz
1475      ) {
1476      double f, g;
1477      Vector df, dg;
1478      if (node.Symbol is Constant || node.Symbol is Variable) {
1479        z = nodeValues.NodeValue(node);
1480        dz = Vector.CreateNew(nodeValues.NodeGradient(node)); // original gradient vectors are never changed by evaluation
1481      } else if (node.Symbol is Addition) {
1482        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1483        for (int i = 1; i < node.SubtreeCount; i++) {
1484          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1485          f = f + g;
1486          df = df.Add(dg);
1487        }
1488        z = f;
1489        dz = df;
1490      } else if (node.Symbol is Multiplication) {
1491        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1492        for (int i = 1; i < node.SubtreeCount; i++) {
1493          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1494          df = df.Scale(g).Add(dg.Scale(f));  // d(f*g) = f'*g + f*g' (df must be computed before f is updated)
1495          f = f * g;
1496        }
1497        z = f;
1498        dz = df;
1499      } else if (node.Symbol is Subtraction) {
1500        if (node.SubtreeCount == 1) {
1501          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1502          z = -f;
1503          dz = df.Scale(-1.0);
1504        } else {
1505          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1506          for (int i = 1; i < node.SubtreeCount; i++) {
1507            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1508            f = f - g;
1509            df = df.Subtract(dg);
1510          }
1511          z = f;
1512          dz = df;
1513        }
1514      } else if (node.Symbol is Division) {
1515        if (node.SubtreeCount == 1) {
1516          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1517          // protected division
1518          if (f.IsAlmost(0.0)) {
1519            z = 0;
1520            dz = Vector.Zero;
1521          } else {
1522            z = 1.0 / f;
1523            dz = df.Scale(-1 * z * z);
1524          }
1525        } else {
1526          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1527          for (int i = 1; i < node.SubtreeCount; i++) {
1528            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1529            // protected division
1530            if (g.IsAlmost(0.0)) {
1531              z = 0;
1532              dz = Vector.Zero;
1533              return;
1534            } else {
1535              var inv_g = 1.0 / g;
1536              df = df.Scale(inv_g).Add(dg.Scale(-f * inv_g * inv_g)); // d(f/g) = f'/g - f*g'/g² (df must be computed before f is updated)
1537              f = f * inv_g;
1538            }
1539          }
1540          z = f;
1541          dz = df;
1542        }
1543      } else if (node.Symbol is Sine) {
1544        Assert(node.SubtreeCount == 1);
1545        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1546        z = Math.Sin(f);
1547        dz = df.Scale(Math.Cos(f));
1548      } else if (node.Symbol is Cosine) {
1549        Assert(node.SubtreeCount == 1);
1550        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1551        z = Math.Cos(f);
1552        dz = df.Scale(-Math.Sin(f));
1553      } else if (node.Symbol is Square) {
1554        Assert(node.SubtreeCount == 1);
1555        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1556        z = f * f;
1557        dz = df.Scale(2.0 * f);
1558      } else if (node.Symbol is Exponential) {
1559        Assert(node.SubtreeCount == 1);
1560        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1561        z = Math.Exp(f);
1562        dz = df.Scale(Math.Exp(f));
1563      } else if (node.Symbol is Logarithm) {
1564        Assert(node.SubtreeCount == 1);
1565        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1566        z = Math.Log(f);
1567        dz = df.Scale(1.0 / f);
1568      } else if (node.Symbol is HyperbolicTangent) {
1569        Assert(node.SubtreeCount == 1);
1570        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1571        z = Math.Tanh(f);
1572        dz = df.Scale(1 - z * z); // tanh(f(x))' = f(x)'sech²(f(x)) = f(x)'(1 - tanh²(f(x)))
1573      } else if (node.Symbol is AnalyticQuotient) {
1574        Assert(node.SubtreeCount == 2);
1575        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1576        InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1577        z = f / Math.Sqrt(1 + g * g);
1578        var invNorm = 1.0 / Math.Pow(1 + g * g, 1.5); // reciprocal of the denominator (1 + g²)^(3/2)
1579        dz = df.Scale(1 + g * g).Subtract(dg.Scale(f * g)).Scale(invNorm);
1580      } else {
1581        throw new NotSupportedException("unsupported symbol");
1582      }
1583    }
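    // Editor's worked example for the AnalyticQuotient case above:
    //   z  = aq(f, g) = f / sqrt(1 + g²)
    //   dz = (df·(1 + g²) - dg·f·g) / (1 + g²)^(3/2)
    // which reduces to dz = df for g = 0.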
1584
1585    #endregion
1586
1587    #region events
1588    /*
1589     * Dependencies between parameters:
1590     *
1591     * ProblemData
1592     *    |                                                                         
1593     *    V                                                                         
1594     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables     
1595     *               |   |                 |                   |                     
1596     *               V   V                 |                   |                     
1597     *             Grammar <---------------+-------------------                     
1598     *                |                                                             
1599     *                V                                                             
1600     *            Encoding                                                           
1601     */
1602    private void RegisterEventHandlers() {
1603      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
1604      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
1605
1606      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
1607      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1608
1609      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
1610      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1611
1612      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
1613
1614      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
1615    }
1616
1617    private void NumLatentVariablesChanged(object sender, EventArgs e) {
1618      UpdateGrammarAndEncoding();
1619    }
1620
1621    private void MaximumLengthChanged(object sender, EventArgs e) {
1622      UpdateGrammarAndEncoding();
1623    }
1624
1625    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
1626      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1627    }
1628
1629    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1630      UpdateGrammarAndEncoding();
1631    }
1632
1633    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
1634      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1635      UpdateGrammarAndEncoding();
1636    }
1637
1638    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1639      UpdateGrammarAndEncoding();
1640    }
1641
1642    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
1643      ProblemDataParameter.Value.Changed += ProblemData_Changed;
1644      OnProblemDataChanged();
1645      OnReset();
1646    }
1647
1648    private void ProblemData_Changed(object sender, EventArgs e) {
1649      OnProblemDataChanged();
1650      OnReset();
1651    }
1652
1653    private void OnProblemDataChanged() {
1654      UpdateTargetVariables();        // implicitly updates other dependent parameters
1655      var handler = ProblemDataChanged;
1656      if (handler != null) handler(this, EventArgs.Empty);
1657    }
1658
1659    #endregion
1660
1661    #region  helper
1662
1663    private static double[] CalculateDifferences(double[] targetValues, double numericDifferencesSmoothing) {
1664      return CalculateDifferencesSavitzkyGolay(targetValues); // the smoothing parameter is only used by the penalized-splines variant below
1665    }
1666
1667    private static double[] CalculateDifferencesPenalizedSplines(double[] targetValues, double numericDifferencesSmoothing) {
1668      var x = Enumerable.Range(0, targetValues.Length).Select(i => (double)i).ToArray();
1669      alglib.spline1dfitpenalized(x, targetValues, targetValues.Length / 2, numericDifferencesSmoothing,
1670        out int info, out alglib.spline1dinterpolant s, out alglib.spline1dfitreport rep);
1671      if (info <= 0) throw new ArgumentException("There was a problem while smoothing numeric differences. Try to use a different smoothing parameter value.");
1672
1673      double[] dy = new double[x.Length];
1674      for (int i = 0; i < x.Length; i++) {
1675        double xi = x[i];
1676        alglib.spline1ddiff(s, xi, out double y, out double dyi, out double d2y);
1677        dy[i] = dyi;
1678      }
1679      return dy;
1680    }
1681
1682    private static readonly double[] sgCoeffMiddle = SavitzkyGolayCoefficients(3, 3, 1, 3);
1683    private static readonly double[] sgCoeffStart = SavitzkyGolayCoefficients(0, 3, 1, 3);
1684    private static readonly double[] sgCoeffEnd = SavitzkyGolayCoefficients(3, 0, 1, 3);
1685    private static double[] CalculateDifferencesSavitzkyGolay(double[] y) {
1686      double[] dy = new double[y.Length];
1687      for (int i = 3; i < y.Length - 3; i++) {
1688        for (int j = -3; j <= 3; j++) {
1689          dy[i] += y[i + j] * sgCoeffMiddle[j + 3];
1690        }
1691      }
1692
1693      // start
1694      for (int i = 0; i < 3; i++) {
1695        for (int j = 0; j <= 3; j++) {
1696          dy[i] += y[i + j] * sgCoeffStart[j];
1697        }
1698      }
1699
1700      // end
1701      for (int i = y.Length - 3; i < y.Length; i++) {
1702        for (int j = -3; j <= 0; j++) {
1703          dy[i] += y[i + j] * sgCoeffEnd[j + 3];
1704        }
1705      }
1706
1707      return dy;
1708    }
1709
1710    /// <summary>
1711    /// Calculates coefficients for Savitzky-Golay filtering (Numerical Recipes, page 769). One important change is that the coefficients are returned in normal order instead of wraparound order.
1712    /// </summary>
1713    /// <param name="nl">number of samples to the left</param>
1714    /// <param name="nr">number of samples to the right</param>
1715    /// <param name="ld">order of derivative (smoothing=0)</param>
1716    /// <param name="order">order of the polynomial to fit</param>
1717    /// <returns>resulting coefficients for convolution, in correct order (t-nl, ... t-1, t+0, t+1, ... t+nr)</returns>
1718    private static double[] SavitzkyGolayCoefficients(int nl, int nr, int ld, int order) {
1719      int np = nl + nr + 1;
1720
1721      int j, k, imj, ipj, kk, mm;
1722      double fac = 0;
1723      double sum = 0;
1724      if (nl < 0 || nr < 0 || ld > order || nl + nr < order) throw new ArgumentException();
1725
1726      double[,] a = new double[order + 1, order + 1];
1727      double[] b = new double[order + 1];
1728      var c = new double[np];
1729
1730      for (ipj = 0; ipj <= (order << 1); ipj++) {
1731        sum = (ipj > 0 ? 0.0 : 1.0);
1732        for (k = 1; k <= nr; k++) sum += Math.Pow((double)k, (double)ipj);
1733        for (k = 1; k <= nl; k++) sum += Math.Pow((double)-k, (double)ipj);
1734        mm = Math.Min(ipj, 2 * order - ipj);
1735        for (imj = -mm; imj <= mm; imj += 2)
1736          a[(ipj + imj) / 2, (ipj - imj) / 2] = sum;
1737      }
1738      for (j = 0; j < order + 1; j++) b[j] = 0;
1739      b[ld] = 1.0;
1740      alglib.densesolverreport rep;
1741      int info;
1742      double[] x = new double[b.Length];
1743      alglib.rmatrixsolve(a, b.Length, b, out info, out rep, out x);
1744
1745      for (kk = 0; kk < np; kk++) c[kk] = 0.0;
1746      for (k = -nl; k <= nr; k++) {
1747        sum = x[0];
1748        fac = 1.0;
1749        for (mm = 1; mm <= order; mm++) sum += x[mm] * (fac *= k);
1750        kk = k + nl;
1751        c[kk] = sum;
1752      }
1753      return c;
1754    }
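    // Editor's sketch: for a symmetric 3-point window and a linear fit the
    // coefficients reduce to the familiar central-difference stencil:
    //   var c = SavitzkyGolayCoefficients(nl: 1, nr: 1, ld: 1, order: 1);
    //   // c ≈ { -0.5, 0.0, +0.5 }  =>  dy[t] ≈ (y[t+1] - y[t-1]) / 2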
1755
1756
1757    private void InitAllParameters() {
1758      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
1759    }
1760
1761    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
1762      var l = new CheckedItemList<StringValue>();
1763      l.Add(new StringValue("Addition").AsReadOnly());
1764      l.Add(new StringValue("Multiplication").AsReadOnly());
1765      l.Add(new StringValue("Division").AsReadOnly());
1766      l.Add(new StringValue("Subtraction").AsReadOnly());
1767      l.Add(new StringValue("Sine").AsReadOnly());
1768      l.Add(new StringValue("Cosine").AsReadOnly());
1769      l.Add(new StringValue("Square").AsReadOnly());
1770      l.Add(new StringValue("Logarithm").AsReadOnly());
1771      l.Add(new StringValue("Exponential").AsReadOnly());
1772      l.Add(new StringValue("HyperbolicTangent").AsReadOnly());
1773      l.Add(new StringValue("AnalyticQuotient").AsReadOnly());
1774      return l.AsReadOnly();
1775    }
1776
1777    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
1778      return n is ConstantTreeNode;
1779    }
1780    private static double GetConstantValue(ISymbolicExpressionTreeNode n) {
1781      return ((ConstantTreeNode)n).Value;
1782    }
1783    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
1784      return n.Symbol.Name[0] == 'λ';
1785    }
1786    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
1787      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
1788    }
1789    private static string GetVariableName(ISymbolicExpressionTreeNode n) {
1790      return ((VariableTreeNode)n).VariableName;
1791    }
1792
1793    private void UpdateTargetVariables() {
1794      var currentlySelectedVariables = TargetVariables.CheckedItems
1795        .OrderBy(i => i.Index)
1796        .Select(i => i.Value.Value)
1797        .ToArray();
1798
1799      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
1800      // restore the checked state of previously selected variables
1801      foreach (var item in newVariablesList) {
1802        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
1803      }
1808      TargetVariablesParameter.Value = newVariablesList;
1809    }
1810
1811    private void UpdateGrammarAndEncoding() {
1812      var encoding = new MultiEncoding();
1813      var g = CreateGrammar();
1814      foreach (var targetVar in TargetVariables.CheckedItems) {
1815        var e = new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength);
1816        var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
1817        var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1818        // make sure our multi-manipulator is the only manipulator
1819        e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1820
1821        // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
1822        var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
1823        foreach (var xover in subtreeCrossovers) {
1824          xover.CrossoverProbability.Value = 0.3;
1825        }
1826
1827        encoding = encoding.Add(e); // only limit by length
1828      }
1829      for (int i = 1; i <= NumberOfLatentVariables; i++) {
1830        var e = new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength);
1831        var multiManipulator = e.Operators.Where(op => op is MultiSymbolicExpressionTreeManipulator).First();
1832        var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1833        // make sure our multi-manipulator is the only manipulator
1834        e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1835
1836        // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
1837        var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
1838        foreach (var xover in subtreeCrossovers) {
1839          xover.CrossoverProbability.Value = 0.3;
1840        }
1841
1842        encoding = encoding.Add(e);
1843      }
1844      Encoding = encoding;
1845    }
1846
1847    private ISymbolicExpressionGrammar CreateGrammar() {
1848      var grammar = new TypeCoherentExpressionGrammar();
1849      grammar.StartGrammarManipulation();
1850
1851      var problemData = ProblemData;
1852      var ds = problemData.Dataset;
1853      grammar.MaximumFunctionArguments = 0;
1854      grammar.MaximumFunctionDefinitions = 0;
1855      var allowedVariables = problemData.AllowedInputVariables.Concat(TargetVariables.CheckedItems.Select(chk => chk.Value.Value));
1856      foreach (var varSymbol in grammar.Symbols.OfType<HeuristicLab.Problems.DataAnalysis.Symbolic.VariableBase>()) {
1857        if (!varSymbol.Fixed) {
1858          varSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<double>(x));
1859          varSymbol.VariableNames = allowedVariables.Where(x => ds.VariableHasType<double>(x));
1860        }
1861      }
1862      foreach (var factorSymbol in grammar.Symbols.OfType<BinaryFactorVariable>()) {
1863        if (!factorSymbol.Fixed) {
1864          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
1865          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
1866          factorSymbol.VariableValues = factorSymbol.VariableNames
1867            .ToDictionary(varName => varName, varName => ds.GetStringValues(varName).Distinct().ToList());
1868        }
1869      }
1870      foreach (var factorSymbol in grammar.Symbols.OfType<FactorVariable>()) {
1871        if (!factorSymbol.Fixed) {
1872          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
1873          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
1874          factorSymbol.VariableValues = factorSymbol.VariableNames
1875            .ToDictionary(varName => varName,
1876            varName => ds.GetStringValues(varName).Distinct()
1877            .Select((n, i) => Tuple.Create(n, i))
1878            .ToDictionary(tup => tup.Item1, tup => tup.Item2));
1879        }
1880      }
1881
1882      grammar.ConfigureAsDefaultRegressionGrammar();
1883
1884      // configure initialization of constants
1885      var constSy = (Constant)grammar.GetSymbol("Constant");
1886      // max and min are only relevant for initialization
1887      constSy.MaxValue = +1.0e-1; // small initial values for constant opt
1888      constSy.MinValue = -1.0e-1;
1889      constSy.MultiplicativeManipulatorSigma = 1.0; // allow large jumps for manipulation
1890      constSy.ManipulatorMu = 0.0;
1891      constSy.ManipulatorSigma = 1.0; // allow large jumps
1892
1893      // configure initialization of variables
1894      var varSy = (Variable)grammar.GetSymbol("Variable");
1895      // fix variable weights to 1.0
1896      varSy.WeightMu = 1.0;
1897      varSy.WeightSigma = 0.0;
1898      varSy.WeightManipulatorMu = 0.0;
1899      varSy.WeightManipulatorSigma = 0.0;
1900      varSy.MultiplicativeWeightManipulatorSigma = 0.0;
1901
1902      foreach (var f in FunctionSet) {
1903        grammar.GetSymbol(f.Value).Enabled = FunctionSet.ItemChecked(f);
1904      }
1905
1906      grammar.FinishedGrammarManipulation();
1907      return grammar;
1908
1909    }
1910    #endregion
1911
1912
1913    #region Import
1914    public void Load(Problem problem) {
1915      // transfer parameter values from problem parameter
1916      this.ProblemData = problem.ProblemData;
1917      this.TrainingEpisodesParameter.Value = problem.TrainingEpisodesParameter.Value;
1918      this.TargetVariablesParameter.Value = problem.TargetVariablesParameter.Value;
1919      this.Name = problem.Name;
1920      this.Description = problem.Description;
1921    }
1922    #endregion
1923
1924
1925    // TODO: for integration we only need a subset of the data that is required for optimization
1926
1927    public class OptimizationData {
1928      public readonly ISymbolicExpressionTree[] trees;
1929      public readonly string[] targetVariables;
1930      public readonly IRegressionProblemData problemData;
1931      public readonly double[][] targetValues;
1932      public readonly double[] inverseStandardDeviation;
1933      public readonly IntRange[] episodes;
1934      public readonly int numericIntegrationSteps;
1935      public readonly string[] latentVariables;
1936      public readonly string odeSolver;
1937      public readonly NodeValueLookup nodeValueLookup;
1938      public readonly int[] rows;
1939      internal readonly string[] variables;
1940
1941      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, string[] inputVariables,
1942        IRegressionProblemData problemData,
1943        double[][] targetValues,
1944        IntRange[] episodes,
1945        int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
1946        this.trees = trees;
1947        this.targetVariables = targetVars;
1948        this.problemData = problemData;
1949        this.targetValues = targetValues;
1950        this.variables = inputVariables;
1951        if (targetValues != null) {
1952          this.inverseStandardDeviation = new double[targetValues.Length];
1953          for (int i = 0; i < targetValues.Length; i++) {
1954            // calculate the standard deviation for each episode separately and average over episodes
1955            var epStartIdx = 0;
1956            var stdevs = new List<double>();
1957            foreach (var ep in episodes) {
1958              var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
1959              stdevs.Add(epValues.StandardDeviation());
1960              epStartIdx += ep.Size;
1961            }
1962            inverseStandardDeviation[i] = 1.0 / stdevs.Average();
1963          }
1964        } else
1965          this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
1966        this.episodes = episodes;
1967        this.numericIntegrationSteps = numericIntegrationSteps;
1968        this.latentVariables = latentVariables;
1969        this.odeSolver = odeSolver;
1970        this.nodeValueLookup = new NodeValueLookup(trees);
1971        this.rows = episodes.SelectMany(ep => Enumerable.Range(ep.Start, ep.Size)).ToArray();
1972      }
1973    }
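    // Editor's note (an interpretation of the construction above, not stated in
    // the original): inverseStandardDeviation holds the reciprocal of the average
    // per-episode standard deviation of each target; scaling residuals by it puts
    // targets with very different magnitudes on a comparable footing.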
1974
1975    public class NodeValueLookup {
1976      private readonly Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> node2val = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1977      private readonly Dictionary<string, List<ISymbolicExpressionTreeNode>> name2nodes = new Dictionary<string, List<ISymbolicExpressionTreeNode>>();
1978      private readonly ConstantTreeNode[] constantNodes;
1979      private readonly Vector[] constantGradientVectors;
1980
1981
1982      public double NodeValue(ISymbolicExpressionTreeNode node) => node2val[node].Item1;
1983      public Vector NodeGradient(ISymbolicExpressionTreeNode node) => node2val[node].Item2;
1984
1985      public NodeValueLookup(ISymbolicExpressionTree[] trees, bool variableGradient = false) {
1986        this.constantNodes = trees.SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>()).ToArray();
1987        if (!variableGradient) {
1988          constantGradientVectors = new Vector[constantNodes.Length];
1989          for (int paramIdx = 0; paramIdx < constantNodes.Length; paramIdx++) {
1990            constantGradientVectors[paramIdx] = Vector.CreateIndicator(length: constantNodes.Length, idx: paramIdx);
1991
1992            var node = constantNodes[paramIdx];
1993            node2val[node] = Tuple.Create(node.Value, constantGradientVectors[paramIdx]);
1994          }
1995
1996          foreach (var tree in trees) {
1997            foreach (var node in tree.IterateNodesPrefix().Where(IsVariableNode)) {
1998              var varName = GetVariableName(node);
1999              if (!name2nodes.TryGetValue(varName, out List<ISymbolicExpressionTreeNode> nodes)) {
2000                nodes = new List<ISymbolicExpressionTreeNode>();
2001                name2nodes.Add(varName, nodes);
2002              }
2003              nodes.Add(node);
2004              SetVariableValue(varName, 0.0);  // this value is updated in the prediction loop
2005            }
2006          }
2007        } else {
2009          // variable gradient means we want to calculate the gradient over the target variables instead of parameters
2010          for (int paramIdx = 0; paramIdx < constantNodes.Length; paramIdx++) {
2011            var node = constantNodes[paramIdx];
2012            node2val[node] = Tuple.Create(node.Value, Vector.Zero);
2013          }
2014
2015          foreach (var tree in trees) {
2016            foreach (var node in tree.IterateNodesPrefix().Where(IsVariableNode)) {
2017              var varName = GetVariableName(node);
2018              if (!name2nodes.TryGetValue(varName, out List<ISymbolicExpressionTreeNode> nodes)) {
2019                nodes = new List<ISymbolicExpressionTreeNode>();
2020                name2nodes.Add(varName, nodes);
2021              }
2022              nodes.Add(node);
2023              SetVariableValue(varName, 0.0);  // this value is updated in the prediction loop
2024            }
2025          }
2026        }
2027      }
2028
2029      public int ParameterCount => constantNodes.Length;
2030
2031      public void SetVariableValue(string variableName, double val) {
2032        SetVariableValue(variableName, val, Vector.Zero);
2033      }
2034      public Tuple<double, Vector> GetVariableValue(string variableName) {
2035        return node2val[name2nodes[variableName].First()];
2036      }
2037      public void SetVariableValue(string variableName, double val, Vector dVal) {
2038        if (name2nodes.TryGetValue(variableName, out List<ISymbolicExpressionTreeNode> nodes)) {
2039          nodes.ForEach(n => node2val[n] = Tuple.Create(val, dVal));
2040        } else {
2041          var fakeNode = new VariableTreeNode(new Variable());
2042          fakeNode.Weight = 1.0;
2043          fakeNode.VariableName = variableName;
2044          var newNodeList = new List<ISymbolicExpressionTreeNode>();
2045          newNodeList.Add(fakeNode);
2046          name2nodes.Add(variableName, newNodeList);
2047          node2val[fakeNode] = Tuple.Create(val, dVal);
2048        }
2049      }
2050
2051      internal void UpdateParamValues(double[] x) {
2052        for (int i = 0; i < x.Length; i++) {
2053          constantNodes[i].Value = x[i];
2054          node2val[constantNodes[i]] = Tuple.Create(x[i], constantGradientVectors[i]);
2055        }
2056      }
2057    }
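    // Usage sketch (editor's addition; the values are hypothetical): typical
    // interplay of NodeValueLookup with the interpreter for trees over variable "x":
    //   var lookup = new NodeValueLookup(trees);
    //   lookup.SetVariableValue("x", 2.0);                   // gradient defaults to Vector.Zero
    //   lookup.UpdateParamValues(new double[] { 0.5, 1.5 }); // one entry per constant node
    //   InterpretRec(trees[0].Root.GetSubtree(0).GetSubtree(0), lookup, out double z, out Vector dz);
    //   // z is the evaluated value, dz its gradient w.r.t. the constants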
2058  }
2059}