
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16600

Last change on this file: r16600, checked in by gkronber, 5 years ago

#2925: made some simplifications (Vector) to aid debugging.

File size: 69.3 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  // Another possibility is spline smoothing of the data (over time) to obtain, for each target variable,
  // a de-noised value and the derivative dy/dt for all observations.
  // The regression can then be applied directly to dy/dt (with the smoothed y values as inputs).
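  // A minimal sketch of this alternative, assuming hypothetical spline helpers (not part of this project):
  //   var ySmooth = Spline.Smooth(t, y);            // de-noised target values
  //   var dydt    = Spline.Derivative(t, ySmooth);  // derivative of the spline at each observation
  //   // then fit dy/dt = f(ySmooth, inputs) as an ordinary regression problem,
  //   // which avoids numeric integration of the ODE during evaluation.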
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use the CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides the setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for the optimization of parameters (using Levenberg-Marquardt). More iterations make the algorithm slower; fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference from target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "The number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps worsen the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training; each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));

      var solversStr = new string[] { "HeuristicLab", "CVODES" };
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: do not clear the selection of target variables when the input variables are changed (keep selected target variables)
      // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
      // TODO: use training range as default training episode
      // TODO: write back optimized parameters to the solution?
      // TODO: optimization of starting values for latent variables in the CVODES solver

    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
      // write back optimized parameters to tree nodes instead of the separate OptTheta variable
      // retrieve optimized parameters from nodes?

      var problemData = Standardize(ProblemData);
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          double[] optTheta;
          double nmse;
          OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        double[] optTheta;
        double nmse;
        OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
        individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
        return nmse;
      }
    }

    private IRegressionProblemData Standardize(IRegressionProblemData problemData) {
      // var standardizedDataset = new Dataset(
      //   problemData.Dataset.DoubleVariables,
      //   problemData.Dataset.DoubleVariables.Select(v => Standardize(problemData.Dataset.GetReadOnlyDoubleValues(v)).ToList()));
      // return new RegressionProblemData(standardizedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      return new RegressionProblemData(problemData.Dataset, problemData.AllowedInputVariables, problemData.TargetVariable);
    }

    public static void OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver,
      out double[] optTheta,
      out double nmse) {
      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
      var targetValues = new double[rows.Length, targetVars.Length];


      // collect values of all target variables
      var colIdx = 0;
      foreach (var targetVar in targetVars) {
        int rowIdx = 0;
        foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
          targetValues[rowIdx, colIdx] = value;
          rowIdx++;
        }
        colIdx++;
      }

      // NOTE: the order of values in the parameter vector matches the prefix order of constant nodes in the trees
      var paramNodes = new List<ISymbolicExpressionTreeNode>();
      foreach (var t in trees) {
        foreach (var n in t.IterateNodesPrefix()) {
          if (IsConstantNode(n))
            paramNodes.Add(n);
        }
      }
      // init params randomly
      // theta contains the parameter values for the trees followed by the initial values for the latent variables (a separate vector for each episode)
      // initial values for latent variables are also optimized
      var theta = new double[paramNodes.Count + latentVariables.Length * episodes.Count()];
      for (int i = 0; i < theta.Length; i++)
        theta[i] = random.NextDouble() * 2.0e-1 - 1.0e-1;
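      // (NextDouble() * 2.0e-1 - 1.0e-1 draws each parameter uniformly from [-0.1, 0.1))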

      optTheta = new double[0];
      if (theta.Length > 0) {
        alglib.minlmstate state;
        alglib.minlmreport report;
        alglib.minlmcreatevj(targetValues.Length, theta, out state);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
        alglib.minlmsetgradientcheck(state, 1.0e-3);
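        // minlmcreatevj selects the Levenberg-Marquardt mode that uses the analytic Jacobian (fvec + jac callbacks);
        // all tolerances in minlmsetcond are zero, so only the iteration limit stops the optimizer;
        // the gradient check verifies the supplied Jacobian against numerical differentiation (test step 1.0e-3).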
        var myState = new OptimizationData(trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
        alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

        alglib.minlmresults(state, out optTheta, out report);

        /*************************************************************************
         Levenberg-Marquardt algorithm results

         INPUT PARAMETERS:
             State   -   algorithm state

         OUTPUT PARAMETERS:
             X       -   array[0..N-1], solution
             Rep     -   optimization  report;  includes  termination   codes   and
                         additional information. Termination codes are listed below,
                         see comments for this structure for more info.
                         Termination code is stored in rep.terminationtype field:
                         * -8    optimizer detected NAN/INF values either in the
                                 function itself, or in its Jacobian
                         * -7    derivative correctness check failed;
                                 see rep.funcidx, rep.varidx for
                                 more information.
                         * -3    constraints are inconsistent
                         *  2    relative step is no more than EpsX.
                         *  5    MaxIts steps was taken
                         *  7    stopping conditions are too stringent,
                                 further improvement is impossible
                         *  8    terminated by user who called minlmrequesttermination().
                                 X contains point which was "current accepted" when
                                 termination request was submitted.

           -- ALGLIB --
              Copyright 10.03.2009 by Bochkanov Sergey
         *************************************************************************/
        if (report.terminationtype < 0) { nmse = 10.0; return; }

        nmse = state.f; // TODO: check

        // var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver };
        // EvaluateObjectiveVector(optTheta, ref nmse, grad, myState);
        if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10.0; return; } // return a large value (TODO: be consistent by using NMSE)
      } else {
        // no parameters
        nmse = targetValues.Length;
      }

    }

    // private static IEnumerable<double> Standardize(IEnumerable<double> xs) {
    //   var m = xs.Average();
    //   var s = xs.StandardDeviationPop();
    //   return xs.Select(xi => (xi - m) / s);
    // }

    alglib.ndimensional_fvec fvec;
    alglib.ndimensional_jac jac;

    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      EvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {

      Tuple<double, Vector>[][] predicted = null; // one array of predictions for each episode
      predicted = Integrate(optimizationData, x).ToArray();

      // initialize all residuals with a large error value; if we return early below, this penalty remains in place
      for (int j = 0; j < fi.Length; j++) fi[j] = 10.0;
      if (jac != null) Array.Clear(jac, 0, jac.Length);

      if (predicted.Length != optimizationData.targetValues.GetLength(0)) {
        return;
      }

      // for normalized MSE = 1/variance(t) * MSE(t, pred)
      // TODO: Perf. (by standardization of target variables before evaluation of all trees)
      // var invVar = Enumerable.Range(0, targetVariables.Length)
      //   .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
      //   .Select(vec => vec.StandardDeviation()) // TODO: variance instead of stddev
      //   .Select(v => 1.0 / v)
      //   .ToArray();

      // double[] invVar = Enumerable.Repeat(1.0, targetVariables.Length).ToArray();

      // objective function is NMSE
      int n = predicted.Length;
      double invN = 1.0 / n;
      int i = 0;
      int r = 0;
      foreach (var y_pred in predicted) {
        // y_pred contains the predicted values for target variables first and then the predicted values for latent variables
        for (int c = 0; c < optimizationData.targetVariables.Length; c++) {

          var y_pred_f = y_pred[c].Item1;
          var y = optimizationData.targetValues[r, c];

          fi[i] = (y - y_pred_f);
          var g = y_pred[c].Item2;
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[i, j] = -g[j];
          i++; // the errors for all target variables are stored one after another
        }
        r++;
      }
    }

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
      if (!results.ContainsKey("Squared error and gradient")) {
        results.Add(new Result("Squared error and gradient", typeof(DataTable)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = Standardize(ProblemData);
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see Evaluate
          var optimizationData = new OptimizationData(trees, targetVars, problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData, optTheta).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();


        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see Evaluate
        var optimizationData = new OptimizationData(trees, targetVars, problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var trainingPrediction = Integrate(optimizationData, optTheta).ToArray();
        // for target values and latent variables
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            for (int paramIdx = 0; paramIdx < optTheta.Length; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", trainingPrediction.Select(arr => arr[colIdx].Item2[paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        var errorTable = new DataTable("Squared error and gradient");
        var seRow = new DataRow("Squared error");
        var gradientRows = Enumerable.Range(0, optTheta.Length).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        errorTable.Rows.Add(seRow);
        foreach (var gRow in gradientRows) {
          gRow.VisualProperties.SecondYAxis = true;
          errorTable.Rows.Add(gRow);
        }
        var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        int r = 0;
        double invN = 1.0 / trainingRows.Count();
        foreach (var y_pred in trainingPrediction) {
          // calculate objective function gradient
          double f_i = 0.0;
          Vector g_i = Vector.CreateNew(new double[optTheta.Length]);
          for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
            var y_pred_f = y_pred[colIdx].Item1;
            var y = targetValues[colIdx][r];

            var res = (y - y_pred_f);
            var ressq = res * res;
            f_i += ressq * invN /* * Math.Exp(-0.2 * r) */;
            g_i = g_i - 2.0 * res * y_pred[colIdx].Item2 * invN /* * Math.Exp(-0.2 * r) */;
          }
          seRow.Values.Add(f_i);
          for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
          r++;
        }
        results["Squared error and gradient"].Value = errorTable;

        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testOptimizationData = new OptimizationData(trees, targetVars, problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
        var testPrediction = Integrate(testOptimizationData, optTheta).ToArray();

        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var testDataTable = new DataTable(targetVar + " prediction (test)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            testDataTable.Rows.Add(actualValuesRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);

          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var testDataTable = new DataTable(latentVar + " prediction (test)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            testDataTable.Rows.Add(emptyRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);
          }
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();


        #region simplification of models
        // TODO: the dependency on HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var optimizedTrees = new List<ISymbolicExpressionTree>();
        int nextParIdx = 0;
        for (int idx = 0; idx < trees.Length; idx++) {
          var tree = trees[idx];
          optimizedTrees.Add(new SymbolicExpressionTree(FixParameters(tree.Root, optTheta.ToArray(), ref nextParIdx)));
        }
        var ds = problemData.Dataset;
        var newVarNames = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
        var allVarNames = ds.DoubleVariables.Concat(newVarNames);
        var newVarValues = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
        var allVarValues = ds.DoubleVariables.Select(varName => ds.GetDoubleValues(varName).ToList())
          .Concat(Enumerable.Range(0, nextParIdx).Select(i => Enumerable.Repeat(optTheta[i], ds.Rows).ToList()))
          .ToList();
        var newDs = new Dataset(allVarNames, allVarValues);
        var newProblemData = new RegressionProblemData(newDs, problemData.AllowedInputVariables.Concat(newVarValues).ToArray(), problemData.TargetVariable);
        results["Solution"].Value = new Solution(optimizedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   TrainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);


        nextParIdx = 0;
        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          // when we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
          var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta.ToArray(),
            ref nextParIdx));

          // var shownTree = (SymbolicExpressionTree)tree.Clone();
          // var constantsNodeOrig = tree.IterateNodesPrefix().Where(IsConstantNode);
          // var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode);
          //
          // foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) {
          //   double constantsVal = optTheta[nodeIdx[n.original]];
          //
          //   ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal };
          //
          //   var parentNode = n.shown.Parent;
          //   int replacementIndex = parentNode.IndexOfSubtree(n.shown);
          //   parentNode.RemoveSubtree(replacementIndex);
          //   parentNode.InsertSubtree(replacementIndex, replacementNode);
          // }

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
          models.Add(simplifiedTreeVar);

        }

        results["Models"].Value = models;
        #endregion
      }
    }


    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
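    // A concise statement of the forward-sensitivity approach used below: for dy/dt = f(y,p)
    // the sensitivities s_i = ∂y/∂p_i satisfy the additional ODE system
    //   ds_i/dt = (∂f/∂y)·s_i + ∂f/∂p_i,   s_i(t0) = ∂y(t0)/∂p_i,
    // which is integrated forward together with y.
    // Worked example: dy/dt = p·y with y(0) = y0 gives y(t) = y0·e^(p·t); the sensitivity ODE
    //   ds/dt = p·s + y, s(0) = 0 has the solution s(t) = t·y0·e^(p·t), which equals ∂y/∂p.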

    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData, double[] parameterValues) {

      var trees = optimizationData.trees;
      var dataset = optimizationData.problemData.Dataset;
      var inputVariables = optimizationData.problemData.AllowedInputVariables.ToArray();
      var targetVariables = optimizationData.targetVariables;
      var latentVariables = optimizationData.latentVariables;
      var episodes = optimizationData.episodes;
      var odeSolver = optimizationData.odeSolver;
      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;


      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var episodeIdx = 0;
      foreach (var episode in optimizationData.episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);

        // integrate forward starting with known values for the target in t0

        var variableValues = new Dictionary<string, Tuple<double, Vector>>();
        var t0 = rows.First();
        foreach (var varName in inputVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        foreach (var varName in targetVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        // add value entries for latent variables which are also integrated
        // initial values are at the end of the parameter vector
        // separate initial values for each episode
        var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
        foreach (var latentVar in latentVariables) {
          var arr = new double[parameterValues.Length]; // backing array
          arr[initialValueIdx] = 1.0;
          var g = new Vector(arr);
          variableValues.Add(latentVar,
            Tuple.Create(parameterValues[initialValueIdx], g)); // we don't have observations for latent variables, therefore we optimize the initial value for each episode
          initialValueIdx++;
        }

        var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding

        // return the first value as stored in the dataset
        yield return calculatedVariables
          .Select(calcVarName => variableValues[calcVarName])
          .ToArray();

        var prevT = rows.First(); // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
        foreach (var t in rows.Skip(1)) {
          if (odeSolver == "HeuristicLab")
            IntegrateHL(trees, calculatedVariables, variableValues, parameterValues, numericIntegrationSteps);
          else if (odeSolver == "CVODES")
            throw new NotImplementedException();
          // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;

          // This check doesn't work with the HeuristicLab integrator if there are input variables
          //if (variableValues.Count == targetVariables.Length) {
          // only return the target variables for calculation of errors
          var res = calculatedVariables
            .Select(targetVar => variableValues[targetVar])
            .ToArray();
          if (res.Any(ri => double.IsNaN(ri.Item1) || double.IsInfinity(ri.Item1))) yield break;
          yield return res;
          //} else {
          //  yield break; // stop early on problems
          //}


          // update for next time step
          foreach (var varName in inputVariables) {
            variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
          }
        }
        episodeIdx++;
      }
    }
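    // Usage sketch (mirrors the calls in Evaluate and Analyze above):
    //   var od = new OptimizationData(trees, targetVars, problemData, targetValues,
    //                                 episodes, numericIntegrationSteps, latentVariables, "HeuristicLab");
    //   var pred = Integrate(od, theta).ToArray();
    //   // pred[row][i] is a (value, gradient) tuple for the i-th calculated variable (targets first, then latent variables)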

    #region CVODES

    /*
    /// <summary>
    ///  Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
    /// </summary>
    /// <param name="trees">Each equation in the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="variableValues">The start values of the calculated variables as well as their sensitivities over parameters</param>
    /// <param name="parameterValues">The current parameter values</param>
    /// <param name="t">The time t up to which we need to integrate.</param>
    private static void IntegrateCVODES(
      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
      string[] calculatedVariables, // names of elements of y
      Dictionary<string, Tuple<double, Vector>> variableValues,  // y (input and output); input: y(t0), output: y(t0+t)
      double[] parameterValues, // p
      double t // duration t for which we want to integrate
      ) {

      // the RHS of the ODE
      // dy/dt = f(y_t,x_t,p)
      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, parameterValues);
      // the Jacobian ∂f/∂y
      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, parameterValues);

      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, parameterValues);

      // setup solver
      int numberOfEquations = trees.Length;
      IntPtr y = IntPtr.Zero;
      IntPtr cvode_mem = IntPtr.Zero;
      IntPtr A = IntPtr.Zero;
      IntPtr yS0 = IntPtr.Zero;
      IntPtr linearSolver = IntPtr.Zero;
      var ns = parameterValues.Length; // number of parameters

      try {
        y = CVODES.N_VNew_Serial(numberOfEquations);
        // init y to current values of variables
        // y must be initialized before calling CVodeInit
        for (int i = 0; i < calculatedVariables.Length; i++) {
          CVODES.NV_Set_Ith_S(y, i, variableValues[calculatedVariables[i]].Item1);
        }

        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);

        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        double relTol = 1.0e-2;
        double absTol = 1.0;
        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
        Debug.Assert(A != IntPtr.Zero);

        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
        Debug.Assert(linearSolver != IntPtr.Zero);

        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
        unsafe {
          // set to initial sensitivities supplied by caller
          for (int pIdx = 0; pIdx < ns; pIdx++) {
            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
              CVODES.NV_Set_Ith_S(yS0_i, varIdx, variableValues[calculatedVariables[varIdx]].Item2[pIdx]);
            }
          }
        }

        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        // make one forward integration step
        double tout = 0.0; // first output time
        flag = CVODES.CVode(cvode_mem, t, y, ref tout, CVODES.CV_NORMAL);
        if (flag == CVODES.CV_SUCCESS) {
          Debug.Assert(t == tout);

          // get sensitivities
          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
          Debug.Assert(CVODES.CV_SUCCESS == flag);

          // update variableValues based on integration results
          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
            var gArr = new double[parameterValues.Length];
            for (var pIdx = 0; pIdx < parameterValues.Length; pIdx++) {
              unsafe {
                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
              }
            }
            variableValues[calculatedVariables[varIdx]] = Tuple.Create(yi, new Vector(gArr));
          }
        } else {
          variableValues.Clear();   // indicate problems by not returning new values
        }

        // cleanup all allocated objects
      } finally {
        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
      }
    }


    private static CVODES.CVRhsFunc CreateOdeRhs(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {
      // we don't need to calculate a gradient here
      return (double t,
              IntPtr y, // N_Vector, current value of y (input)
              IntPtr ydot, // N_Vector, calculated value of y' (output)
              IntPtr user_data // optional user data, (unused here)
              ) => {
                // TODO: perf
                var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();

                int pIdx = 0;
                foreach (var tree in trees) {
                  foreach (var n in tree.IterateNodesPrefix()) {
                    if (IsConstantNode(n)) {
                      nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we do not need a gradient
                      pIdx++;
                    } else if (n.SubtreeCount == 0) {
                      // for variables and latent variables get the value from variableValues
                      var varName = n.Symbol.Name;
                      var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                      if (varIdx < 0) throw new InvalidProgramException();
                      var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                      nodeValues.Add(n, Tuple.Create(y_i, Vector.Zero)); // no gradient needed
                    }
                  }
                }
                for (int i = 0; i < trees.Length; i++) {
                  var tree = trees[i];
                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
                  CVODES.NV_Set_Ith_S(ydot, i, res_i.Item1);
                }
                return 0;
              };
    }

    private static CVODES.CVDlsJacFunc CreateJac(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {

      return (
        double t, // current time (input)
        IntPtr y, // N_Vector, current value of y (input)
        IntPtr fy, // N_Vector, current value of f (input)
        IntPtr Jac, // SUNMatrix ∂f/∂y (output; row i contains the ∂f_i/∂y vector)
        IntPtr user_data, // optional (unused here)
        IntPtr tmp1, // N_Vector, optional (unused here)
        IntPtr tmp2, // N_Vector, optional (unused here)
        IntPtr tmp3 // N_Vector, optional (unused here)
      ) => {
        // here we need to calculate partial derivatives for the calculated variables y
        var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
        int pIdx = 0;
        foreach (var tree in trees) {
          foreach (var n in tree.IterateNodesPrefix()) {
            if (IsConstantNode(n)) {
              nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y, which is zero for parameters
              pIdx++;
            } else if (n.SubtreeCount == 0) {
              // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
              var varName = n.Symbol.Name;
              var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
              if (varIdx < 0) throw new InvalidProgramException();

              var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
              var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
              gArr[varIdx] = 1.0;
              var g = new Vector(gArr);
              nodeValues.Add(n, Tuple.Create(y_i, g));
            }
          }
        }

        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
          var g = res.Item2;
          for (int j = 0; j < calculatedVariables.Length; j++) {
            CVODES.SUNDenseMatrix_Set(Jac, i, j, g[j]);
          }
        }
        return 0; // on success
      };
    }


    // to calculate the sensitivities RHS for all equations at once
    // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store it in ySdot.
    // Index i refers to parameters; the dimensionality of the matrix and vectors is the number of equations
    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, double[] parameterValues) {
      return (
              int Ns, // number of parameters
              double t, // current time
              IntPtr y, // N_Vector y(t) (input)
              IntPtr ydot, // N_Vector dy/dt(t) (input)
              IntPtr yS, // N_Vector*, one vector for each parameter (input)
              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
              IntPtr user_data, // optional (unused here)
              IntPtr tmp1, // N_Vector, optional (unused here)
              IntPtr tmp2 // N_Vector, optional (unused here)
        ) => {
          // here we need to calculate partial derivatives for the calculated variables y as well as for the parameters
          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
          var d = calculatedVariables.Length + parameterValues.Length; // dimensionality of gradient
          // first collect variable values
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsVariableNode(n)) {
                // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
                var varName = n.Symbol.Name;
                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                if (varIdx < 0) throw new InvalidProgramException();

                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                var gArr = new double[d]; // backing array
                gArr[varIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(y_i, g));
              }
            }
          }
          // then collect constants
          int pIdx = 0;
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsConstantNode(n)) {
                var gArr = new double[d];
                gArr[calculatedVariables.Length + pIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], g));
                pIdx++;
              }
            }
          }
          // gradient vector is [∂f/∂y_1, ∂f/∂y_2, ... ∂f/∂y_N, ∂f/∂p_1 ... ∂f/∂p_K]


          for (pIdx = 0; pIdx < Ns; pIdx++) {
            unsafe {
              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
              CVODES.N_VConst_Serial(0.0, sDot_pi);
            }
          }

          for (int i = 0; i < trees.Length; i++) {
            var tree = trees[i];
            var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
            var g = res.Item2;


            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i

            for (pIdx = 0; pIdx < Ns; pIdx++) {
              unsafe {
                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);

                var v = CVODES.NV_Get_Ith_S(sDot_pi, i);
                // (∂f/∂y)s_i(t)
                var p = 0.0;
                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
                  p += g[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
                }
                // + ∂f/∂p_i
                CVODES.NV_Set_Ith_S(sDot_pi, i, v + p + g[calculatedVariables.Length + pIdx]);
              }
            }

          }
          return 0; // on success
        };
    }
    */
    #endregion
[16251]1024
1025    private static void IntegrateHL(
1026      ISymbolicExpressionTree[] trees,
1027      string[] calculatedVariables, // names of integrated variables
      Dictionary<string, Tuple<double, Vector>> variableValues, // y (input and output); input: y(t0), output: y(t0+1)
[16250]1029      double[] parameterValues,
1030      int numericIntegrationSteps) {
[16251]1031
1032      var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1033
1034      // the nodeValues for parameters are constant over time
1035      // TODO: this needs to be done only once for each iteration in gradient descent (whenever parameter values change)
1036      // NOTE: the order must be the same as above (prefix order for nodes)
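      // For illustration: with parameterValues = { p0, p1 } the first constant node visited in
      // prefix order is seeded with value p0 and gradient (1, 0), the second with p1 and (0, 1),
      // i.e. each parameter starts out with its corresponding unit vector as gradient.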
1037      int pIdx = 0;
1038      foreach (var tree in trees) {
1039        foreach (var node in tree.Root.IterateNodesPrefix()) {
1040          if (IsConstantNode(node)) {
1041            var gArr = new double[parameterValues.Length]; // backing array
1042            gArr[pIdx] = 1.0;
1043            var g = new Vector(gArr);
1044            nodeValues.Add(node, new Tuple<double, Vector>(parameterValues[pIdx], g));
1045            pIdx++;
1046          } else if (node.SubtreeCount == 0) {
1047            // for (latent) variables get the values from variableValues
1048            var varName = node.Symbol.Name;
1049            nodeValues.Add(node, variableValues[varName]);
1050          }
1051        }
1052      }
1053
[16597]1054      double[] deltaF = new double[calculatedVariables.Length];
1055      Vector[] deltaG = new Vector[calculatedVariables.Length];
[16251]1056
[16250]1057      double h = 1.0 / numericIntegrationSteps;
1058      for (int step = 0; step < numericIntegrationSteps; step++) {
[16251]1060        for (int i = 0; i < trees.Length; i++) {
1061          var tree = trees[i];
1062          var targetVarName = calculatedVariables[i];
1063
1064          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
[16597]1065          double f; Vector g;
1066          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
1067          deltaF[i] = f;
1068          deltaG[i] = g;
[16250]1069        }
1070
        // update variableValues for next step (explicit Euler step)
[16597]1072        for (int i = 0; i < trees.Length; i++) {
1073          var varName = calculatedVariables[i];
1074          var oldVal = variableValues[varName];
[16251]1075          var newVal = Tuple.Create(
[16597]1076            oldVal.Item1 + h * deltaF[i],
1077            oldVal.Item2 + deltaG[i].Scale(h)
[16250]1078          );
[16597]1079          variableValues[varName] = newVal;
[16250]1080        }
[16398]1081
[16597]1082        // TODO perf
[16399]1083        foreach (var node in nodeValues.Keys.ToArray()) {
1084          if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
[16398]1085            // update values for (latent) variables
[16251]1086            var varName = node.Symbol.Name;
1087            nodeValues[node] = variableValues[varName];
1088          }
1089        }
[16250]1090      }
1091    }
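    // Example (illustrative only): for dy/dt = θ0*y a single explicit Euler step with
    // h = 1/numericIntegrationSteps updates the tuple (y, ∇y) as
    //   y  ← y + h * θ0 * y
    //   ∇y ← ∇y + h * (θ0 * ∇y + y * e_θ0)
    // where e_θ0 is the unit gradient vector seeded for θ0 above.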
1092
[16597]1093    private static void InterpretRec(
[15964]1094      ISymbolicExpressionTreeNode node,
[16597]1095      Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> nodeValues,      // contains value and gradient vector for a node (variables and constants only)
[16600]1096      out double z,
1097      out Vector dz
[16597]1098      ) {
[16600]1099      double f, g;
1100      Vector df, dg;
[16215]1101      switch (node.Symbol.Name) {
[15964]1102        case "+": {
[16600]1103            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1104            InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1105            z = f + g;
            dz = df + dg;
[16597]1107            break;
[15964]1108          }
1109        case "*": {
[16600]1110            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1111            InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
1112            z = f * g;
            dz = df * g + f * dg; // product rule: (f*g)' = f'*g + f*g'
[16597]1114            break;
[15964]1115          }
1116
1117        case "-": {
[16599]1118            if (node.SubtreeCount == 1) {
[16600]1119              InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1120              z = -f;
1121              dz = -df;
[16599]1122            } else {
[16600]1123              InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1124              InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
[16599]1125
[16600]1126              z = f - g;
1127              dz = df - dg;
[16599]1128            }
[16597]1129            break;
[15964]1130          }
1131        case "%": {
[16600]1132            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1133            InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
[15964]1134
1135            // protected division
[16600]1136            if (g.IsAlmost(0.0)) {
1137              z = 0;
1138              dz = Vector.Zero;
[15964]1139            } else {
[16600]1140              z = f / g;
              dz = -f / (g * g) * dg + df / g; // quotient rule: (f/g)' = f'/g - f*g'/g²
[15964]1142            }
[16597]1143            break;
[15964]1144          }
[16215]1145        case "sin": {
[16600]1146            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1147            z = Math.Sin(f);
1148            dz = Math.Cos(f) * df;
[16597]1149            break;
[16215]1150          }
1151        case "cos": {
[16600]1152            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1153            z = Math.Cos(f);
1154            dz = -Math.Sin(f) * df;
[16597]1155            break;
[16215]1156          }
[16329]1157        case "sqr": {
[16600]1158            InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1159            z = f * f;
1160            dz = 2.0 * f * df;
[16597]1161            break;
[16329]1162          }
[15964]1163        default: {
[16597]1164            var t = nodeValues[node];
[16600]1165            z = t.Item1;
1166            dz = Vector.CreateNew(t.Item2);
[16597]1167            break;
[15964]1168          }
1169      }
1170    }
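    // Example (illustrative only): for the tree θ0 * x with nodeValues[θ0] = (2.0, e_θ0) and
    // nodeValues[x] = (3.0, ∇x), InterpretRec returns z = 6.0 and dz = 3.0 * e_θ0 + 2.0 * ∇x
    // via the product rule in the "*" case above.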
[15968]1171    #endregion
[15964]1172
1173    #region events
[15968]1174    /*
1175     * Dependencies between parameters:
1176     *
1177     * ProblemData
1178     *    |
1179     *    V
[15970]1180     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
1181     *               |   |                 |                   |
1182     *               V   V                 |                   |
1183     *             Grammar <---------------+-------------------
[15968]1184     *                |
1185     *                V
1186     *            Encoding
1187     */
[15964]1188    private void RegisterEventHandlers() {
[15968]1189      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
[16215]1190      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
[15968]1191
1192      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
[16215]1193      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
[15968]1194
1195      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
[16215]1196      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
[15968]1197
1198      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
[15970]1199
1200      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
[15964]1201    }
1202
[15970]1203    private void NumLatentVariablesChanged(object sender, EventArgs e) {
1204      UpdateGrammarAndEncoding();
1205    }
1206
[15968]1207    private void MaximumLengthChanged(object sender, EventArgs e) {
1208      UpdateGrammarAndEncoding();
1209    }
1210
1211    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
1212      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1213    }
1214
[16268]1215    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
[15968]1216      UpdateGrammarAndEncoding();
1217    }
1218
1219    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
1220      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1221    }
1222
[16268]1223    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
[15968]1224      UpdateGrammarAndEncoding();
1225    }
1226
[15964]1227    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
[15968]1228      ProblemDataParameter.Value.Changed += ProblemData_Changed;
[15964]1229      OnProblemDataChanged();
1230      OnReset();
1231    }
1232
1233    private void ProblemData_Changed(object sender, EventArgs e) {
[15968]1234      OnProblemDataChanged();
[15964]1235      OnReset();
1236    }
1237
1238    private void OnProblemDataChanged() {
[15968]1239      UpdateTargetVariables();        // implicitly updates other dependent parameters
[15964]1240      var handler = ProblemDataChanged;
[16215]1241      if (handler != null) handler(this, EventArgs.Empty);
[15964]1242    }
1243
[15968]1244    #endregion
1245
    #region helper
1247
1248    private void InitAllParameters() {
1249      UpdateTargetVariables(); // implicitly updates the grammar and the encoding     
1250    }
1251
[16268]1252    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
1253      var l = new CheckedItemList<StringValue>();
[15968]1254      l.Add(new StringValue("+").AsReadOnly());
1255      l.Add(new StringValue("*").AsReadOnly());
1256      l.Add(new StringValue("%").AsReadOnly());
1257      l.Add(new StringValue("-").AsReadOnly());
[16215]1258      l.Add(new StringValue("sin").AsReadOnly());
1259      l.Add(new StringValue("cos").AsReadOnly());
[16329]1260      l.Add(new StringValue("sqr").AsReadOnly());
[15968]1261      return l.AsReadOnly();
1262    }
1263
1264    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
[16399]1265      return n.Symbol.Name[0] == 'θ';
[15968]1266    }
[15970]1267    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
[16399]1268      return n.Symbol.Name[0] == 'λ';
[15970]1269    }
[16251]1270    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
1271      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
1272    }
[15968]1273
1274
1275    private void UpdateTargetVariables() {
[16268]1276      var currentlySelectedVariables = TargetVariables.CheckedItems
1277        .OrderBy(i => i.Index)
1278        .Select(i => i.Value.Value)
1279        .ToArray();
[15968]1280
      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      foreach (var item in newVariablesList) {
        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
      }
1290      TargetVariablesParameter.Value = newVariablesList;
1291    }
1292
1293    private void UpdateGrammarAndEncoding() {
1294      var encoding = new MultiEncoding();
1295      var g = CreateGrammar();
[16215]1296      foreach (var targetVar in TargetVariables.CheckedItems) {
[15970]1297        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
[15968]1298      }
[16215]1299      for (int i = 1; i <= NumberOfLatentVariables; i++) {
[15970]1300        encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
1301      }
[15968]1302      Encoding = encoding;
1303    }
1304
1305    private ISymbolicExpressionGrammar CreateGrammar() {
[15964]1306      // whenever ProblemData is changed we create a new grammar with the necessary symbols
1307      var g = new SimpleSymbolicExpressionGrammar();
[16597]1308      var unaryFunc = new string[] { "sin", "cos", "sqr" };
1309      var binaryFunc = new string[] { "+", "-", "*", "%" };
1310      foreach (var func in unaryFunc) {
1311        if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
1312      }
1313      foreach (var func in binaryFunc) {
1314        if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
1315      }
[15964]1316
[16268]1317      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
[15964]1318        g.AddTerminalSymbol(variableName);
1319
1320      // generate symbols for numeric parameters for which the value is optimized using AutoDiff
1321      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
1322      var numericConstantsFactor = 2.0;
[16215]1323      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
[15964]1324        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
1325      }
[15970]1326
1327      // generate symbols for latent variables
[16215]1328      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        g.AddTerminalSymbol("λ" + i); // latent variable
1330      }
1331
[15968]1332      return g;
[15964]1333    }
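    // For example, with three allowed input variables and two checked targets the loop above adds
    // 2.0 * (3 + 2) = 10 terminal symbols θ0 .. θ9, so (assuming uniform symbol frequencies) a
    // numeric parameter is roughly twice as likely to be sampled as a variable in random trees.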
[15968]1334
1339    private ISymbolicExpressionTreeNode FixParameters(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
1340      ISymbolicExpressionTreeNode translatedNode = null;
1341      if (n.Symbol is StartSymbol) {
1342        translatedNode = new StartSymbol().CreateTreeNode();
1343      } else if (n.Symbol is ProgramRootSymbol) {
1344        translatedNode = new ProgramRootSymbol().CreateTreeNode();
1345      } else if (n.Symbol.Name == "+") {
1346        translatedNode = new SimpleSymbol("+", 2).CreateTreeNode();
1347      } else if (n.Symbol.Name == "-") {
1348        translatedNode = new SimpleSymbol("-", 2).CreateTreeNode();
1349      } else if (n.Symbol.Name == "*") {
1350        translatedNode = new SimpleSymbol("*", 2).CreateTreeNode();
1351      } else if (n.Symbol.Name == "%") {
1352        translatedNode = new SimpleSymbol("%", 2).CreateTreeNode();
1353      } else if (n.Symbol.Name == "sin") {
1354        translatedNode = new SimpleSymbol("sin", 1).CreateTreeNode();
1355      } else if (n.Symbol.Name == "cos") {
1356        translatedNode = new SimpleSymbol("cos", 1).CreateTreeNode();
1357      } else if (n.Symbol.Name == "sqr") {
1358        translatedNode = new SimpleSymbol("sqr", 1).CreateTreeNode();
1359      } else if (IsConstantNode(n)) {
1360        translatedNode = new SimpleSymbol("c_" + nextParIdx, 0).CreateTreeNode();
1361        nextParIdx++;
1362      } else {
1363        translatedNode = new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();
1364      }
1365      foreach (var child in n.Subtrees) {
1366        translatedNode.AddSubtree(FixParameters(child, parameterValues, ref nextParIdx));
1367      }
1368      return translatedNode;
1369    }
1370
1371
[16251]1372    private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
1373      ISymbolicExpressionTreeNode translatedNode = null;
1374      if (n.Symbol is StartSymbol) {
1375        translatedNode = new StartSymbol().CreateTreeNode();
1376      } else if (n.Symbol is ProgramRootSymbol) {
1377        translatedNode = new ProgramRootSymbol().CreateTreeNode();
1378      } else if (n.Symbol.Name == "+") {
1379        translatedNode = new Addition().CreateTreeNode();
1380      } else if (n.Symbol.Name == "-") {
1381        translatedNode = new Subtraction().CreateTreeNode();
1382      } else if (n.Symbol.Name == "*") {
1383        translatedNode = new Multiplication().CreateTreeNode();
1384      } else if (n.Symbol.Name == "%") {
1385        translatedNode = new Division().CreateTreeNode();
1386      } else if (n.Symbol.Name == "sin") {
1387        translatedNode = new Sine().CreateTreeNode();
1388      } else if (n.Symbol.Name == "cos") {
1389        translatedNode = new Cosine().CreateTreeNode();
[16329]1390      } else if (n.Symbol.Name == "sqr") {
1391        translatedNode = new Square().CreateTreeNode();
[16251]1392      } else if (IsConstantNode(n)) {
1393        var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
1394        constNode.Value = parameterValues[nextParIdx];
1395        nextParIdx++;
1396        translatedNode = constNode;
1397      } else {
1398        // assume a variable name
1399        var varName = n.Symbol.Name;
1400        var varNode = (VariableTreeNode)new Variable().CreateTreeNode();
1401        varNode.Weight = 1.0;
1402        varNode.VariableName = varName;
1403        translatedNode = varNode;
1404      }
1405      foreach (var child in n.Subtrees) {
1406        translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
1407      }
1408      return translatedNode;
1409    }
[15964]1410    #endregion
1411
1412    #region Import & Export
1413    public void Load(IRegressionProblemData data) {
1414      Name = data.Name;
1415      Description = data.Description;
1416      ProblemData = data;
1417    }
1418
1419    public IRegressionProblemData Export() {
1420      return ProblemData;
1421    }
[16600]1422
1423    public class OptimizationData {
1424      public readonly ISymbolicExpressionTree[] trees;
1425      public readonly string[] targetVariables;
1426      public readonly IRegressionProblemData problemData;
1427      public readonly double[,] targetValues;
1428      public readonly IntRange[] episodes;
1429      public readonly int numericIntegrationSteps;
1430      public readonly string[] latentVariables;
1431      public readonly string odeSolver;
1432
1433      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, IRegressionProblemData problemData, double[,] targetValues, IntRange[] episodes, int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
1434        this.trees = trees;
1435        this.targetVariables = targetVars;
1436        this.problemData = problemData;
1437        this.targetValues = targetValues;
1438        this.episodes = episodes;
1439        this.numericIntegrationSteps = numericIntegrationSteps;
1440        this.latentVariables = latentVariables;
1441        this.odeSolver = odeSolver;
1442      }
1443    }
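    // A minimal construction sketch (variable names are illustrative, not from the original code):
    //   var optData = new OptimizationData(trees, new[] { "x", "y" }, problemData, targetValues,
    //     new[] { new IntRange(0, 100) }, numericIntegrationSteps: 10,
    //     latentVariables: new string[0], odeSolver: odeSolverName);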
[15964]1444    #endregion
1445
1446  }
1447}