
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16599

Last change on this file since 16599 was 16599, checked in by gkronber, 5 years ago

#2925: changed code to use LM instead of LBFGS and removed standardization

File size: 69.3 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  // Another possibility is spline smoothing of the data (over time) to obtain, for each target variable,
  // a denoised value and the derivative dy/dt for all observations;
  // the regression can then be applied directly to dy/dt (with the denoised y values as inputs)
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using Levenberg-Marquardt). More iterations make the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower, fewer steps reduce the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));

      var solversStr = new string[] { "HeuristicLab", "CVODES" };
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: do not clear selection of target variables when the input variables are changed (keep selected target variables)
      // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
      // TODO: use training range as default training episode
      // TODO: write back optimized parameters to solution?
      // TODO: optimization of starting values for latent variables in CVODES solver

    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
                                                                                                      // write back optimized parameters to tree nodes instead of the separate OptTheta variable
                                                                                                      // retrieve optimized parameters from nodes?

      var problemData = Standardize(ProblemData);
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          double[] optTheta;
          double nmse;
          OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        double[] optTheta;
        double nmse;
        OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
        individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
        return nmse;
      }
    }
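    // Note: with per-episode optimization, the quality returned above is the size-weighted
    // mean of the per-episode errors:
    //   NMSE_total = ( Σ_e nmse_e · |e| ) / ( Σ_e |e| )
    // which reduces to the plain single-run value when there is only one episode.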

    private IRegressionProblemData Standardize(IRegressionProblemData problemData) {
      // var standardizedDataset = new Dataset(
      //   problemData.Dataset.DoubleVariables,
      //   problemData.Dataset.DoubleVariables.Select(v => Standardize(problemData.Dataset.GetReadOnlyDoubleValues(v)).ToList()));
      // return new RegressionProblemData(standardizedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      return new RegressionProblemData(problemData.Dataset, problemData.AllowedInputVariables, problemData.TargetVariable);
    }

    public static void OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver,
      out double[] optTheta,
      out double nmse) {
      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
      var targetValues = new double[rows.Length, targetVars.Length];


      // collect values of all target variables
      var colIdx = 0;
      foreach (var targetVar in targetVars) {
        int rowIdx = 0;
        foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
          targetValues[rowIdx, colIdx] = value;
          rowIdx++;
        }
        colIdx++;
      }

      // NOTE: the order of values in the parameter vector matches the prefix order of constant nodes in the trees
      var paramNodes = new List<ISymbolicExpressionTreeNode>();
      foreach (var t in trees) {
        foreach (var n in t.IterateNodesPrefix()) {
          if (IsConstantNode(n))
            paramNodes.Add(n);
        }
      }
      // init params randomly (uniformly in [-1e-2, 1e-2])
      // theta contains parameter values for trees and then the initial values for latent variables (a separate vector for each episode)
      // initial values for latent variables are also optimized
      var theta = new double[paramNodes.Count + latentVariables.Length * episodes.Count()];
      for (int i = 0; i < theta.Length; i++)
        theta[i] = random.NextDouble() * 2.0e-2 - 1.0e-2;

      optTheta = new double[0];
      if (theta.Length > 0) {
        alglib.minlmstate state;
        alglib.minlmreport report;
        alglib.minlmcreatevj(targetValues.Length, theta, out state);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
        alglib.minlmsetgradientcheck(state, 1.0e-3);
        //TODO: create a type
        var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver };
        alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

        alglib.minlmresults(state, out optTheta, out report);

        /*************************************************************************
         Levenberg-Marquardt algorithm results

         INPUT PARAMETERS:
             State   -   algorithm state

         OUTPUT PARAMETERS:
             X       -   array[0..N-1], solution
             Rep     -   optimization  report;  includes  termination   codes   and
                         additional information. Termination codes are listed below,
                         see comments for this structure for more info.
                         Termination code is stored in rep.terminationtype field:
                         * -8    optimizer detected NAN/INF values either in the
                                 function itself, or in its Jacobian
                         * -7    derivative correctness check failed;
                                 see rep.funcidx, rep.varidx for
                                 more information.
                         * -3    constraints are inconsistent
                         *  2    relative step is no more than EpsX.
                         *  5    MaxIts steps was taken
                         *  7    stopping conditions are too stringent,
                                 further improvement is impossible
                         *  8    terminated by user who called minlmrequesttermination().
                                 X contains point which was "current accepted" when
                                 termination request was submitted.

           -- ALGLIB --
              Copyright 10.03.2009 by Bochkanov Sergey
         *************************************************************************/
        if (report.terminationtype < 0) { nmse = 10.0; return; }

        nmse = state.f; //TODO check

        // var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver };
        // EvaluateObjectiveVector(optTheta, ref nmse, grad, myState);
        if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10.0; return; } // return a large value (TODO: be consistent by using NMSE)
      } else {
        // no parameters
        nmse = targetValues.Length;
      }

    }

    // private static IEnumerable<double> Standardize(IEnumerable<double> xs) {
    //   var m = xs.Average();
    //   var s = xs.StandardDeviationPop();
    //   return xs.Select(xi => (xi - m) / s);
    // }

    alglib.ndimensional_fvec fvec;
    alglib.ndimensional_jac jac;

    private static void EvaluateObjectiveVector(double[] x, double[] fi, object obj) {
      EvaluateObjectiveVectorAndJacobian(x, fi, null, obj);
    }

    private static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object obj) {
      var trees = (ISymbolicExpressionTree[])((object[])obj)[0];
      var targetVariables = (string[])((object[])obj)[1];
      var problemData = (IRegressionProblemData)((object[])obj)[2];
      var targetValues = (double[,])((object[])obj)[3];
      var episodes = (IntRange[])((object[])obj)[4];
      var numericIntegrationSteps = (int)((object[])obj)[5];
      var latentVariables = (string[])((object[])obj)[6];
      var odeSolver = (string)((object[])obj)[7];


      Tuple<double, Vector>[][] predicted = null; // one array of predictions for each episode
      // TODO: stop integration early for diverging solutions
      predicted = Integrate(
          trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
          problemData.Dataset,
          problemData.AllowedInputVariables.ToArray(),
          targetVariables,
          latentVariables,
          episodes,
          x,
          odeSolver,
          numericIntegrationSteps).ToArray();

      // clear all result data structures
      for (int j = 0; j < fi.Length; j++) {
        fi[j] = 10.0;
        if (jac != null) Array.Clear(jac, 0, jac.Length);
      }

      if (predicted.Length != targetValues.GetLength(0)) {
        return;
      }

      // for normalized MSE = 1/variance(t) * MSE(t, pred)
      // TODO: Perf. (by standardization of target variables before evaluation of all trees)
      // var invVar = Enumerable.Range(0, targetVariables.Length)
      //   .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
      //   .Select(vec => vec.StandardDeviation()) // TODO: variance of stddev
      //   .Select(v => 1.0 / v)
      //   .ToArray();

      // double[] invVar = Enumerable.Repeat(1.0, targetVariables.Length).ToArray();


      // fill the residual vector; LM (alglib.minlm) minimizes the sum of squared residuals
      int n = predicted.Length;
      double invN = 1.0 / n;
      int i = 0;
      int r = 0;
      foreach (var y_pred in predicted) {
        // y_pred contains the predicted values for target variables first and then predicted values for latent variables
        for (int c = 0; c < targetVariables.Length; c++) {

          var y_pred_f = y_pred[c].Item1;
          var y = targetValues[r, c];

          fi[i] = (y - y_pred_f);
          var g = y_pred[c].Item2;
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[i, j] = -g[j];
          i++; // we put the errors for each target variable after each other
        }
        r++;
      }
    }
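    // For reference: alglib.minlm minimizes F(x) = Σ_i fi(x)². With the residuals
    // fi = y_i - pred_i(x) filled in above, F(x) is the sum of squared errors over all
    // rows and target variables, and the Jacobian entries are
    //   jac[i, j] = ∂fi/∂x_j = -∂pred_i/∂x_j
    // which is why the autodiff gradient g of each prediction is negated when written to jac.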

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
      if (!results.ContainsKey("Squared error and gradient")) {
        results.Add(new Result("Squared error and gradient", typeof(DataTable)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = Standardize(ProblemData);
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see Evaluate
          var trainingPrediction = Integrate(
                                   trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
                                   problemData.Dataset,
                                   problemData.AllowedInputVariables.ToArray(),
                                   targetVars,
                                   latentVariables,
                                   episodes,
                                   optTheta,
                                   OdeSolver,
                                   NumericIntegrationSteps).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();


        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see Evaluate
        var trainingPrediction = Integrate(
                                   trees,  // we assume trees contain expressions for the change of each target variable over time dy/dt
                                   problemData.Dataset,
                                   problemData.AllowedInputVariables.ToArray(),
                                   targetVars,
                                   latentVariables,
                                   TrainingEpisodes,
                                   optTheta,
                                   OdeSolver,
                                   NumericIntegrationSteps).ToArray();
        // for target values and latent variables
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            for (int paramIdx = 0; paramIdx < optTheta.Length; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", trainingPrediction.Select(arr => arr[colIdx].Item2[paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        var errorTable = new DataTable("Squared error and gradient");
        var seRow = new DataRow("Squared error");
        var gradientRows = Enumerable.Range(0, optTheta.Length).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        errorTable.Rows.Add(seRow);
        foreach (var gRow in gradientRows) {
          gRow.VisualProperties.SecondYAxis = true;
          errorTable.Rows.Add(gRow);
        }
        var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        int r = 0;
        double invN = 1.0 / trainingRows.Count();
        foreach (var y_pred in trainingPrediction) {
          // calculate objective function gradient
          double f_i = 0.0;
          Vector g_i = Vector.CreateNew(new double[optTheta.Length]);
          for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
            var y_pred_f = y_pred[colIdx].Item1;
            var y = targetValues[colIdx][r];

            var res = (y - y_pred_f);
            var ressq = res * res;
            f_i += ressq * invN /* * Math.Exp(-0.2 * r) */;
            g_i = g_i - 2.0 * res * y_pred[colIdx].Item2 * invN /* * Math.Exp(-0.2 * r)*/;
          }
          seRow.Values.Add(f_i);
          for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
          r++;
        }
        results["Squared error and gradient"].Value = errorTable;

        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testPrediction = Integrate(
         trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
         problemData.Dataset,
         problemData.AllowedInputVariables.ToArray(),
         targetVars,
         latentVariables,
         new IntRange[] { ProblemData.TestPartition },
         optTheta,
         OdeSolver,
         NumericIntegrationSteps).ToArray();

        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var testDataTable = new DataTable(targetVar + " prediction (test)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            testDataTable.Rows.Add(actualValuesRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);

          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var testDataTable = new DataTable(latentVar + " prediction (test)");
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            var emptyRow = new DataRow(latentVar);
            testDataTable.Rows.Add(emptyRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);
          }
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();


        #region simplification of models
        // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var optimizedTrees = new List<ISymbolicExpressionTree>();
        int nextParIdx = 0;
        for (int idx = 0; idx < trees.Length; idx++) {
          var tree = trees[idx];
          optimizedTrees.Add(new SymbolicExpressionTree(FixParameters(tree.Root, optTheta.ToArray(), ref nextParIdx)));
        }
        var ds = problemData.Dataset;
        var newVarNames = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
        var allVarNames = ds.DoubleVariables.Concat(newVarNames);
        var allVarValues = ds.DoubleVariables.Select(varName => ds.GetDoubleValues(varName).ToList())
          .Concat(Enumerable.Range(0, nextParIdx).Select(i => Enumerable.Repeat(optTheta[i], ds.Rows).ToList()))
          .ToList();
        var newDs = new Dataset(allVarNames, allVarValues);
        var newProblemData = new RegressionProblemData(newDs, problemData.AllowedInputVariables.Concat(newVarNames).ToArray(), problemData.TargetVariable);
        results["Solution"].Value = new Solution(optimizedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   TrainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);


        nextParIdx = 0;
        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          // when we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
          var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta.ToArray(),
            ref nextParIdx));

          // var shownTree = (SymbolicExpressionTree)tree.Clone();
          // var constantsNodeOrig = tree.IterateNodesPrefix().Where(IsConstantNode);
          // var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode);
          //
          // foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) {
          //   double constantsVal = optTheta[nodeIdx[n.original]];
          //
          //   ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal };
          //
          //   var parentNode = n.shown.Parent;
          //   int replacementIndex = parentNode.IndexOfSubtree(n.shown);
          //   parentNode.RemoveSubtree(replacementIndex);
          //   parentNode.InsertSubtree(replacementIndex, replacementNode);
          // }

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
          models.Add(simplifiedTreeVar);

        }

        results["Models"].Value = models;
        #endregion
      }
    }


    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
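
    // In the forward approach used below, for the ODE y'(t) = f(y(t), p) each sensitivity
    // vector s_i(t) = ∂y(t)/∂p_i satisfies its own linear ODE
    //   s_i'(t) = (∂f/∂y) s_i(t) + ∂f/∂p_i,   s_i(t0) = ∂y(t0)/∂p_i
    // which is integrated alongside y(t); the same expression appears in the sensitivity
    // RHS of the (commented-out) CVODES code below.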

    public static IEnumerable<Tuple<double, Vector>[]> Integrate(
      ISymbolicExpressionTree[] trees, IDataset dataset,
      string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
      double[] parameterValues,
      string odeSolver, int numericIntegrationSteps = 100) {

      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var episodeIdx = 0;
      foreach (var episode in episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);

        // integrate forward starting with known values for the target in t0

        var variableValues = new Dictionary<string, Tuple<double, Vector>>();
        var t0 = rows.First();
        foreach (var varName in inputVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        foreach (var varName in targetVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        // add value entries for latent variables which are also integrated
        // initial values are at the end of the parameter vector
        // separate initial values for each episode
        var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
        foreach (var latentVar in latentVariables) {
          var arr = new double[parameterValues.Length]; // backing array
          arr[initialValueIdx] = 1.0;
          var g = new Vector(arr);
          variableValues.Add(latentVar,
            Tuple.Create(parameterValues[initialValueIdx], g)); // we don't have observations for latent variables therefore we optimize the initial value for each episode
          initialValueIdx++;
        }

        var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding

        // return first value as stored in the dataset
        yield return calculatedVariables
          .Select(calcVarName => variableValues[calcVarName])
          .ToArray();

        var prevT = rows.First(); // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
        foreach (var t in rows.Skip(1)) {
          if (odeSolver == "HeuristicLab")
            IntegrateHL(trees, calculatedVariables, variableValues, parameterValues, numericIntegrationSteps);
          else if (odeSolver == "CVODES")
            throw new NotImplementedException();
          // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;

          // This check doesn't work with the HeuristicLab integrator if there are input variables
          //if (variableValues.Count == targetVariables.Length) {
          // only return the target variables for calculation of errors
          var res = calculatedVariables
            .Select(targetVar => variableValues[targetVar])
            .ToArray();
          if (res.Any(ri => double.IsNaN(ri.Item1) || double.IsInfinity(ri.Item1))) yield break;
          yield return res;
          //} else {
          //  yield break; // stop early on problems
          //}


          // update for next time step
          foreach (var varName in inputVariables) {
            variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
          }
        }
        episodeIdx++;
      }
    }

    #region CVODES

    /*
    /// <summary>
    ///  Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
    /// </summary>
    /// <param name="trees">Each equation in the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="variableValues">The start values of the calculated variables as well as their sensitivities over parameters</param>
    /// <param name="parameterValues">The current parameter values</param>
    /// <param name="t">The time t up to which we need to integrate.</param>
    private static void IntegrateCVODES(
      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
      string[] calculatedVariables, // names of elements of y
      Dictionary<string, Tuple<double, Vector>> variableValues,  //  y (input and output) input: y(t0), output: y(t0+t)
      double[] parameterValues, // p
      double t // duration t for which we want to integrate
      ) {

      // the RHS of the ODE
      // dy/dt = f(y_t,x_t,p)
      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, parameterValues);
      // the Jacobian ∂f/∂y
      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, parameterValues);

      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, parameterValues);

      // setup solver
      int numberOfEquations = trees.Length;
      IntPtr y = IntPtr.Zero;
      IntPtr cvode_mem = IntPtr.Zero;
      IntPtr A = IntPtr.Zero;
      IntPtr yS0 = IntPtr.Zero;
      IntPtr linearSolver = IntPtr.Zero;
      var ns = parameterValues.Length; // number of parameters

      try {
        y = CVODES.N_VNew_Serial(numberOfEquations);
        // init y to current values of variables
        // y must be initialized before calling CVodeInit
        for (int i = 0; i < calculatedVariables.Length; i++) {
          CVODES.NV_Set_Ith_S(y, i, variableValues[calculatedVariables[i]].Item1);
        }

        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);

        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        double relTol = 1.0e-2;
        double absTol = 1.0;
        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
        Debug.Assert(A != IntPtr.Zero);

        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
        Debug.Assert(linearSolver != IntPtr.Zero);

        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
        unsafe {
          // set to initial sensitivities supplied by caller
          for (int pIdx = 0; pIdx < ns; pIdx++) {
            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
              CVODES.NV_Set_Ith_S(yS0_i, varIdx, variableValues[calculatedVariables[varIdx]].Item2[pIdx]);
            }
          }
        }

        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
        Debug.Assert(CVODES.CV_SUCCESS == flag);

        // make one forward integration step
        double tout = 0.0; // first output time
        flag = CVODES.CVode(cvode_mem, t, y, ref tout, CVODES.CV_NORMAL);
        if (flag == CVODES.CV_SUCCESS) {
          Debug.Assert(t == tout);

          // get sensitivities
          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
          Debug.Assert(CVODES.CV_SUCCESS == flag);

          // update variableValues based on integration results
          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
            var gArr = new double[parameterValues.Length];
            for (var pIdx = 0; pIdx < parameterValues.Length; pIdx++) {
              unsafe {
                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
              }
            }
            variableValues[calculatedVariables[varIdx]] = Tuple.Create(yi, new Vector(gArr));
          }
        } else {
          variableValues.Clear();   // indicate problems by not returning new values
        }

        // cleanup all allocated objects
      } finally {
        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
      }
    }


    private static CVODES.CVRhsFunc CreateOdeRhs(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {
      // we don't need to calculate a gradient here
      return (double t,
              IntPtr y, // N_Vector, current value of y (input)
              IntPtr ydot, // N_Vector, calculated value of y' (output)
              IntPtr user_data // optional user data, (unused here)
              ) => {
                // TODO: perf
                var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();

                int pIdx = 0;
                foreach (var tree in trees) {
                  foreach (var n in tree.IterateNodesPrefix()) {
                    if (IsConstantNode(n)) {
                      nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we do not need a gradient
                      pIdx++;
                    } else if (n.SubtreeCount == 0) {
                      // for variables and latent variables get the value from variableValues
                      var varName = n.Symbol.Name;
                      var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                      if (varIdx < 0) throw new InvalidProgramException();
                      var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                      nodeValues.Add(n, Tuple.Create(y_i, Vector.Zero)); // no gradient needed
                    }
                  }
                }
                for (int i = 0; i < trees.Length; i++) {
                  var tree = trees[i];
                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
                  CVODES.NV_Set_Ith_S(ydot, i, res_i.Item1);
                }
                return 0;
              };
    }

    private static CVODES.CVDlsJacFunc CreateJac(
      ISymbolicExpressionTree[] trees,
      string[] calculatedVariables,
      double[] parameterValues) {

      return (
        double t, // current time (input)
        IntPtr y, // N_Vector, current value of y (input)
        IntPtr fy, // N_Vector, current value of f (input)
        IntPtr Jac, // SUNMatrix ∂f/∂y (output; row i contains the ∂f_i/∂y vector)
        IntPtr user_data, // optional (unused here)
        IntPtr tmp1, // N_Vector, optional (unused here)
        IntPtr tmp2, // N_Vector, optional (unused here)
        IntPtr tmp3 // N_Vector, optional (unused here)
      ) => {
        // here we need to calculate partial derivatives for the calculated variables y
        var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
        int pIdx = 0;
        foreach (var tree in trees) {
          foreach (var n in tree.IterateNodesPrefix()) {
            if (IsConstantNode(n)) {
              nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
              pIdx++;
            } else if (n.SubtreeCount == 0) {
              // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
              var varName = n.Symbol.Name;
              var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
              if (varIdx < 0) throw new InvalidProgramException();

              var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
              var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
              gArr[varIdx] = 1.0;
              var g = new Vector(gArr);
              nodeValues.Add(n, Tuple.Create(y_i, g));
            }
          }
        }

        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
          var g = res.Item2;
          for (int j = 0; j < calculatedVariables.Length; j++) {
            CVODES.SUNDenseMatrix_Set(Jac, i, j, g[j]);
          }
        }
        return 0; // on success
      };
    }


    // to calculate sensitivities RHS for all equations at once
    // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store in ySdot.
    // Index i refers to parameters, dimensionality of matrix and vectors is number of equations
    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, double[] parameterValues) {
      return (
              int Ns, // number of parameters
              double t, // current time
              IntPtr y, // N_Vector y(t) (input)
              IntPtr ydot, // N_Vector dy/dt(t) (input)
              IntPtr yS, // N_Vector*, one vector for each parameter (input)
              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
              IntPtr user_data, // optional (unused here)
              IntPtr tmp1, // N_Vector, optional (unused here)
              IntPtr tmp2 // N_Vector, optional (unused here)
        ) => {
          // here we need to calculate partial derivatives for the calculated variables y as well as for the parameters
          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
          var d = calculatedVariables.Length + parameterValues.Length; // dimensionality of gradient
          // first collect variable values
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsVariableNode(n)) {
                // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
                var varName = n.Symbol.Name;
                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
                if (varIdx < 0) throw new InvalidProgramException();

                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
                var gArr = new double[d]; // backing array
                gArr[varIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(y_i, g));
              }
            }
          }
          // then collect constants
          int pIdx = 0;
          foreach (var tree in trees) {
            foreach (var n in tree.IterateNodesPrefix()) {
              if (IsConstantNode(n)) {
                var gArr = new double[d];
                gArr[calculatedVariables.Length + pIdx] = 1.0;
                var g = new Vector(gArr);
                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], g));
                pIdx++;
              }
            }
          }
          // gradient vector is [∂f/∂y_1, ∂f/∂y_2, ... ∂f/∂y_N, ∂f/∂p_1 ... ∂f/∂p_K]


          for (pIdx = 0; pIdx < Ns; pIdx++) {
            unsafe {
              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
              CVODES.N_VConst_Serial(0.0, sDot_pi);
            }
          }

          for (int i = 0; i < trees.Length; i++) {
            var tree = trees[i];
            var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
            var g = res.Item2;


            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i

            for (pIdx = 0; pIdx < Ns; pIdx++) {
              unsafe {
                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);

                var v = CVODES.NV_Get_Ith_S(sDot_pi, i);
                // (∂f/∂y)s_i(t)
                var p = 0.0;
                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
                  p += g[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
                }
                // + ∂f/∂p_i
                CVODES.NV_Set_Ith_S(sDot_pi, i, v + p + g[calculatedVariables.Length + pIdx]);
              }
            }

          }
          return 0; // on success
        };
    }
    */
    #endregion

1060    private static void IntegrateHL(
1061      ISymbolicExpressionTree[] trees,
1062      string[] calculatedVariables, // names of integrated variables
      Dictionary<string, Tuple<double, Vector>> variableValues, // y (input and output); input: y(t0), output: y(t0+1)
1064      double[] parameterValues,
1065      int numericIntegrationSteps) {
1066
1067      var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1068
1069      // the nodeValues for parameters are constant over time
1070      // TODO: this needs to be done only once for each iteration in gradient descent (whenever parameter values change)
1071      // NOTE: the order must be the same as above (prefix order for nodes)
1072      int pIdx = 0;
1073      foreach (var tree in trees) {
1074        foreach (var node in tree.Root.IterateNodesPrefix()) {
1075          if (IsConstantNode(node)) {
1076            var gArr = new double[parameterValues.Length]; // backing array
1077            gArr[pIdx] = 1.0;
1078            var g = new Vector(gArr);
1079            nodeValues.Add(node, new Tuple<double, Vector>(parameterValues[pIdx], g));
1080            pIdx++;
1081          } else if (node.SubtreeCount == 0) {
1082            // for (latent) variables get the values from variableValues
1083            var varName = node.Symbol.Name;
1084            nodeValues.Add(node, variableValues[varName]);
1085          }
1086        }
1087      }
1088
1089      double[] deltaF = new double[calculatedVariables.Length];
1090      Vector[] deltaG = new Vector[calculatedVariables.Length];
1091
1092      double h = 1.0 / numericIntegrationSteps;
1093      for (int step = 0; step < numericIntegrationSteps; step++) {
1094        //var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
1095        for (int i = 0; i < trees.Length; i++) {
1096          var tree = trees[i];
1097          var targetVarName = calculatedVariables[i];
1098
1099          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
1100          double f; Vector g;
1101          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
1102          deltaF[i] = f;
1103          deltaG[i] = g;
1104        }
1105
        // update variableValues for the next step (explicit Euler step: y ← y + h·f(y))
1107        for (int i = 0; i < trees.Length; i++) {
1108          var varName = calculatedVariables[i];
1109          var oldVal = variableValues[varName];
1110          var newVal = Tuple.Create(
1111            oldVal.Item1 + h * deltaF[i],
1112            oldVal.Item2 + deltaG[i].Scale(h)
1113          );
1114          variableValues[varName] = newVal;
1115        }
1116
1117        // TODO perf
1118        foreach (var node in nodeValues.Keys.ToArray()) {
1119          if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
1120            // update values for (latent) variables
1121            var varName = node.Symbol.Name;
1122            nodeValues[node] = variableValues[varName];
1123          }
1124        }
1125      }
1126    }
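
    // Illustrative usage sketch (not called anywhere): integrate a single hypothetical
    // target variable "x" over one unit time step using ten explicit Euler sub-steps.
    // Assumes the given tree references only the variable "x" and a single parameter
    // node θ0; all names here are examples, not part of the problem definition.
    private static void IntegrateHLUsageSketch(ISymbolicExpressionTree tree, double theta0) {
      var variableValues = new Dictionary<string, Tuple<double, Vector>>();
      // initial condition y(t0) = 1.0 with zero sensitivity dy(t0)/dθ0
      variableValues.Add("x", Tuple.Create(1.0, new Vector(new[] { 0.0 })));
      IntegrateHL(new[] { tree }, new[] { "x" }, variableValues, new[] { theta0 }, 10);
      // variableValues["x"] now holds y(t0+1) and the sensitivity dy(t0+1)/dθ0
    }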
1127
1128    private static void InterpretRec(
1129      ISymbolicExpressionTreeNode node,
1130      Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> nodeValues,      // contains value and gradient vector for a node (variables and constants only)
1131      out double f,
1132      out Vector g
1133      ) {
1134      double fl, fr;
1135      Vector gl, gr;
1136      switch (node.Symbol.Name) {
1137        case "+": {
1138            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1139            InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
1140            f = fl + fr;
1141            g = Vector.AddTo(gl, gr);
1142            break;
1143          }
1144        case "*": {
1145            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1146            InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
1147            f = fl * fr;
1148            g = Vector.AddTo(gl.Scale(fr), gr.Scale(fl)); // f'*g + f*g'
1149            break;
1150          }
1151
1152        case "-": {
1153            if (node.SubtreeCount == 1) {
1154              InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1155              f = -fl;
1156              g = gl.Scale(-1.0);
1157            } else {
1158              InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1159              InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
1160
1161              f = fl - fr;
1162              g = Vector.Subtract(gl, gr);
1163            }
1164            break;
1165          }
1166        case "%": {
1167            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1168            InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
1169
1170            // protected division
1171            if (fr.IsAlmost(0.0)) {
1172              f = 0;
1173              g = Vector.Zero;
1174            } else {
1175              f = fl / fr;
1176              g = Vector.AddTo(gr.Scale(fl * -1.0 / (fr * fr)), gl.Scale(1.0 / fr)); // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'
1177            }
1178            break;
1179          }
1180        case "sin": {
1181            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1182            f = Math.Sin(fl);
1183            g = gl.Scale(Math.Cos(fl));
1184            break;
1185          }
1186        case "cos": {
1187            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1188            f = Math.Cos(fl);
1189            g = gl.Scale(-Math.Sin(fl));
1190            break;
1191          }
1192        case "sqr": {
1193            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
1194            f = fl * fl;
1195            g = gl.Scale(2.0 * fl);
1196            break;
1197          }
1198        default: {
1199            var t = nodeValues[node];
1200            f = t.Item1;
1201            g = Vector.CreateNew(t.Item2);
1202            break;
1203          }
1204      }
1205    }
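
    // Worked example (sketch): forward-mode differentiation of f = θ0 * x at θ0 = 2, x = 3.
    // The one-element gradient vectors hold ∂/∂θ0; all symbol names are illustrative only.
    private static void InterpretRecExample() {
      var product = new SimpleSymbol("*", 2).CreateTreeNode();
      var theta = new SimpleSymbol("θ0", 0).CreateTreeNode();
      var x = new SimpleSymbol("x", 0).CreateTreeNode();
      product.AddSubtree(theta);
      product.AddSubtree(x);

      var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
      nodeValues.Add(theta, Tuple.Create(2.0, new Vector(new[] { 1.0 }))); // ∂θ0/∂θ0 = 1
      nodeValues.Add(x, Tuple.Create(3.0, new Vector(new[] { 0.0 })));     // x does not depend on θ0

      double f; Vector g;
      InterpretRec(product, nodeValues, out f, out g);
      Debug.Assert(f.IsAlmost(6.0));    // θ0 * x = 6
      Debug.Assert(g[0].IsAlmost(3.0)); // d(θ0 * x)/dθ0 = x = 3
    }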
1206    #endregion
1207
1208    #region events
1209    /*
1210     * Dependencies between parameters:
1211     *
1212     * ProblemData
1213     *    |
1214     *    V
1215     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
1216     *               |   |                 |                   |
1217     *               V   V                 |                   |
1218     *             Grammar <---------------+-------------------
1219     *                |
1220     *                V
1221     *            Encoding
1222     */
1223    private void RegisterEventHandlers() {
1224      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
1225      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
1226
1227      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
1228      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1229
1230      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
1231      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1232
1233      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
1234
1235      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
1236    }
1237
1238    private void NumLatentVariablesChanged(object sender, EventArgs e) {
1239      UpdateGrammarAndEncoding();
1240    }
1241
1242    private void MaximumLengthChanged(object sender, EventArgs e) {
1243      UpdateGrammarAndEncoding();
1244    }
1245
1246    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
1247      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1248    }
1249
1250    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1251      UpdateGrammarAndEncoding();
1252    }
1253
1254    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
1255      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1256    }
1257
1258    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
1259      UpdateGrammarAndEncoding();
1260    }
1261
1262    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
1263      ProblemDataParameter.Value.Changed += ProblemData_Changed;
1264      OnProblemDataChanged();
1265      OnReset();
1266    }
1267
1268    private void ProblemData_Changed(object sender, EventArgs e) {
1269      OnProblemDataChanged();
1270      OnReset();
1271    }
1272
1273    private void OnProblemDataChanged() {
1274      UpdateTargetVariables();        // implicitly updates other dependent parameters
1275      var handler = ProblemDataChanged;
1276      if (handler != null) handler(this, EventArgs.Empty);
1277    }
1278
1279    #endregion
1280
    #region helper
1282
1283    private void InitAllParameters() {
1284      UpdateTargetVariables(); // implicitly updates the grammar and the encoding     
1285    }
1286
1287    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
1288      var l = new CheckedItemList<StringValue>();
1289      l.Add(new StringValue("+").AsReadOnly());
1290      l.Add(new StringValue("*").AsReadOnly());
1291      l.Add(new StringValue("%").AsReadOnly());
1292      l.Add(new StringValue("-").AsReadOnly());
1293      l.Add(new StringValue("sin").AsReadOnly());
1294      l.Add(new StringValue("cos").AsReadOnly());
1295      l.Add(new StringValue("sqr").AsReadOnly());
1296      return l.AsReadOnly();
1297    }
1298
1299    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
1300      return n.Symbol.Name[0] == 'θ';
1301    }
1302    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
1303      return n.Symbol.Name[0] == 'λ';
1304    }
1305    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
1306      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
1307    }
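
    // e.g. a leaf "θ3" is a parameter (constant) node, "λ1" is a latent variable node,
    // and any other leaf such as "x" is an ordinary variable node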
1308
1309
1310    private void UpdateTargetVariables() {
1311      var currentlySelectedVariables = TargetVariables.CheckedItems
1312        .OrderBy(i => i.Index)
1313        .Select(i => i.Value.Value)
1314        .ToArray();
1315
      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      // restore the checked state of all variables that were selected before
      foreach (var item in newVariablesList) {
        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
      }
1325      TargetVariablesParameter.Value = newVariablesList;
1326    }
1327
1328    private void UpdateGrammarAndEncoding() {
1329      var encoding = new MultiEncoding();
1330      var g = CreateGrammar();
      foreach (var targetVar in TargetVariables.CheckedItems) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar.Value.Value + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
      }
1334      for (int i = 1; i <= NumberOfLatentVariables; i++) {
1335        encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
1336      }
1337      Encoding = encoding;
1338    }
1339
1340    private ISymbolicExpressionGrammar CreateGrammar() {
1341      // whenever ProblemData is changed we create a new grammar with the necessary symbols
1342      var g = new SimpleSymbolicExpressionGrammar();
1343      var unaryFunc = new string[] { "sin", "cos", "sqr" };
1344      var binaryFunc = new string[] { "+", "-", "*", "%" };
1345      foreach (var func in unaryFunc) {
1346        if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
1347      }
1348      foreach (var func in binaryFunc) {
1349        if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
1350      }
1351
1352      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
1353        g.AddTerminalSymbol(variableName);
1354
1355      // generate symbols for numeric parameters for which the value is optimized using AutoDiff
1356      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
1357      var numericConstantsFactor = 2.0;
1358      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
1359        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
1360      }
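      // e.g. with two allowed input variables and one checked target variable the loop
      // above creates 2.0 * (2 + 1) = 6 parameter symbols θ0 ... θ5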
1361
1362      // generate symbols for latent variables
1363      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        g.AddTerminalSymbol("λ" + i); // latent variable symbol
1365      }
1366
1367      return g;
1368    }
1369
1374    private ISymbolicExpressionTreeNode FixParameters(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
1375      ISymbolicExpressionTreeNode translatedNode = null;
1376      if (n.Symbol is StartSymbol) {
1377        translatedNode = new StartSymbol().CreateTreeNode();
1378      } else if (n.Symbol is ProgramRootSymbol) {
1379        translatedNode = new ProgramRootSymbol().CreateTreeNode();
1380      } else if (n.Symbol.Name == "+") {
1381        translatedNode = new SimpleSymbol("+", 2).CreateTreeNode();
1382      } else if (n.Symbol.Name == "-") {
1383        translatedNode = new SimpleSymbol("-", 2).CreateTreeNode();
1384      } else if (n.Symbol.Name == "*") {
1385        translatedNode = new SimpleSymbol("*", 2).CreateTreeNode();
1386      } else if (n.Symbol.Name == "%") {
1387        translatedNode = new SimpleSymbol("%", 2).CreateTreeNode();
1388      } else if (n.Symbol.Name == "sin") {
1389        translatedNode = new SimpleSymbol("sin", 1).CreateTreeNode();
1390      } else if (n.Symbol.Name == "cos") {
1391        translatedNode = new SimpleSymbol("cos", 1).CreateTreeNode();
1392      } else if (n.Symbol.Name == "sqr") {
1393        translatedNode = new SimpleSymbol("sqr", 1).CreateTreeNode();
1394      } else if (IsConstantNode(n)) {
1395        translatedNode = new SimpleSymbol("c_" + nextParIdx, 0).CreateTreeNode();
1396        nextParIdx++;
1397      } else {
1398        translatedNode = new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();
1399      }
1400      foreach (var child in n.Subtrees) {
1401        translatedNode.AddSubtree(FixParameters(child, parameterValues, ref nextParIdx));
1402      }
1403      return translatedNode;
1404    }
1405
1406
1407    private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
1408      ISymbolicExpressionTreeNode translatedNode = null;
1409      if (n.Symbol is StartSymbol) {
1410        translatedNode = new StartSymbol().CreateTreeNode();
1411      } else if (n.Symbol is ProgramRootSymbol) {
1412        translatedNode = new ProgramRootSymbol().CreateTreeNode();
1413      } else if (n.Symbol.Name == "+") {
1414        translatedNode = new Addition().CreateTreeNode();
1415      } else if (n.Symbol.Name == "-") {
1416        translatedNode = new Subtraction().CreateTreeNode();
1417      } else if (n.Symbol.Name == "*") {
1418        translatedNode = new Multiplication().CreateTreeNode();
1419      } else if (n.Symbol.Name == "%") {
1420        translatedNode = new Division().CreateTreeNode();
1421      } else if (n.Symbol.Name == "sin") {
1422        translatedNode = new Sine().CreateTreeNode();
1423      } else if (n.Symbol.Name == "cos") {
1424        translatedNode = new Cosine().CreateTreeNode();
1425      } else if (n.Symbol.Name == "sqr") {
1426        translatedNode = new Square().CreateTreeNode();
1427      } else if (IsConstantNode(n)) {
1428        var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
1429        constNode.Value = parameterValues[nextParIdx];
1430        nextParIdx++;
1431        translatedNode = constNode;
1432      } else {
1433        // assume a variable name
1434        var varName = n.Symbol.Name;
1435        var varNode = (VariableTreeNode)new Variable().CreateTreeNode();
1436        varNode.Weight = 1.0;
1437        varNode.VariableName = varName;
1438        translatedNode = varNode;
1439      }
1440      foreach (var child in n.Subtrees) {
1441        translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
1442      }
1443      return translatedNode;
1444    }
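
    // Example (sketch): with parameterValues = { 2.0 }, a tree for "θ0 * x" is translated
    // into Multiplication(Constant(2.0), Variable("x", weight 1.0)); nextParIdx advances to 1.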
1445    #endregion
1446
1447    #region Import & Export
1448    public void Load(IRegressionProblemData data) {
1449      Name = data.Name;
1450      Description = data.Description;
1451      ProblemData = data;
1452    }
1453
1454    public IRegressionProblemData Export() {
1455      return ProblemData;
1456    }
1457    #endregion
1458
1459  }
1460}