
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16222

Last change on this file since 16222 was 16222, checked in by gkronber, 6 years ago

#2925:

  • added comments about parameter identification for differential equation models
  • added source code of the cvodes library (part of SUNDIALS), which provides functionality to efficiently calculate gradients for the parameters of ordinary differential equation models using the 'adjoint state method'.
  • added compiled version of cvodes
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  // Vector is a simple immutable gradient container used for forward-mode auto-diff:
  // each instance holds the partial derivatives of one scalar value w.r.t. all
  // numeric parameters θ. Zero is a shared singleton meaning "no dependency on θ".
  public class Vector {
    public readonly static Vector Zero = new Vector(new double[0]);

    public static Vector operator +(Vector a, Vector b) {
      if (a == Zero) return b;
      if (b == Zero) return a;
      Debug.Assert(a.arr.Length == b.arr.Length);
      var res = new double[a.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = a.arr[i] + b.arr[i];
      return new Vector(res);
    }
    public static Vector operator -(Vector a, Vector b) {
      if (b == Zero) return a;
      if (a == Zero) return -b;
      Debug.Assert(a.arr.Length == b.arr.Length);
      var res = new double[a.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = a.arr[i] - b.arr[i];
      return new Vector(res);
    }
    public static Vector operator -(Vector v) {
      if (v == Zero) return Zero;
      // allocate a new array instead of negating in place; mutating the operand
      // would corrupt gradients that are still referenced elsewhere
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = -v.arr[i];
      return new Vector(res);
    }

    public static Vector operator *(double s, Vector v) {
      if (v == Zero) return Zero;
      if (s == 0.0) return Zero;
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = s * v.arr[i];
      return new Vector(res);
    }

    public static Vector operator *(Vector v, double s) {
      return s * v;
    }
    public static Vector operator *(Vector u, Vector v) {
      if (v == Zero) return Zero;
      if (u == Zero) return Zero;
      Debug.Assert(u.arr.Length == v.arr.Length);
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = u.arr[i] * v.arr[i];
      return new Vector(res);
    }
    public static Vector operator /(double s, Vector v) {
      if (s == 0.0) return Zero;
      if (v == Zero) throw new ArgumentException("Division by zero vector");
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = s / v.arr[i]; // was 1.0 / v.arr[i], which silently ignored s
      return new Vector(res);
    }
    public static Vector operator /(Vector v, double s) {
      return v * (1.0 / s); // parentheses required: v * 1.0 / s parses as (v * 1.0) / s and recurses into this operator
    }

    public static Vector Sin(Vector s) {
      var res = new double[s.arr.Length];
      for (int i = 0; i < res.Length; i++) res[i] = Math.Sin(s.arr[i]);
      return new Vector(res);
    }
    public static Vector Cos(Vector s) {
      var res = new double[s.arr.Length];
      for (int i = 0; i < res.Length; i++) res[i] = Math.Cos(s.arr[i]);
      return new Vector(res);
    }

    private readonly double[] arr; // backing array

    public Vector(double[] v) {
      this.arr = v;
    }

    public void CopyTo(double[] target) {
      Debug.Assert(arr.Length <= target.Length);
      Array.Copy(arr, target, arr.Length);
    }
  }
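
  // Illustrative usage sketch (added example, not part of the original file):
  // gradients are seeded as unit vectors per parameter and combined with the
  // usual differentiation rules, exactly as InterpretRec does below for
  // constant nodes. The class and method names here are hypothetical.
  internal static class VectorUsageExample {
    public static double[] GradientOfProduct() {
      // f(θ) = θ0 * θ1 at θ = (2, 3); expected gradient (3, 2)
      var theta = new double[] { 2.0, 3.0 };
      var g0 = new Vector(new double[] { 1.0, 0.0 }); // seed ∂θ0/∂θ
      var g1 = new Vector(new double[] { 0.0, 1.0 }); // seed ∂θ1/∂θ
      var grad = theta[1] * g0 + theta[0] * g1;       // product rule
      var res = new double[2];
      grad.CopyTo(res);
      return res; // { 3.0, 2.0 }
    }
  }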

  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {

    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemCollection<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemCollection<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

235    public Problem()
236      : base() {
[15968]237      var targetVariables = new CheckedItemCollection<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
238      var functions = CreateFunctionSet();
[15970]239      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
[15968]240      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
241      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
[15970]242      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
243      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
244      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
245      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps makes the algorithm slower, less steps worsens the accuracy of the numeric integration scheme.", new IntValue(10)));
[16153]246      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
[16155]247      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
[15964]248      RegisterEventHandlers();
[15968]249      InitAllParameters();
[16152]250
[16153]251      // TODO: do not clear selection of target variables when the input variables are changed (keep selected target variables)
[16152]252      // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
[16215]253      // TODO: use training range as default training episode
[16153]254
[15964]255    }
256
    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from the individual

      if (OptimizeParametersForEpisodes) {
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          double[] optTheta;
          double nmse;
          OptimizeForEpisodes(trees, random, new[] { episode }, out optTheta, out nmse);
          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        double[] optTheta;
        double nmse;
        OptimizeForEpisodes(trees, random, TrainingEpisodes, out optTheta, out nmse);
        individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
        return nmse;
      }
    }

    private void OptimizeForEpisodes(ISymbolicExpressionTree[] trees, IRandom random, IEnumerable<IntRange> episodes, out double[] optTheta, out double nmse) {
      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and for which we actually have trees
      var targetValues = new double[rows.Length, targetVars.Length];

      // collect values of all target variables
      var colIdx = 0;
      foreach (var targetVar in targetVars) {
        int rowIdx = 0;
        foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
          targetValues[rowIdx, colIdx] = value;
          rowIdx++;
        }
        colIdx++;
      }

      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();

      foreach (var tree in trees) {
        foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
          nodeIdx.Add(node, nodeIdx.Count);
        }
      }

      var theta = nodeIdx.Select(_ => random.NextDouble() * 2.0 - 1.0).ToArray(); // init params randomly from Unif(-1,1)

      optTheta = new double[0];
      if (theta.Length > 0) {
        alglib.minlbfgsstate state;
        alglib.minlbfgsreport report;
        alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
          new object[] { trees, targetVars, problemData, nodeIdx, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables }); // TODO: create a type
        alglib.minlbfgsresults(state, out optTheta, out report);

        /*
         * L-BFGS algorithm results (from the ALGLIB documentation)
         *
         * INPUT PARAMETERS:
         *     State   -   algorithm state
         *
         * OUTPUT PARAMETERS:
         *     X       -   array[0..N-1], solution
         *     Rep     -   optimization report:
         *                 * Rep.TerminationType completion code:
         *                     * -7    gradient verification failed.
         *                             See MinLBFGSSetGradientCheck() for more information.
         *                     * -2    rounding errors prevent further improvement.
         *                             X contains best point found.
         *                     * -1    incorrect parameters were specified
         *                     *  1    relative function improvement is no more than EpsF.
         *                     *  2    relative step is no more than EpsX.
         *                     *  4    gradient norm is no more than EpsG
         *                     *  5    MaxIts steps was taken
         *                     *  7    stopping conditions are too stringent,
         *                             further improvement is impossible
         *                 * Rep.IterationsCount contains iterations count
         *                 * NFEV contains number of function calculations
         */
        if (report.terminationtype < 0) { nmse = 10E6; return; } // optimization failed -> return a large error value
      }

      // perform evaluation for optimal theta to get the quality value
      double[] grad = new double[optTheta.Length];
      nmse = double.NaN;
      EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
        new object[] { trees, targetVars, problemData, nodeIdx, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables });
      if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
    }

    private static void EvaluateObjectiveAndGradient(double[] x, ref double f, double[] grad, object obj) {
      var trees = (ISymbolicExpressionTree[])((object[])obj)[0];
      var targetVariables = (string[])((object[])obj)[1];
      var problemData = (IRegressionProblemData)((object[])obj)[2];
      var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[3];
      var targetValues = (double[,])((object[])obj)[4];
      var episodes = (IntRange[])((object[])obj)[5];
      var numericIntegrationSteps = (int)((object[])obj)[6];
      var latentVariables = (string[])((object[])obj)[7];

      var predicted = Integrate(
        trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
        problemData.Dataset,
        problemData.AllowedInputVariables.ToArray(),
        targetVariables,
        latentVariables,
        episodes,
        nodeIdx,
        x, numericIntegrationSteps).ToArray();

      // for normalized MSE = 1/variance(t) * MSE(t, pred)
      // TODO: Perf. (by standardization of target variables before evaluation of all trees)
      var invVar = Enumerable.Range(0, targetVariables.Length)
        .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
        .Select(vec => vec.Variance())
        .Select(v => 1.0 / v)
        .ToArray();

      // objective function is NMSE
      f = 0.0;
      int n = predicted.Length;
      double invN = 1.0 / n;
      var g = Vector.Zero;
      int r = 0;
      foreach (var y_pred in predicted) {
        for (int c = 0; c < y_pred.Length; c++) {

          var y_pred_f = y_pred[c].Item1;
          var y = targetValues[r, c];

          var res = (y - y_pred_f);
          var ressq = res * res;
          f += ressq * invN * invVar[c];
          g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c];
        }
        r++;
      }

      g.CopyTo(grad);
    }
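
    // Added note: with column weights w_c = 1/Var(y_·c), the objective computed above is
    //   f(θ) = (1/N) Σ_r Σ_c w_c (y_rc - ŷ_rc(θ))²
    // and the gradient accumulated in g via the Vector type is
    //   ∇f(θ) = -(2/N) Σ_r Σ_c w_c (y_rc - ŷ_rc(θ)) ∇ŷ_rc(θ),
    // where ∇ŷ_rc(θ) is the parameter sensitivity returned by Integrate as Item2 of each tuple.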

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from the individual

      // TODO: extract common functionality from Evaluate and Analyze
      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
      foreach (var tree in trees) {
        foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
          nodeIdx.Add(node, nodeIdx.Count);
        }
      }
      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see Evaluate
          var trainingPrediction = Integrate(
                                   trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
                                   problemData.Dataset,
                                   problemData.AllowedInputVariables.ToArray(),
                                   targetVars,
                                   latentVariables,
                                   episodes,
                                   nodeIdx,
                                   optTheta,
                                   NumericIntegrationSteps).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for actual target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();

        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see Evaluate
        var trainingPrediction = Integrate(
                                   trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
                                   problemData.Dataset,
                                   problemData.AllowedInputVariables.ToArray(),
                                   targetVars,
                                   latentVariables,
                                   TrainingEpisodes,
                                   nodeIdx,
                                   optTheta,
                                   NumericIntegrationSteps).ToArray();
        // only for actual target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testPrediction = Integrate(
         trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
         problemData.Dataset,
         problemData.AllowedInputVariables.ToArray(),
         targetVars,
         latentVariables,
         new IntRange[] { ProblemData.TestPartition },
         nodeIdx,
         optTheta,
         NumericIntegrationSteps).ToArray();

        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var testDataTable = new DataTable(targetVar + " prediction (test)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
          testDataTable.Rows.Add(actualValuesRow);
          testDataTable.Rows.Add(predictedValuesRow);
          testList.Add(testDataTable);
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();
        #region simplification of models
        // TODO: the dependency on HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and the original version of each tree

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          // since we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
          int nextParIdx = 0;
          var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta, ref nextParIdx));

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
          models.Add(simplifiedTreeVar);
        }
        results["Models"].Value = models;
        #endregion
      }
    }

    private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
      ISymbolicExpressionTreeNode translatedNode = null;
      if (n.Symbol is StartSymbol) {
        translatedNode = new StartSymbol().CreateTreeNode();
      } else if (n.Symbol is ProgramRootSymbol) {
        translatedNode = new ProgramRootSymbol().CreateTreeNode();
      } else if (n.Symbol.Name == "+") {
        translatedNode = new Addition().CreateTreeNode();
      } else if (n.Symbol.Name == "-") {
        translatedNode = new Subtraction().CreateTreeNode();
      } else if (n.Symbol.Name == "*") {
        translatedNode = new Multiplication().CreateTreeNode();
      } else if (n.Symbol.Name == "%") {
        translatedNode = new Division().CreateTreeNode();
      } else if (n.Symbol.Name == "sin") {
        translatedNode = new Sine().CreateTreeNode();
      } else if (n.Symbol.Name == "cos") {
        translatedNode = new Cosine().CreateTreeNode();
      } else if (IsConstantNode(n)) {
        var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
        constNode.Value = parameterValues[nextParIdx];
        nextParIdx++;
        translatedNode = constNode;
      } else {
        // assume a variable name
        var varName = n.Symbol.Name;
        var varNode = (VariableTreeNode)new Variable().CreateTreeNode();
        varNode.Weight = 1.0;
        varNode.VariableName = varName;
        translatedNode = varNode;
      }
      foreach (var child in n.Subtrees) {
        translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
      }
      return translatedNode;
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
    /*
     * SUNDIALS: SUite of Nonlinear and DIfferential/ALgebraic Equation Solvers
     * CVODES
     * CVODES is a solver for stiff and nonstiff ODE systems (initial value problem) given in explicit
     * form y' = f(t,y,p) with sensitivity analysis capabilities (both forward and adjoint modes). CVODES
     * is a superset of CVODE and hence all options available to CVODE (with the exception of the FCVODE
     * interface module) are also available for CVODES. Both integration methods (Adams-Moulton and BDF)
     * and the corresponding nonlinear iteration methods, as well as all linear solver and preconditioner
     * modules, are available for the integration of the original ODEs, the sensitivity systems, or the
     * adjoint system. Depending on the number of model parameters and the number of functional outputs,
     * one of two sensitivity methods is more appropriate. The forward sensitivity analysis (FSA) method
     * is mostly suitable when the gradients of many outputs (for example the entire solution vector) with
     * respect to relatively few parameters are needed. In this approach, the model is differentiated with
     * respect to each parameter in turn to yield an additional system of the same size as the original
     * one, the result of which is the solution sensitivity. The gradient of any output function depending
     * on the solution can then be directly obtained from these sensitivities by applying the chain rule
     * of differentiation. The adjoint sensitivity analysis (ASA) method is more practical than the
     * forward approach when the number of parameters is large and the gradients of only few output
     * functionals are needed. In this approach, the solution sensitivities need not be computed
     * explicitly. Instead, for each output functional of interest, an additional system, adjoint to the
     * original one, is formed and solved. The solution of the adjoint system can then be used to evaluate
     * the gradient of the output functional with respect to any set of model parameters. The FSA module
     * in CVODES implements a simultaneous corrector method as well as two flavors of staggered corrector
     * methods–for the case when sensitivity right hand sides are generated all at once or separated for
     * each model parameter. The ASA module provides the infrastructure required for the backward
     * integration in time of systems of differential equations dependent on the solution of the original
     * ODEs. It employs a checkpointing scheme for efficient interpolation of forward solutions during the
     * backward integration.
     */
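
    // Added note (a sketch of the forward method used below, following the references above):
    // for an ODE
    //   y'(t) = f(y(t), u(t), θ)
    // the parameter sensitivity S(t) = ∂y(t)/∂θ obeys the forward sensitivity ODE
    //   S'(t) = (∂f/∂y) S(t) + ∂f/∂θ,   S(t0) = 0.
    // Integrate propagates the pair (y, S) with explicit Euler steps: InterpretRec returns f
    // together with its total derivative w.r.t. θ (the right-hand side of the sensitivity ODE,
    // assembled via the chain rule on the Vector type), and both parts are advanced with the
    // same step size h.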
    private static IEnumerable<Tuple<double, Vector>[]> Integrate(
      ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
      Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx, double[] parameterValues, int numericIntegrationSteps = 100) {

      int NUM_STEPS = numericIntegrationSteps;
      double h = 1.0 / NUM_STEPS; // explicit Euler step size; NUM_STEPS steps are taken per dataset row

      foreach (var episode in episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);
        // return the first value as stored in the dataset
        yield return targetVariables
          .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
          .ToArray();

        // integrate forward starting with known values for the target in t0

        var variableValues = new Dictionary<string, Tuple<double, Vector>>();
        var t0 = rows.First();
        foreach (var varName in inputVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        foreach (var varName in targetVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        // add value entries for latent variables which are also integrated
        foreach (var latentVar in latentVariables) {
          variableValues.Add(latentVar, Tuple.Create(0.0, Vector.Zero)); // we don't have observations for latent variables -> assume zero as the starting value
        }
        var calculatedVariables = targetVariables.Concat(latentVariables); // TODO: must coincide with the order of trees in the encoding

        foreach (var t in rows.Skip(1)) {
          for (int step = 0; step < NUM_STEPS; step++) {
            var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
            foreach (var tup in trees.Zip(calculatedVariables, Tuple.Create)) {
              var tree = tup.Item1;
              var targetVarName = tup.Item2;
              // skip programRoot and startSymbol
              var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), variableValues, nodeIdx, parameterValues);
              deltaValues.Add(targetVarName, res);
            }

            // update variableValues for the next step; the same Euler step is applied
            // to the value (Item1) and to its parameter sensitivity (Item2)
            foreach (var kvp in deltaValues) {
              var oldVal = variableValues[kvp.Key];
              variableValues[kvp.Key] = Tuple.Create(
                oldVal.Item1 + h * kvp.Value.Item1,
                oldVal.Item2 + h * kvp.Value.Item2
              );
            }
          }

          // only return the target variables for calculation of errors
          yield return targetVariables
            .Select(targetVar => variableValues[targetVar])
            .ToArray();

          // update for the next time step
          foreach (var varName in inputVariables) {
            variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
          }
        }
      }
    }

    private static Tuple<double, Vector> InterpretRec(
      ISymbolicExpressionTreeNode node,
      Dictionary<string, Tuple<double, Vector>> variableValues,
      Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx,
      double[] parameterValues
        ) {

      switch (node.Symbol.Name) {
        case "+": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues); // TODO: capture all parameters into a state type for interpretation
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            return Tuple.Create(l.Item1 + r.Item1, l.Item2 + r.Item2);
          }
        case "*": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            return Tuple.Create(l.Item1 * r.Item1, l.Item2 * r.Item1 + l.Item1 * r.Item2); // product rule
          }

        case "-": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            return Tuple.Create(l.Item1 - r.Item1, l.Item2 - r.Item2);
          }
        case "%": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            // protected division
            if (r.Item1.IsAlmost(0.0)) {
              return Tuple.Create(0.0, Vector.Zero);
            } else {
              return Tuple.Create(
                l.Item1 / r.Item1,
                l.Item1 * -1.0 / (r.Item1 * r.Item1) * r.Item2 + 1.0 / r.Item1 * l.Item2 // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'
                );
            }
          }
        case "sin": {
            var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            return Tuple.Create(
              Math.Sin(x.Item1),
              Math.Cos(x.Item1) * x.Item2 // chain rule: (sin u)' = cos(u) * u'; the gradient is scaled by the scalar cos(u), not transformed element-wise
            );
          }
        case "cos": {
            var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            return Tuple.Create(
              Math.Cos(x.Item1),
              -Math.Sin(x.Item1) * x.Item2 // chain rule: (cos u)' = -sin(u) * u'
            );
          }
        default: {
            // distinguish other cases
            if (IsConstantNode(node)) {
              var vArr = new double[parameterValues.Length]; // backing array for the gradient vector
              vArr[nodeIdx[node]] = 1.0; // seed: ∂θ_i/∂θ_i = 1
              var g = new Vector(vArr);
              return Tuple.Create(parameterValues[nodeIdx[node]], g);
            } else {
              // assume a variable name
              var varName = node.Symbol.Name;
              return variableValues[varName];
            }
          }
      }
    }
    #endregion

    #region events
    /*
     * Dependencies between parameters:
     *
     * ProblemData
     *    |
     *    V
     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
     *               |   |                 |                   |
     *               V   V                 |                   |
     *             Grammar <---------------+-------------------
     *                |
     *                V
     *            Encoding
     */
    private void RegisterEventHandlers() {
      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;

      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;

      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;

      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;

      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
    }

    private void NumLatentVariablesChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void MaximumLengthChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
    }

    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
      UpdateGrammarAndEncoding();
    }

    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
    }

    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
      UpdateGrammarAndEncoding();
    }

    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
      ProblemDataParameter.Value.Changed += ProblemData_Changed;
      OnProblemDataChanged();
      OnReset();
    }

    private void ProblemData_Changed(object sender, EventArgs e) {
      OnProblemDataChanged();
      OnReset();
    }

    private void OnProblemDataChanged() {
      UpdateTargetVariables();        // implicitly updates other dependent parameters
      var handler = ProblemDataChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    #endregion

    #region helper

    private void InitAllParameters() {
      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
    }

    private ReadOnlyCheckedItemCollection<StringValue> CreateFunctionSet() {
      var l = new CheckedItemCollection<StringValue>();
      l.Add(new StringValue("+").AsReadOnly());
      l.Add(new StringValue("*").AsReadOnly());
      l.Add(new StringValue("%").AsReadOnly());
      l.Add(new StringValue("-").AsReadOnly());
      l.Add(new StringValue("sin").AsReadOnly());
      l.Add(new StringValue("cos").AsReadOnly());
      return l.AsReadOnly();
    }

    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name.StartsWith("θ");
    }
    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name.StartsWith("λ");
    }

    private void UpdateTargetVariables() {
      var currentlySelectedVariables = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();

      var newVariablesList = new CheckedItemCollection<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
      foreach (var matchingItem in matchingItems) {
        newVariablesList.SetItemCheckedState(matchingItem, true);
      }
      TargetVariablesParameter.Value = newVariablesList;
    }

    private void UpdateGrammarAndEncoding() {
      var encoding = new MultiEncoding();
      var g = CreateGrammar();
      foreach (var targetVar in TargetVariables.CheckedItems) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
      }
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
      }
      Encoding = encoding;
    }

    private ISymbolicExpressionGrammar CreateGrammar() {
      // whenever ProblemData is changed we create a new grammar with the necessary symbols
      var g = new SimpleSymbolicExpressionGrammar();
      g.AddSymbols(FunctionSet.CheckedItems.Select(i => i.Value).ToArray(), 2, 2);

      // TODO
      //g.AddSymbols(new[] {
      //  "exp",
      //  "log", // log( <expr> ) // TODO: init a theta to ensure the value is always positive
      //  "exp_minus" // exp((-1) * <expr>)
      //}, 1, 1);

      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
        g.AddTerminalSymbol(variableName);

      // generate symbols for numeric parameters for which the values are optimized using AutoDiff
      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
      var numericConstantsFactor = 2.0;
      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
      }

      // generate symbols for latent variables
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        g.AddTerminalSymbol("λ" + i); // latent variable symbol
      }

      return g;
    }

    #endregion

    #region Import & Export
    public void Load(IRegressionProblemData data) {
      Name = data.Name;
      Description = data.Description;
      ProblemData = data;
    }

    public IRegressionProblemData Export() {
      return ProblemData;
    }
    #endregion

  }
}