
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16663

Last change on this file was r16663, checked in by gkronber, 5 years ago

#2925: adapted to work with new persistence

File size: 87.7 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;
using HEAL.Attic;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableType("065C6A61-773A-42C9-9DE5-61A5D1D823EB")]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    private const string OdeSolverParameterName = "ODE Solver";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemList<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemList<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }

    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    public IConstrainedValueParameter<StringValue> OdeSolverParameter {
      get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemList<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemList<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    public string OdeSolver {
      get { return OdeSolverParameter.Value.Value; }
      set {
        var matchingValue = OdeSolverParameter.ValidValues.FirstOrDefault(v => v.Value == value);
        if (matchingValue == null) throw new ArgumentOutOfRangeException();
        else OdeSolverParameter.Value = matchingValue;
      }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }
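
    // (Note: the scaled NMSE used here is the mean squared error with residuals scaled by the
    //  inverse standard deviation of the respective target variable, so a model that always
    //  predicts the target mean has an error of about 1.0 per target; smaller is better.)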

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(StorableConstructorFlag _) : base(_) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemList<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using Levenberg-Marquardt). More iterations make the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower, fewer steps worsen the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));

      var solversStr = new string[] { "HeuristicLab" /* , "CVODES" */};
      var solvers = new ItemSet<StringValue>(
        solversStr.Select(s => new StringValue(s).AsReadOnly())
        );
      Parameters.Add(new ConstrainedValueParameter<StringValue>(OdeSolverParameterName, "The solver to use for solving the initial value ODE problems", solvers, solvers.First()));

      RegisterEventHandlers();
      InitAllParameters();

      // TODO: use training range as default training episode
      // TODO: optimization of starting values for latent variables in CVODES solver
      // TODO: allow to specify the name for the time variable in the dataset and allow variable step-sizes
    }
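
    // A minimal usage sketch (illustration only, not part of the original source;
    // 'myProblemData' stands for a hypothetical IRegressionProblemData instance):
    //   var problem = new Problem();
    //   problem.ProblemData = myProblemData;                                // e.g. imported from CSV
    //   problem.TrainingEpisodesParameter.Value.Add(new IntRange(0, 100));  // one training episode
    //   problem.OdeSolver = "HeuristicLab";                                 // the only solver enabled above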

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      if (OptimizeParametersForEpisodes) {
        throw new NotImplementedException();
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          // double[] optTheta;
          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
          // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        // double[] optTheta;
        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
        // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
        return nmse;
      }
    }

    public static double OptimizeForEpisodes(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      int numericIntegrationSteps,
      string odeSolver) {

      // extract constants from trees (without trees for latent variables)
      var targetVariableTrees = trees.Take(targetVars.Length).ToArray();
      var latentVariableTrees = trees.Skip(targetVars.Length).ToArray();
      var constantNodes = targetVariableTrees.Select(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().ToArray()).ToArray();
      var initialTheta = constantNodes.Select(nodes => nodes.Select(n => n.Value).ToArray()).ToArray();

      // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
      double nmse = PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxParameterOptIterations,
        initialTheta, out double[] pretunedParameters);

      // extend parameter vector to include parameters for latent variable trees
      pretunedParameters = pretunedParameters
        .Concat(latentVariableTrees
        .SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().Select(n => n.Value)))
        .ToArray();

      // optimize parameters using integration of f(x,y) to calculate y(t)
      nmse = OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
        out double[] optTheta);
      // var optTheta = pretunedParameters;

      if (double.IsNaN(nmse) ||
        double.IsInfinity(nmse) ||
        nmse > 100 * trees.Length * episodes.Sum(ep => ep.Size))
        return 100 * trees.Length * episodes.Sum(ep => ep.Size);

      // update tree nodes with optimized values
      var paramIdx = 0;
      for (var treeIdx = 0; treeIdx < constantNodes.Length; treeIdx++) {
        for (int i = 0; i < constantNodes[treeIdx].Length; i++)
          constantNodes[treeIdx][i].Value = optTheta[paramIdx++];
      }
      return nmse;
    }

    private static double PreTuneParameters(
      ISymbolicExpressionTree[] trees,
      IRegressionProblemData problemData,
      string[] targetVars,
      string[] latentVariables,
      IRandom random,
      IEnumerable<IntRange> episodes,
      int maxParameterOptIterations,
      double[][] initialTheta,
      out double[] optTheta) {
      var thetas = new List<double>();
      double nmse = 0.0;
      var maxTreeNmse = 100 * episodes.Sum(ep => ep.Size);

      var targetTrees = trees.Take(targetVars.Length).ToArray();
      var latentTrees = trees.Skip(targetVars.Length).ToArray(); // latent variable trees follow the target variable trees

      {
        // first calculate values of latent variables by integration
        var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
        var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "HeuristicLab");

        var fi = new double[myState.rows.Length * targetVars.Length];
        var jac = new double[myState.rows.Length * targetVars.Length, myState.nodeValueLookup.ParameterCount];
        var latentValues = new double[myState.rows.Length, latentVariables.Length];
        Integrate(myState, fi, jac, latentValues);

        // add integrated latent variables to dataset
        var modifiedDataset = ((Dataset)problemData.Dataset).ToModifiable();
        foreach (var variable in latentVariables) {
          modifiedDataset.AddVariable(variable, Enumerable.Repeat(0.0, modifiedDataset.Rows).ToList()); // empty column
        }
        int predIdx = 0;
        foreach (var ep in episodes) {
          for (int r = ep.Start; r < ep.End; r++) {
            for (int latVarIdx = 0; latVarIdx < latentVariables.Length; latVarIdx++) {
              modifiedDataset.SetVariableValue(latentValues[predIdx, latVarIdx], latentVariables[latVarIdx], r);
            }
            predIdx++;
          }
        }

        problemData = new RegressionProblemData(modifiedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
      }
      // NOTE: the order of values in parameter matches prefix order of constant nodes in trees
      for (int treeIdx = 0; treeIdx < targetTrees.Length; treeIdx++) {
        var t = targetTrees[treeIdx];

        var targetValuesDiff = new List<double>();
        foreach (var ep in episodes) {
          var episodeRows = Enumerable.Range(ep.Start, ep.Size);
          var targetValues = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], episodeRows).ToArray();
          targetValuesDiff.AddRange(targetValues.Skip(1).Zip(targetValues, (t1, t0) => t1 - t0)); // TODO: smoothing or multi-pole
        }
        var adjustedEpisodes = episodes.Select(ep => new IntRange(ep.Start, ep.End - 1)); // because we lose the last row in the differencing step
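
        // (The pretuning target is the first difference y(t+1) - y(t), i.e. a forward-difference
        //  approximation of dy/dt assuming equidistant rows with unit step size; the same
        //  equidistance assumption is made in Integrate below.)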

        // data for input variables is assumed to be known
        // input variables in pretuning are all target variables and all variable names that occur in the tree
        var inputVariables = targetVars.Concat(t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName)).Distinct();

        var myState = new OptimizationData(new[] { t },
          targetVars,
          inputVariables.ToArray(),
          problemData, new[] { targetValuesDiff.ToArray() }, adjustedEpisodes.ToArray(), -99, latentVariables, string.Empty); // TODO
        var paramCount = myState.nodeValueLookup.ParameterCount;

        optTheta = new double[0];
        if (initialTheta[treeIdx].Length > 0) {
          try {
            alglib.minlmstate state;
            alglib.minlmreport report;
            var p = new double[initialTheta[treeIdx].Length];
            var lowerBounds = Enumerable.Repeat(-1000.0, p.Length).ToArray();
            var upperBounds = Enumerable.Repeat(1000.0, p.Length).ToArray();
            Array.Copy(initialTheta[treeIdx], p, p.Length);
            alglib.minlmcreatevj(targetValuesDiff.Count, p, out state);
            alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
            alglib.minlmsetbc(state, lowerBounds, upperBounds);
#if DEBUG
            //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
            alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

            alglib.minlmresults(state, out optTheta, out report);
            if (report.terminationtype < 0) {
#if DEBUG
              if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
#endif
              optTheta = initialTheta[treeIdx];
            }
          } catch (alglib.alglibexception) {
            optTheta = initialTheta[treeIdx];
          }
        }
        var tree_nmse = EvaluateMSE(optTheta, myState);
        if (double.IsNaN(tree_nmse) || double.IsInfinity(tree_nmse) || tree_nmse > maxTreeNmse) {
          nmse += maxTreeNmse;
          thetas.AddRange(initialTheta[treeIdx]);
        } else {
          nmse += tree_nmse;
          thetas.AddRange(optTheta);
        }
      } // foreach tree
      optTheta = thetas.ToArray();

      return nmse;
    }

    // similar to above but this time we integrate and optimize all parameters for all targets concurrently
    private static double OptimizeParameters(ISymbolicExpressionTree[] trees, IRegressionProblemData problemData, string[] targetVars, string[] latentVariables,
      IEnumerable<IntRange> episodes, int maxParameterOptIterations, double[] initialTheta, int numericIntegrationSteps, string odeSolver, out double[] optTheta) {
      var rowsForDataExtraction = episodes.SelectMany(e => Enumerable.Range(e.Start, e.Size)).ToArray();
      var targetValues = new double[targetVars.Length][];
      for (int treeIdx = 0; treeIdx < targetVars.Length; treeIdx++) {
        var t = trees[treeIdx];

        targetValues[treeIdx] = problemData.Dataset.GetDoubleValues(targetVars[treeIdx], rowsForDataExtraction).ToArray();
      }

      // data for input variables is assumed to be known
      // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
      var inputVariables = trees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
        .Except(targetVars)
        .Except(latentVariables)
        .Distinct();

      var myState = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver);
      optTheta = initialTheta;

      if (initialTheta.Length > 0) {
        var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
        var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
        try {
          alglib.minlmstate state;
          alglib.minlmreport report;
          alglib.minlmcreatevj(rowsForDataExtraction.Length * trees.Length, initialTheta, out state);
          alglib.minlmsetbc(state, lowerBounds, upperBounds);
          alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
#if DEBUG
          //alglib.minlmsetgradientcheck(state, 1.0e-7);
#endif
          alglib.minlmoptimize(state, IntegrateAndEvaluateObjectiveVector, IntegrateAndEvaluateObjectiveVectorAndJacobian, null, myState);

          alglib.minlmresults(state, out optTheta, out report);

          if (report.terminationtype < 0) {
#if DEBUG
            if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
#endif
            // there was a problem: reset theta and evaluate for initial values
            optTheta = initialTheta;
          }
        } catch (alglib.alglibexception) {
          optTheta = initialTheta;
        }
      }
      var nmse = EvaluateIntegratedMSE(optTheta, myState);
      var maxNmse = 100 * targetValues.Length * rowsForDataExtraction.Length;
      if (double.IsNaN(nmse) || double.IsInfinity(nmse) || nmse > maxNmse) nmse = maxNmse;
      return nmse;
    }

    // helper
    public static double EvaluateMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count()];
      EvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      var rows = optimizationData.rows;
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      var variables = optimizationData.variables;

      nodeValueLookup.UpdateParamValues(x);

      int outputIdx = 0;
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          } else {
            nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          }
        }
        // interpret all trees
        for (int treeIdx = 0; treeIdx < optimizationData.trees.Length; treeIdx++) {
          var tree = optimizationData.trees[treeIdx];
          var pred = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup);
          var y = optimizationData.targetValues[treeIdx][trainIdx];
          fi[outputIdx++] = (y - pred) * optimizationData.inverseStandardDeviation[treeIdx];
        }
      }
    }

    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      var problemData = optimizationData.problemData;
      var ds = problemData.Dataset;
      var rows = optimizationData.rows;
      var variables = optimizationData.variables;

      var nodeValueLookup = optimizationData.nodeValueLookup;
      nodeValueLookup.UpdateParamValues(x);

      int termIdx = 0;

      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        // update variable values from the dataset
        foreach (var variable in variables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(variable, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValueLookup.SetVariableValue(variable, value); // TODO: Perf we don't need to set this for each index
          } else {
            nodeValueLookup.SetVariableValue(variable, ds.GetDoubleValue(variable, rows[trainIdx])); // TODO: perf
          }
        }

        var calculatedVariables = optimizationData.targetVariables;

        var trees = optimizationData.trees;
        for (int i = 0; i < trees.Length; i++) {
          var tree = trees[i];
          var targetVarName = calculatedVariables[i];

          double f; Vector g;
          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup, out f, out g);

          var y = optimizationData.targetValues[i][trainIdx];
          fi[termIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale of NMSE
          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[termIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];

          termIdx++;
        }
      }

    }

    // helper
    public static double EvaluateIntegratedMSE(double[] x, OptimizationData optimizationData) {
      var fi = new double[optimizationData.rows.Count() * optimizationData.targetVariables.Length];
      IntegrateAndEvaluateObjectiveVector(x, fi, optimizationData);
      return fi.Sum(fii => fii * fii) / fi.Length;
    }
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { IntegrateAndEvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) {
      IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData);
    }

    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { IntegrateAndEvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib
    public static void IntegrateAndEvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) {
      var rows = optimizationData.rows.ToArray();
      var problemData = optimizationData.problemData;
      var nodeValueLookup = optimizationData.nodeValueLookup;
      var ds = problemData.Dataset;
      int outputIdx = 0;

      nodeValueLookup.UpdateParamValues(x);

      Integrate(optimizationData, fi, jac, null);
      var trees = optimizationData.trees;

      // update result with error
      for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
        for (int i = 0; i < optimizationData.targetVariables.Length; i++) {
          var tree = trees[i];
          var y = optimizationData.targetValues[i][trainIdx];
          fi[outputIdx] = (y - fi[outputIdx]) * optimizationData.inverseStandardDeviation[i];  // scale for normalized squared error
          if (jac != null) for (int j = 0; j < x.Length; j++) jac[outputIdx, j] = -jac[outputIdx, j] * optimizationData.inverseStandardDeviation[i];
          outputIdx++;
        }
      }
    }

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }
      if (!results.ContainsKey("SNMSE")) {
        results.Add(new Result("SNMSE", typeof(DoubleValue)));
      }
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
      if (!results.ContainsKey("Squared error and gradient")) {
        results.Add(new Result("Squared error and gradient", typeof(DataTable)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual

      results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);

      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        throw new NotSupportedException();
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver);
          var trainingPrediction = Integrate(optimizationData).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();


        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        // data for input variables is assumed to be known
        // input variables are all variable names that occur in the trees except for target variables (we assume that trees have been generated correctly)
        var inputVariables = trees
          .SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
          .Except(targetVars)
          .Except(latentVariables)
          .Distinct();

        var optimizationData = new OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver);
        var numParams = optimizationData.nodeValueLookup.ParameterCount;

        var fi = new double[optimizationData.rows.Length * targetVars.Length];
        var jac = new double[optimizationData.rows.Length * targetVars.Length, numParams];
        var latentValues = new double[optimizationData.rows.Length, latentVariables.Length];
        Integrate(optimizationData, fi, jac, latentValues);

        // for target values and latent variables
        var trainingRows = optimizationData.rows;
        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var trainingDataTable = new DataTable(targetVar + " prediction (training)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
            var idx = Enumerable.Range(0, trainingRows.Length).Select(i => i * targetVars.Length + colIdx);
            var pred = idx.Select(i => fi[i]);
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, pred.ToArray());
            trainingDataTable.Rows.Add(actualValuesRow);
            trainingDataTable.Rows.Add(predictedValuesRow);

            for (int paramIdx = 0; paramIdx < numParams; paramIdx++) {
              var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", idx.Select(i => jac[i, paramIdx]).ToArray());
              paramSensitivityRow.VisualProperties.SecondYAxis = true;
              trainingDataTable.Rows.Add(paramSensitivityRow);
            }
            trainingList.Add(trainingDataTable);
          } else {
            var latentVar = latentVariables[colIdx - targetVars.Length];
            var trainingDataTable = new DataTable(latentVar + " prediction (training)");
            var idx = Enumerable.Range(0, trainingRows.Length);
            var pred = idx.Select(i => latentValues[i, colIdx - targetVars.Length]);
            var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, pred.ToArray());
            var emptyRow = new DataRow(latentVar);
            trainingDataTable.Rows.Add(emptyRow);
            trainingDataTable.Rows.Add(predictedValuesRow);
            trainingList.Add(trainingDataTable);
          }
        }

        var errorTable = new DataTable("Squared error and gradient");
        var seRow = new DataRow("Squared error");
        var gradientRows = Enumerable.Range(0, numParams).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
        errorTable.Rows.Add(seRow);
        foreach (var gRow in gradientRows) {
          gRow.VisualProperties.SecondYAxis = true;
          errorTable.Rows.Add(gRow);
        }
        var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
        int r = 0;

        // foreach (var y_pred in trainingPrediction) {
        //   // calculate objective function gradient
        //   double f_i = 0.0;
        //   Vector g_i = Vector.CreateNew(new double[numParams]);
        //   for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
        //     var y_pred_f = y_pred[colIdx].Item1;
        //     var y = targetValues[colIdx][r];
        //
        //     var res = (y - y_pred_f) * optimizationData.inverseStandardDeviation[colIdx];
        //     var ressq = res * res;
        //     f_i += ressq;
        //     g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res));
        //   }
        //   seRow.Values.Add(f_i);
        //   for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
        //   r++;
        // }
        // results["Squared error and gradient"].Value = errorTable;

        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testOptimizationData = new OptimizationData(trees, targetVars, problemData.AllowedInputVariables.ToArray(), problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver);
        var testPrediction = Integrate(testOptimizationData).ToArray();

        for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
          // is target variable
          if (colIdx < targetVars.Length) {
            var targetVar = targetVars[colIdx];
            var testDataTable = new DataTable(targetVar + " prediction (test)");
            var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
            var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            testDataTable.Rows.Add(actualValuesRow);
            testDataTable.Rows.Add(predictedValuesRow);
            testList.Add(testDataTable);

          } else {
            // var latentVar = latentVariables[colIdx - targetVars.Length];
            // var testDataTable = new DataTable(latentVar + " prediction (test)");
            // var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
            // var emptyRow = new DataRow(latentVar);
            // testDataTable.Rows.Add(emptyRow);
            // testDataTable.Rows.Add(predictedValuesRow);
            // testList.Add(testDataTable);
          }
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();


        #region simplification of models
        // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and original version of tree

        var clonedTrees = new List<ISymbolicExpressionTree>();
        for (int idx = 0; idx < trees.Length; idx++) {
          clonedTrees.Add((ISymbolicExpressionTree)trees[idx].Clone());
        }
        var ds = problemData.Dataset;
        var newProblemData = new RegressionProblemData((IDataset)ds.Clone(), problemData.AllowedInputVariables, problemData.TargetVariable);
        results["Solution"].Value = new Solution(clonedTrees.ToArray(),
                   // optTheta,
                   newProblemData,
                   targetVars,
                   latentVariables,
                   TrainingEpisodes,
                   OdeSolver,
                   NumericIntegrationSteps);


        for (int idx = 0; idx < trees.Length; idx++) {
          var varName = string.Empty;
          if (idx < targetVars.Length) {
            varName = targetVars[idx];
          } else {
            varName = latentVariables[idx - targetVars.Length];
          }
          var tree = trees[idx];

          var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(tree);
          models.Add(simplifiedTreeVar);

        }

        results["Models"].Value = models;
        #endregion
      }
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models — both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
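
    // Sketch of the underlying idea (added for clarity; the notation is ours, not from the
    // original source): for the ODE  dy/dt = f(y, θ)  the parameter sensitivities
    // s_i(t) = ∂y(t)/∂θ_i satisfy their own ODE
    //   ds_i/dt = (∂f/∂y) s_i + ∂f/∂θ_i,   with s_i(t0) = 0 for observed variables,
    // so values and sensitivities can be integrated forward together. The HeuristicLab solver
    // below realizes this implicitly: InterpretRec propagates gradient vectors (forward-mode
    // auto-diff) alongside the function values at every integration step.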

    public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData) {
      var nTargets = optimizationData.targetVariables.Length;
      var n = optimizationData.rows.Length * optimizationData.targetVariables.Length;
      var d = optimizationData.nodeValueLookup.ParameterCount;
      double[] fi = new double[n];
      double[,] jac = new double[n, d];
      Integrate(optimizationData, fi, jac, null);
      for (int i = 0; i < optimizationData.rows.Length; i++) {
        var res = new Tuple<double, Vector>[nTargets];
        for (int j = 0; j < nTargets; j++) {
          res[j] = Tuple.Create(fi[i * nTargets + j], Vector.CreateFromMatrixRow(jac, i * nTargets + j));
        }
        yield return res;
      }
    }

    public static void Integrate(OptimizationData optimizationData, double[] fi, double[,] jac, double[,] latentValues) {
      var trees = optimizationData.trees;
      var dataset = optimizationData.problemData.Dataset;
      var inputVariables = optimizationData.variables;
      var targetVariables = optimizationData.targetVariables;
      var latentVariables = optimizationData.latentVariables;
      var episodes = optimizationData.episodes;
      var odeSolver = optimizationData.odeSolver;
      var numericIntegrationSteps = optimizationData.numericIntegrationSteps;
      var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must coincide with the order of trees in the encoding

      var nodeValues = optimizationData.nodeValueLookup;

      // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
      var outputRowIdx = 0;
      var episodeIdx = 0;
      foreach (var episode in optimizationData.episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start).ToArray();

        var t0 = rows.First();

        // initialize values for inputs and targets from dataset
        foreach (var varName in inputVariables) {
          // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
          if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
            nodeValues.SetVariableValue(varName, value, Vector.Zero);
          } else {
            var y0 = dataset.GetDoubleValue(varName, t0);
            nodeValues.SetVariableValue(varName, y0, Vector.Zero);
          }
        }
        foreach (var varName in targetVariables) {
          var y0 = dataset.GetDoubleValue(varName, t0);
          nodeValues.SetVariableValue(varName, y0, Vector.Zero);

          // output starting value
          fi[outputRowIdx] = y0;
          Vector.Zero.CopyTo(jac, outputRowIdx);

          outputRowIdx++;
        }

        var latentValueRowIdx = 0;
        var latentValueColIdx = 0;
        foreach (var varName in latentVariables) {
          var y0 = 0.0; // assume we start at zero
          nodeValues.SetVariableValue(varName, y0, Vector.Zero);

          if (latentValues != null) {
            latentValues[latentValueRowIdx, latentValueColIdx++] = y0;
          }
        }
        latentValueColIdx = 0; latentValueRowIdx++;

        { // CODE BELOW DOESN'T WORK ANYMORE
          // if (latentVariables.Length > 0) throw new NotImplementedException();
          //
          // // add value entries for latent variables which are also integrated
          // // initial values are at the end of the parameter vector
          // // separate initial values for each episode
          // var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
          // foreach (var latentVar in latentVariables) {
          //   var arr = new double[parameterValues.Length]; // backing array
          //   arr[initialValueIdx] = 1.0;
          //   var g = new Vector(arr);
          //   nodeValues.SetVariableValue(latentVar, parameterValues[initialValueIdx], g); // we don't have observations for latent variables therefore we optimize the initial value for each episode
          //   initialValueIdx++;
          // }
        }

        var prevT = t0; // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
        foreach (var t in rows.Skip(1)) {
          if (odeSolver == "HeuristicLab")
            IntegrateHL(trees, calculatedVariables, nodeValues, numericIntegrationSteps); // integrator updates nodeValues
          else if (odeSolver == "CVODES")
            throw new NotImplementedException();
          // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;

          // update output for target variables (TODO: if we want to visualize the latent variables then we need to provide a separate output)
          for (int i = 0; i < targetVariables.Length; i++) {
            var targetVar = targetVariables[i];
            var yt = nodeValues.GetVariableValue(targetVar);

            // fill up remaining rows with last valid value if there are invalid values
            if (double.IsNaN(yt.Item1) || double.IsInfinity(yt.Item1)) {
              for (; outputRowIdx < fi.Length; outputRowIdx++) {
                var prevIdx = outputRowIdx - targetVariables.Length;
                fi[outputRowIdx] = fi[prevIdx]; // current <- prev
                if (jac != null) for (int j = 0; j < jac.GetLength(1); j++) jac[outputRowIdx, j] = jac[prevIdx, j];
              }
              return;
            }

            fi[outputRowIdx] = yt.Item1;
            var g = yt.Item2;
            g.CopyTo(jac, outputRowIdx);
            outputRowIdx++;
          }
          if (latentValues != null) {
            foreach (var latentVariable in latentVariables) {
              var lt = nodeValues.GetVariableValue(latentVariable).Item1;
              latentValues[latentValueRowIdx, latentValueColIdx++] = lt;
            }
            latentValueRowIdx++; latentValueColIdx = 0;
          }

          // update for next time step (only the inputs)
          foreach (var varName in inputVariables) {
            // in this problem we also allow fixed numeric parameters (represented as variables with the value as name)
            if (double.TryParse(varName, NumberStyles.Float, CultureInfo.InvariantCulture, out double value)) {
              // value is unchanged
            } else {
              nodeValues.SetVariableValue(varName, dataset.GetDoubleValue(varName, t), Vector.Zero);
            }
          }
        }
        episodeIdx++;
      }
    }
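
    // A minimal sketch of what one integration step of the HeuristicLab solver amounts to
    // (an assumption for illustration; the actual IntegrateHL implementation follows further
    // below in this file and may differ in details):
    //
    //   double h = 1.0 / numericIntegrationSteps;  // rows are assumed to be equidistant (see above)
    //   for (int step = 0; step < numericIntegrationSteps; step++) {
    //     foreach (tree f_v for calculated variable v) {
    //       InterpretRec(..., out double f, out Vector g);  // value and gradient via auto-diff
    //       y_v = y_v + h * f;                              // explicit Euler step for the value
    //       g_v = g_v + h * g;                              // matching step for the sensitivity
    //     }
    //   }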

    #region CVODES

    /*
    /// <summary>
    ///  Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
    /// </summary>
    /// <param name="trees">Each equation in the ODE represented as a tree</param>
    /// <param name="calculatedVariables">The names of the calculated variables</param>
    /// <param name="variableValues">The start values of the calculated variables as well as their sensitivities over parameters</param>
    /// <param name="parameterValues">The current parameter values</param>
    /// <param name="t">The time t up to which we need to integrate.</param>
    private static void IntegrateCVODES(
      ISymbolicExpressionTree[] trees, // f(y,p) in tree representation
      string[] calculatedVariables, // names of elements of y
      Dictionary<string, Tuple<double, Vector>> variableValues,  //  y (input and output) input: y(t0), output: y(t0+t)
      double[] parameterValues, // p
      double t // duration t for which we want to integrate
      ) {

      // the RHS of the ODE
      // dy/dt = f(y_t,x_t,p)
      CVODES.CVRhsFunc f = CreateOdeRhs(trees, calculatedVariables, parameterValues);
      // the Jacobian ∂f/∂y
      CVODES.CVDlsJacFunc jac = CreateJac(trees, calculatedVariables, parameterValues);

      // the RHS for the forward sensitivities (∂f/∂y)s_i(t) + ∂f/∂p_i
      CVODES.CVSensRhsFn sensF = CreateSensitivityRhs(trees, calculatedVariables, parameterValues);

      // setup solver
      int numberOfEquations = trees.Length;
      IntPtr y = IntPtr.Zero;
      IntPtr cvode_mem = IntPtr.Zero;
      IntPtr A = IntPtr.Zero;
      IntPtr yS0 = IntPtr.Zero;
      IntPtr linearSolver = IntPtr.Zero;
      var ns = parameterValues.Length; // number of parameters

      try {
        y = CVODES.N_VNew_Serial(numberOfEquations);
        // init y to current values of variables
        // y must be initialized before calling CVodeInit
        for (int i = 0; i < calculatedVariables.Length; i++) {
          CVODES.NV_Set_Ith_S(y, i, variableValues[calculatedVariables[i]].Item1);
        }

        cvode_mem = CVODES.CVodeCreate(CVODES.MultistepMethod.CV_ADAMS, CVODES.NonlinearSolverIteration.CV_FUNCTIONAL);

        var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
        Assert(CVODES.CV_SUCCESS == flag);

        double relTol = 1.0e-2;
        double absTol = 1.0;
        flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol);  // TODO: probably need to adjust absTol per variable
        Assert(CVODES.CV_SUCCESS == flag);

        A = CVODES.SUNDenseMatrix(numberOfEquations, numberOfEquations);
        Assert(A != IntPtr.Zero);

        linearSolver = CVODES.SUNDenseLinearSolver(y, A);
        Assert(linearSolver != IntPtr.Zero);

        flag = CVODES.CVDlsSetLinearSolver(cvode_mem, linearSolver, A);
        Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVDlsSetJacFn(cvode_mem, jac);
        Assert(CVODES.CV_SUCCESS == flag);

        yS0 = CVODES.N_VCloneVectorArray_Serial(ns, y); // clone the output vector for each parameter
        unsafe {
          // set to initial sensitivities supplied by caller
          for (int pIdx = 0; pIdx < ns; pIdx++) {
            var yS0_i = *((IntPtr*)yS0.ToPointer() + pIdx);
            for (var varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
              CVODES.NV_Set_Ith_S(yS0_i, varIdx, variableValues[calculatedVariables[varIdx]].Item2[pIdx]);
            }
          }
        }

        flag = CVODES.CVodeSensInit(cvode_mem, ns, CVODES.CV_SIMULTANEOUS, sensF, yS0);
        Assert(CVODES.CV_SUCCESS == flag);

        flag = CVODES.CVodeSensEEtolerances(cvode_mem);
        Assert(CVODES.CV_SUCCESS == flag);

        // make one forward integration step
        double tout = 0.0; // first output time
        flag = CVODES.CVode(cvode_mem, t, y, ref tout, CVODES.CV_NORMAL);
        if (flag == CVODES.CV_SUCCESS) {
          Assert(t == tout);

          // get sensitivities
          flag = CVODES.CVodeGetSens(cvode_mem, ref tout, yS0);
          Assert(CVODES.CV_SUCCESS == flag);

          // update variableValues based on integration results
          for (int varIdx = 0; varIdx < calculatedVariables.Length; varIdx++) {
            var yi = CVODES.NV_Get_Ith_S(y, varIdx);
            var gArr = new double[parameterValues.Length];
            for (var pIdx = 0; pIdx < parameterValues.Length; pIdx++) {
              unsafe {
                var yS0_pi = *((IntPtr*)yS0.ToPointer() + pIdx);
                gArr[pIdx] = CVODES.NV_Get_Ith_S(yS0_pi, varIdx);
              }
            }
            variableValues[calculatedVariables[varIdx]] = Tuple.Create(yi, new Vector(gArr));
          }
        } else {
          variableValues.Clear();   // indicate problems by not returning new values
        }

        // cleanup all allocated objects
      } finally {
        if (y != IntPtr.Zero) CVODES.N_VDestroy_Serial(y);
        if (cvode_mem != IntPtr.Zero) CVODES.CVodeFree(ref cvode_mem);
        if (linearSolver != IntPtr.Zero) CVODES.SUNLinSolFree(linearSolver);
        if (A != IntPtr.Zero) CVODES.SUNMatDestroy(A);
        if (yS0 != IntPtr.Zero) CVODES.N_VDestroyVectorArray_Serial(yS0, ns);
      }
    }


    private static CVODES.CVRhsFunc CreateOdeRhs(
      ISymbolicExpressionTree[] trees,
1051      string[] calculatedVariables,
1052      double[] parameterValues) {
[16398]1053      // we don't need to calculate a gradient here
[16250]1054      return (double t,
1055              IntPtr y, // N_Vector, current value of y (input)
1056              IntPtr ydot, // N_Vector, calculated value of y' (output)
1057              IntPtr user_data // optional user data, (unused here)
1058              ) => {
[16251]1059                // TODO: perf
1060                var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1061
1062                int pIdx = 0;
1063                foreach (var tree in trees) {
1064                  foreach (var n in tree.IterateNodesPrefix()) {
1065                    if (IsConstantNode(n)) {
1066                      nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we do not need a gradient
1067                      pIdx++;
1068                    } else if (n.SubtreeCount == 0) {
1069                      // for variables and latent variables get the value from variableValues
1070                      var varName = n.Symbol.Name;
1071                      var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
[16268]1072                      if (varIdx < 0) throw new InvalidProgramException();
[16251]1073                      var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1074                      nodeValues.Add(n, Tuple.Create(y_i, Vector.Zero)); // no gradient needed
1075                    }
1076                  }
[16250]1077                }
1078                for (int i = 0; i < trees.Length; i++) {
1079                  var tree = trees[i];
[16251]1080                  var res_i = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
[16250]1081                  CVODES.NV_Set_Ith_S(ydot, i, res_i.Item1);
1082                }
1083                return 0;
1084              };
1085    }
1086
[16251]1087    private static CVODES.CVDlsJacFunc CreateJac(
1088      ISymbolicExpressionTree[] trees,
[16250]1089      string[] calculatedVariables,
[16251]1090      double[] parameterValues) {
1091
1092      return (
1093        double t, // current time (input)
1094        IntPtr y, // N_Vector, current value of y (input)
1095        IntPtr fy, // N_Vector, current value of f (input)
1096        IntPtr Jac, // SUNMatrix ∂f/∂y (output, row i contains the ∂f_i/∂y vector)
1097        IntPtr user_data, // optional (unused here)
1098        IntPtr tmp1, // N_Vector, optional (unused here)
1099        IntPtr tmp2, // N_Vector, optional (unused here)
1100        IntPtr tmp3 // N_Vector, optional (unused here)
1101      ) => {
1102        // here we need to calculate partial derivatives for the calculated variables y
1103        var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1104        int pIdx = 0;
1105        foreach (var tree in trees) {
1106          foreach (var n in tree.IterateNodesPrefix()) {
1107            if (IsConstantNode(n)) {
1108              nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], Vector.Zero)); // here we need a gradient over y which is zero for parameters
1109              pIdx++;
1110            } else if (n.SubtreeCount == 0) {
1111              // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
1112              var varName = n.Symbol.Name;
1113              var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
[16268]1114              if (varIdx < 0) throw new InvalidProgramException();
1115
[16251]1116              var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1117              var gArr = new double[CVODES.NV_LENGTH_S(y)]; // backing array
1118              gArr[varIdx] = 1.0;
1119              var g = new Vector(gArr);
1120              nodeValues.Add(n, Tuple.Create(y_i, g));
1121            }
1122          }
1123        }
1124
1125        for (int i = 0; i < trees.Length; i++) {
1126          var tree = trees[i];
1127          var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
1128          var g = res.Item2;
1129          for (int j = 0; j < calculatedVariables.Length; j++) {
1130            CVODES.SUNDenseMatrix_Set(Jac, i, j, g[j]);
1131          }
1132        }
1133        return 0; // on success
1134      };
1135    }
1136
1137
1138    // to calculate sensitivities RHS for all equations at once
1139    // must compute (∂f/∂y)s_i(t) + ∂f/∂p_i and store in ySdot.
1140    // Index i refers to parameters, dimensionality of matrix and vectors is number of equations
1141    private static CVODES.CVSensRhsFn CreateSensitivityRhs(ISymbolicExpressionTree[] trees, string[] calculatedVariables, double[] parameterValues) {
1142      return (
1143              int Ns, // number of parameters
1144              double t, // current time
1145              IntPtr y, // N_Vector y(t) (input)
1146              IntPtr ydot, // N_Vector dy/dt(t) (input)
1147              IntPtr yS, // N_Vector*, one vector for each parameter (input)
1148              IntPtr ySdot, // N_Vector*, one vector for each parameter (output)
1149              IntPtr user_data, // optional (unused here)
1150              IntPtr tmp1, // N_Vector, optional (unused here)
1151              IntPtr tmp2 // N_Vector, optional (unused here)
1152        ) => {
1153          // here we need to calculate partial derivatives for the calculated variables y as well as for the parameters
1154          var nodeValues = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1155          var d = calculatedVariables.Length + parameterValues.Length; // dimensionality of gradient
1156          // first collect variable values
1157          foreach (var tree in trees) {
1158            foreach (var n in tree.IterateNodesPrefix()) {
1159              if (IsVariableNode(n)) {
1160                // for variables and latent variables we use values supplied in y and init gradient vectors accordingly
1161                var varName = n.Symbol.Name;
1162                var varIdx = Array.IndexOf(calculatedVariables, varName); // TODO: perf!
[16268]1163                if (varIdx < 0) throw new InvalidProgramException();
1164
[16251]1165                var y_i = CVODES.NV_Get_Ith_S(y, (long)varIdx);
1166                var gArr = new double[d]; // backing array
1167                gArr[varIdx] = 1.0;
1168                var g = new Vector(gArr);
1169                nodeValues.Add(n, Tuple.Create(y_i, g));
1170              }
1171            }
1172          }
1173          // then collect constants
1174          int pIdx = 0;
1175          foreach (var tree in trees) {
1176            foreach (var n in tree.IterateNodesPrefix()) {
1177              if (IsConstantNode(n)) {
1178                var gArr = new double[d];
1179                gArr[calculatedVariables.Length + pIdx] = 1.0;
1180                var g = new Vector(gArr);
1181                nodeValues.Add(n, Tuple.Create(parameterValues[pIdx], g));
1182                pIdx++;
1183              }
1184            }
1185          }
1186          // gradient vector is [∂f/∂y_1, ∂f/∂y_2, ..., ∂f/∂y_N, ∂f/∂p_1, ..., ∂f/∂p_K]
1187
1188
1189          for (pIdx = 0; pIdx < Ns; pIdx++) {
1190            unsafe {
1191              var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1192              CVODES.N_VConst_Serial(0.0, sDot_pi);
1193            }
1194          }
1195
1196          for (int i = 0; i < trees.Length; i++) {
1197            var tree = trees[i];
1198            var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
1199            var g = res.Item2;
1200
1201
1202            // update ySdot = (∂f/∂y)s_i(t) + ∂f/∂p_i
1203
1204            for (pIdx = 0; pIdx < Ns; pIdx++) {
1205              unsafe {
1206                var sDot_pi = *((IntPtr*)ySdot.ToPointer() + pIdx);
1207                var s_pi = *((IntPtr*)yS.ToPointer() + pIdx);
1208
1209                var v = CVODES.NV_Get_Ith_S(sDot_pi, i);
1210                // (∂f/∂y)s_i(t)
1211                var p = 0.0;
1212                for (int yIdx = 0; yIdx < calculatedVariables.Length; yIdx++) {
1213                  p += g[yIdx] * CVODES.NV_Get_Ith_S(s_pi, yIdx);
1214                }
1215                // + ∂f/∂p_i
1216                CVODES.NV_Set_Ith_S(sDot_pi, i, v + p + g[calculatedVariables.Length + pIdx]);
1217              }
1218            }
1219
1220          }
1221          return 0; // on success
1222        };
1223    }
[16597]1224    */
[16398]1225    #endregion
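    // Editorial note on the disabled CVODES code above: the forward sensitivities s_i(t) = ∂y/∂p_i
    // satisfy the ODE  ds_i/dt = (∂f/∂y)·s_i(t) + ∂f/∂p_i,  which CreateSensitivityRhs assembles
    // from the combined gradient vector [∂f/∂y_1, ..., ∂f/∂y_N, ∂f/∂p_1, ..., ∂f/∂p_K].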
[16251]1226
1227    private static void IntegrateHL(
1228      ISymbolicExpressionTree[] trees,
1229      string[] calculatedVariables, // names of integrated variables
[16601]1230      NodeValueLookup nodeValues,
[16250]1231      int numericIntegrationSteps) {
[16251]1232
1233
[16597]1234      double[] deltaF = new double[calculatedVariables.Length];
1235      Vector[] deltaG = new Vector[calculatedVariables.Length];
[16251]1236
[16250]1237      double h = 1.0 / numericIntegrationSteps;
1238      for (int step = 0; step < numericIntegrationSteps; step++) {
[16601]1239
1240        // evaluate all trees
[16251]1241        for (int i = 0; i < trees.Length; i++) {
1242          var tree = trees[i];
1243
1244          // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
[16597]1245          double f; Vector g;
1246          InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
1247          deltaF[i] = f;
1248          deltaG[i] = g;
[16250]1249        }
1250
[16251]1251        // update variableValues for the next step (explicit Euler step: y_{t+h} = y_t + h * f(y_t))
[16597]1252        for (int i = 0; i < trees.Length; i++) {
1253          var varName = calculatedVariables[i];
[16601]1254          var oldVal = nodeValues.GetVariableValue(varName);
[16604]1255          nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2.Add(deltaG[i].Scale(h)));
[16250]1256        }
[16601]1257      }
1258    }
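
    // Minimal editorial sketch (not called by the original code) of the explicit Euler scheme
    // used by IntegrateHL above, applied to the scalar ODE dy/dt = -y over one unit time step.
    // For a large number of steps the result approaches the exact solution y0 * Math.Exp(-1.0).
    private static double ExplicitEulerExample(double y0, int numericIntegrationSteps) {
      double h = 1.0 / numericIntegrationSteps;  // step size, as in IntegrateHL
      double y = y0;
      for (int step = 0; step < numericIntegrationSteps; step++) {
        y += h * (-y);  // y_{t+h} = y_t + h * f(y_t) with f(y) = -y
      }
      return y;
    }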
[16398]1259
[16602]1260    // TODO: use an existing interpreter implementation instead
[16601]1261    private static double InterpretRec(ISymbolicExpressionTreeNode node, NodeValueLookup nodeValues) {
[16603]1262      if (node is ConstantTreeNode constNode) {
1263        return constNode.Value;
[16604]1264      } else if (node is VariableTreeNode) {
[16602]1265        return nodeValues.NodeValue(node);
1266      } else if (node.Symbol is Addition) {
1267        var f = InterpretRec(node.GetSubtree(0), nodeValues);
[16652]1268        for (int i = 1; i < node.SubtreeCount; i++) {
1269          f += InterpretRec(node.GetSubtree(i), nodeValues);
1270        }
1271        return f;
[16602]1272      } else if (node.Symbol is Multiplication) {
1273        var f = InterpretRec(node.GetSubtree(0), nodeValues);
[16652]1274        for (int i = 1; i < node.SubtreeCount; i++) {
1275          f *= InterpretRec(node.GetSubtree(i), nodeValues);
1276        }
1277        return f;
[16602]1278      } else if (node.Symbol is Subtraction) {
1279        if (node.SubtreeCount == 1) {
[16652]1280          return -InterpretRec(node.GetSubtree(0), nodeValues);
[16602]1281        } else {
1282          var f = InterpretRec(node.GetSubtree(0), nodeValues);
[16652]1283          for (int i = 1; i < node.SubtreeCount; i++) {
1284            f -= InterpretRec(node.GetSubtree(i), nodeValues);
1285          }
1286          return f;
[16602]1287        }
1288      } else if (node.Symbol is Division) {
[16610]1289        if (node.SubtreeCount == 1) {
1290          var f = InterpretRec(node.GetSubtree(0), nodeValues);
1291          // protected division
1292          if (f.IsAlmost(0.0)) {
1293            return 0;
1294          } else {
1295            return 1.0 / f;
1296          }
[16602]1297        } else {
[16610]1298          var f = InterpretRec(node.GetSubtree(0), nodeValues);
[16652]1299          for (int i = 1; i < node.SubtreeCount; i++) {
1300            var g = InterpretRec(node.GetSubtree(i), nodeValues);
1301            // protected division
1302            if (g.IsAlmost(0.0)) {
1303              return 0;
1304            } else {
1305              f /= g;
1306            }
[16610]1307          }
[16652]1308          return f;
[16602]1309        }
1310      } else if (node.Symbol is Sine) {
[16616]1311        Assert(node.SubtreeCount == 1);
[16610]1312
[16602]1313        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1314        return Math.Sin(f);
1315      } else if (node.Symbol is Cosine) {
[16616]1316        Assert(node.SubtreeCount == 1);
[16610]1317
[16602]1318        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1319        return Math.Cos(f);
1320      } else if (node.Symbol is Square) {
[16616]1321        Assert(node.SubtreeCount == 1);
[16610]1322
[16602]1323        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1324        return f * f;
[16610]1325      } else if (node.Symbol is Exponential) {
[16616]1326        Assert(node.SubtreeCount == 1);
[16610]1327
1328        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1329        return Math.Exp(f);
1330      } else if (node.Symbol is Logarithm) {
[16616]1331        Assert(node.SubtreeCount == 1);
[16610]1332
1333        var f = InterpretRec(node.GetSubtree(0), nodeValues);
1334        return Math.Log(f);
[16602]1335      } else throw new NotSupportedException("unsupported symbol");
[16250]1336    }
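
    // Editorial sketch (hypothetical helper): the protected division used by InterpretRec above
    // returns 0 whenever the denominator is (almost) zero instead of producing ±Infinity or NaN.
    private static double ProtectedDivision(double f, double g) {
      return g.IsAlmost(0.0) ? 0.0 : f / g;  // mirrors the Division branch of InterpretRec
    }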
1337
[16616]1338    private static void Assert(bool cond) {
1339#if DEBUG
1340      if (!cond) throw new InvalidOperationException("Assertion failed");
1341#endif
1342    }
1343
[16597]1344    private static void InterpretRec(
[15964]1345      ISymbolicExpressionTreeNode node,
[16601]1346      NodeValueLookup nodeValues,      // contains value and gradient vector for a node (variables and constants only)
[16600]1347      out double z,
1348      out Vector dz
[16597]1349      ) {
[16600]1350      double f, g;
1351      Vector df, dg;
[16602]1352      if (node.Symbol is Constant || node.Symbol is Variable) {
1353        z = nodeValues.NodeValue(node);
[16604]1354        dz = Vector.CreateNew(nodeValues.NodeGradient(node)); // original gradient vectors are never changed by evaluation
[16602]1355      } else if (node.Symbol is Addition) {
1356        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
[16652]1357        for (int i = 1; i < node.SubtreeCount; i++) {
1358          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1359          f = f + g;
1360          df = df.Add(dg);
1361        }
1362        z = f;
1363        dz = df;
[16602]1364      } else if (node.Symbol is Multiplication) {
1365        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
[16652]1366        for (int i = 1; i < node.SubtreeCount; i++) {
1367          InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1368          df = df.Scale(g).Add(dg.Scale(f));  // product rule: (f*g)' = f'*g + f*g' (uses f before it is updated)
1369          f = f * g;
1370        }
1371        z = f;
1372        dz = df;
[16602]1373      } else if (node.Symbol is Subtraction) {
1374        if (node.SubtreeCount == 1) {
1375          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1376          z = -f;
[16604]1377          dz = df.Scale(-1.0);
[16602]1378        } else {
1379          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
[16652]1380          for (int i = 1; i < node.SubtreeCount; i++) {
1381            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1382            f = f - g;
1383            df = df.Subtract(dg);
1384          }
1385          z = f;
1386          dz = df;
[16602]1387        }
1388      } else if (node.Symbol is Division) {
[16610]1389        if (node.SubtreeCount == 1) {
1390          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1391          // protected division
1392          if (f.IsAlmost(0.0)) {
1393            z = 0;
1394            dz = Vector.Zero;
1395          } else {
1396            z = 1.0 / f;
[16652]1397            dz = df.Scale(-1 * z * z);
[16610]1398          }
[16602]1399        } else {
[16610]1400          InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
[16652]1401          for (int i = 1; i < node.SubtreeCount; i++) {
1402            InterpretRec(node.GetSubtree(i), nodeValues, out g, out dg);
1403            // protected division
1404            if (g.IsAlmost(0.0)) {
1405              z = 0;
1406              dz = Vector.Zero;
1407              return;
1408            } else {
1409              var inv_g = 1.0 / g;
1410              df = dg.Scale(-f * inv_g * inv_g).Add(df.Scale(inv_g));  // quotient rule: (f/g)' = f'/g - f*g'/g² (uses f before it is updated)
1411              f = f * inv_g;
1412            }
[16610]1413          }
[16652]1414          z = f;
1415          dz = df;
[16602]1416        }
1417      } else if (node.Symbol is Sine) {
[16616]1418        Assert(node.SubtreeCount == 1);
[16602]1419        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1420        z = Math.Sin(f);
[16604]1421        dz = df.Scale(Math.Cos(f));
[16602]1422      } else if (node.Symbol is Cosine) {
[16616]1423        Assert(node.SubtreeCount == 1);
[16602]1424        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1425        z = Math.Cos(f);
[16604]1426        dz = df.Scale(-Math.Sin(f));
[16602]1427      } else if (node.Symbol is Square) {
[16616]1428        Assert(node.SubtreeCount == 1);
[16602]1429        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1430        z = f * f;
[16604]1431        dz = df.Scale(2.0 * f);
[16610]1432      } else if (node.Symbol is Exponential) {
[16616]1433        Assert(node.SubtreeCount == 1);
[16610]1434        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1435        z = Math.Exp(f);
1436        dz = df.Scale(Math.Exp(f));
1437      } else if (node.Symbol is Logarithm) {
[16616]1438        Assert(node.SubtreeCount == 1);
[16610]1439        InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
1440        z = Math.Log(f);
1441        dz = df.Scale(1.0 / f);
[16602]1442      } else {
1443        throw new NotSupportedException("unsupported symbol");
[15964]1444      }
1445    }
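
    // Editorial sketch (hypothetical, not called anywhere): the forward-mode differentiation rule
    // implemented by the gradient overload of InterpretRec, written out for z = x * sin(x) with
    // scalar duals; the pairs (f, df) and (g, dg) mirror the (double, Vector) tuples used above.
    private static void ForwardModeExample(double x, out double z, out double dz) {
      double f = x, df = 1.0;                          // leaf: variable x, dx/dx = 1
      double g = Math.Sin(f), dg = df * Math.Cos(f);   // chain rule for the Sine node
      z = f * g;                                       // Multiplication node
      dz = df * g + f * dg;                            // product rule: (f*g)' = f'*g + f*g'
    }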
[16602]1446
[15968]1447    #endregion
[15964]1448
1449    #region events
[15968]1450    /*
1451     * Dependencies between parameters:
1452     *
1453     * ProblemData
1454     *    |
1455     *    V
[15970]1456     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
1457     *               |   |                 |                   |
1458     *               V   V                 |                   |
1459     *             Grammar <---------------+-------------------
[15968]1460     *                |
1461     *                V
1462     *            Encoding
1463     */
[15964]1464    private void RegisterEventHandlers() {
[15968]1465      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
[16215]1466      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
[15968]1467
1468      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
[16215]1469      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
[15968]1470
1471      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
[16215]1472      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
[15968]1473
1474      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
[15970]1475
1476      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
[15964]1477    }
1478
[15970]1479    private void NumLatentVariablesChanged(object sender, EventArgs e) {
1480      UpdateGrammarAndEncoding();
1481    }
1482
[15968]1483    private void MaximumLengthChanged(object sender, EventArgs e) {
1484      UpdateGrammarAndEncoding();
1485    }
1486
1487    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
1488      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
1489    }
1490
[16268]1491    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
[15968]1492      UpdateGrammarAndEncoding();
1493    }
1494
1495    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
1496      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
1497    }
1498
[16268]1499    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<IndexedItem<StringValue>> e) {
[15968]1500      UpdateGrammarAndEncoding();
1501    }
1502
[15964]1503    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
[15968]1504      ProblemDataParameter.Value.Changed += ProblemData_Changed;
[15964]1505      OnProblemDataChanged();
1506      OnReset();
1507    }
1508
1509    private void ProblemData_Changed(object sender, EventArgs e) {
[15968]1510      OnProblemDataChanged();
[15964]1511      OnReset();
1512    }
1513
1514    private void OnProblemDataChanged() {
[15968]1515      UpdateTargetVariables();        // implicitly updates other dependent parameters
[15964]1516      var handler = ProblemDataChanged;
[16215]1517      if (handler != null) handler(this, EventArgs.Empty);
[15964]1518    }
1519
[15968]1520    #endregion
1521
1522    #region  helper
1523
[16660]1524    private static IEnumerable<T> EveryNth<T>(IEnumerable<T> xs, int step) {
1525      var e = xs.GetEnumerator();
1526      while (e.MoveNext()) {
1527        yield return e.Current; // yield the current element, then skip step-1 elements
1528        for (int i = 1; i < step; i++) {
1529          if (!e.MoveNext()) yield break;
1530        }
1531      }
1532    }
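    // Hypothetical usage sketch: EveryNth(new[] { 0, 1, 2, 3, 4, 5 }, 2) yields 0, 2, 4,
    // i.e. a stride of exactly 'step' elements starting with the first element.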
1533
[15968]1534    private void InitAllParameters() {
[16602]1535      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
[15968]1536    }
1537
[16268]1538    private ReadOnlyCheckedItemList<StringValue> CreateFunctionSet() {
1539      var l = new CheckedItemList<StringValue>();
[16602]1540      l.Add(new StringValue("Addition").AsReadOnly());
1541      l.Add(new StringValue("Multiplication").AsReadOnly());
1542      l.Add(new StringValue("Division").AsReadOnly());
1543      l.Add(new StringValue("Subtraction").AsReadOnly());
1544      l.Add(new StringValue("Sine").AsReadOnly());
1545      l.Add(new StringValue("Cosine").AsReadOnly());
1546      l.Add(new StringValue("Square").AsReadOnly());
[15968]1547      return l.AsReadOnly();
1548    }
1549
1550    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
[16602]1551      // return n.Symbol.Name[0] == 'θ';
1552      return n is ConstantTreeNode;
[15968]1553    }
[16601]1554    private static double GetConstantValue(ISymbolicExpressionTreeNode n) {
[16602]1555      return ((ConstantTreeNode)n).Value;
[16601]1556    }
[15970]1557    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
[16399]1558      return n.Symbol.Name[0] == 'λ';
[15970]1559    }
[16251]1560    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
1561      return (n.SubtreeCount == 0) && !IsConstantNode(n) && !IsLatentVariableNode(n);
1562    }
[16601]1563    private static string GetVariableName(ISymbolicExpressionTreeNode n) {
[16602]1564      return ((VariableTreeNode)n).VariableName;
[16601]1565    }
[15968]1566
1567    private void UpdateTargetVariables() {
[16268]1568      var currentlySelectedVariables = TargetVariables.CheckedItems
1569        .OrderBy(i => i.Index)
1570        .Select(i => i.Value.Value)
1571        .ToArray();
[15968]1572
[16268]1573      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
[15968]1574      // restore the checked state of previously selected variables
[16597]1575      foreach (var item in newVariablesList) {
1576        newVariablesList.SetItemCheckedState(item, currentlySelectedVariables.Contains(item.Value));
1577      }
1582      TargetVariablesParameter.Value = newVariablesList;
1583    }
1584
1585    private void UpdateGrammarAndEncoding() {
1586      var encoding = new MultiEncoding();
1587      var g = CreateGrammar();
[16215]1588      foreach (var targetVar in TargetVariables.CheckedItems) {
1589        encoding = encoding.Add(CreateTreeEncoding(targetVar + "_tree", g)); // only limit by length
1590      }
[16215]1591      for (int i = 1; i <= NumberOfLatentVariables; i++) {
1592        encoding = encoding.Add(CreateTreeEncoding("λ" + i + "_tree", g));
1593      }
1594      Encoding = encoding;
1595    }
1596
1597    // creates the tree encoding that is shared by target-variable and latent-variable trees
1598    private SymbolicExpressionTreeEncoding CreateTreeEncoding(string name, ISymbolicExpressionGrammar g) {
1599      var e = new SymbolicExpressionTreeEncoding(name, g, MaximumLength, MaximumLength);
1600      // make sure our multi-manipulator is the only manipulator
1601      var multiManipulator = e.Operators.First(op => op is MultiSymbolicExpressionTreeManipulator);
1602      var filteredOperators = e.Operators.Where(op => !(op is IManipulator)).ToArray();
1603      e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
1604
1605      // set the crossover probability to reduce the likelihood that multiple trees are crossed at the same time
1606      foreach (var xover in e.Operators.OfType<SubtreeCrossover>()) {
1607        xover.CrossoverProbability.Value = 0.3;
1608      }
1609      return e;
1610    }
1620
1621    private ISymbolicExpressionGrammar CreateGrammar() {
[16602]1622      var grammar = new TypeCoherentExpressionGrammar();
1623      grammar.StartGrammarManipulation();
1624
1625      var problemData = ProblemData;
1626      var ds = problemData.Dataset;
1627      grammar.MaximumFunctionArguments = 0;
1628      grammar.MaximumFunctionDefinitions = 0;
1629      var allowedVariables = problemData.AllowedInputVariables.Concat(TargetVariables.CheckedItems.Select(chk => chk.Value.Value));
1630      foreach (var varSymbol in grammar.Symbols.OfType<HeuristicLab.Problems.DataAnalysis.Symbolic.VariableBase>()) {
1631        if (!varSymbol.Fixed) {
1632          varSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<double>(x));
1633          varSymbol.VariableNames = allowedVariables.Where(x => ds.VariableHasType<double>(x));
1634        }
[16597]1635      }
[16602]1636      foreach (var factorSymbol in grammar.Symbols.OfType<BinaryFactorVariable>()) {
1637        if (!factorSymbol.Fixed) {
1638          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
1639          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
1640          factorSymbol.VariableValues = factorSymbol.VariableNames
1641            .ToDictionary(varName => varName, varName => ds.GetStringValues(varName).Distinct().ToList());
1642        }
[16597]1643      }
[16602]1644      foreach (var factorSymbol in grammar.Symbols.OfType<FactorVariable>()) {
1645        if (!factorSymbol.Fixed) {
1646          factorSymbol.AllVariableNames = problemData.InputVariables.Select(x => x.Value).Where(x => ds.VariableHasType<string>(x));
1647          factorSymbol.VariableNames = problemData.AllowedInputVariables.Where(x => ds.VariableHasType<string>(x));
1648          factorSymbol.VariableValues = factorSymbol.VariableNames
1649            .ToDictionary(varName => varName,
1650            varName => ds.GetStringValues(varName).Distinct()
1651            .Select((n, i) => Tuple.Create(n, i))
1652            .ToDictionary(tup => tup.Item1, tup => tup.Item2));
1653        }
[15964]1654      }
[15970]1655
[16602]1656      grammar.ConfigureAsDefaultRegressionGrammar();
1657      grammar.GetSymbol("Logarithm").Enabled = false; // not supported yet
1658      grammar.GetSymbol("Exponential").Enabled = false; // not supported yet
[15970]1659
[16602]1660      // configure initialization of constants
1661      var constSy = (Constant)grammar.GetSymbol("Constant");
1662      // max and min are only relevant for initialization
1663      constSy.MaxValue = +1.0e-1; // small initial values for constant opt
1664      constSy.MinValue = -1.0e-1;
1665      constSy.MultiplicativeManipulatorSigma = 1.0; // allow large jumps for manipulation
1666      constSy.ManipulatorMu = 0.0;
1667      constSy.ManipulatorSigma = 1.0; // allow large jumps
[15968]1668
[16602]1669      // configure initialization of variables
1670      var varSy = (Variable)grammar.GetSymbol("Variable");
1671      // fix variable weights to 1.0
1672      varSy.WeightMu = 1.0;
1673      varSy.WeightSigma = 0.0;
1674      varSy.WeightManipulatorMu = 0.0;
1675      varSy.WeightManipulatorSigma = 0.0;
1676      varSy.MultiplicativeWeightManipulatorSigma = 0.0;
[16251]1677
[16602]1678      foreach (var f in FunctionSet) {
1679        grammar.GetSymbol(f.Value).Enabled = FunctionSet.ItemChecked(f);
[16399]1680      }
1681
[16602]1682      grammar.FinishedGrammarManipulation();
1683      return grammar;
1684      // // whenever ProblemData is changed we create a new grammar with the necessary symbols
1685      // var g = new SimpleSymbolicExpressionGrammar();
1686      // var unaryFunc = new string[] { "sin", "cos", "sqr" };
1687      // var binaryFunc = new string[] { "+", "-", "*", "%" };
1688      // foreach (var func in unaryFunc) {
1689      //   if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
1690      // }
1691      // foreach (var func in binaryFunc) {
1692      //   if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
1693      // }
1694      //
1695      // foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
1696      //   g.AddTerminalSymbol(variableName);
1697      //
1698      // // generate symbols for numeric parameters for which the value is optimized using AutoDiff
1699      // // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
1700      // var numericConstantsFactor = 2.0;
1701      // for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
1702      //   g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
1703      // }
1704      //
1705      // // generate symbols for latent variables
1706      // for (int i = 1; i <= NumberOfLatentVariables; i++) {
1707      //   g.AddTerminalSymbol("λ" + i); // numeric parameter for which the value is optimized using AutoDiff
1708      // }
1709      //
1710      // return g;
[16251]1711    }
[15964]1712    #endregion
1713
[16601]1714
[15964]1715    #region Import & Export
1716    public void Load(IRegressionProblemData data) {
1717      Name = data.Name;
1718      Description = data.Description;
1719      ProblemData = data;
1720    }
1721
1722    public IRegressionProblemData Export() {
1723      return ProblemData;
1724    }
[16601]1725    #endregion
[16600]1726
[16601]1727
1728    // TODO: for integration we only need a subset of the data that is required for optimization
1729
[16600]1730    public class OptimizationData {
1731      public readonly ISymbolicExpressionTree[] trees;
1732      public readonly string[] targetVariables;
1733      public readonly IRegressionProblemData problemData;
[16601]1734      public readonly double[][] targetValues;
[16603]1735      public readonly double[] inverseStandardDeviation;
[16600]1736      public readonly IntRange[] episodes;
1737      public readonly int numericIntegrationSteps;
1738      public readonly string[] latentVariables;
1739      public readonly string odeSolver;
[16601]1740      public readonly NodeValueLookup nodeValueLookup;
[16603]1741      public readonly int[] rows;
[16610]1742      internal readonly string[] variables;
[16600]1743
[16610]1744      public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, string[] inputVariables,
1745        IRegressionProblemData problemData,
[16601]1746        double[][] targetValues,
1747        IntRange[] episodes,
1748        int numericIntegrationSteps, string[] latentVariables, string odeSolver) {
[16600]1749        this.trees = trees;
1750        this.targetVariables = targetVars;
1751        this.problemData = problemData;
1752        this.targetValues = targetValues;
[16610]1753        this.variables = inputVariables;
[16616]1754        if (targetValues != null) {
1755          this.inverseStandardDeviation = new double[targetValues.Length];
1756          for (int i = 0; i < targetValues.Length; i++) {
1757            // calculate the standard deviation for each episode separately and average over episodes
1758            var epStartIdx = 0;
1759            var stdevs = new List<double>();
1760            foreach (var ep in episodes) {
1761              var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
1762              stdevs.Add(epValues.StandardDeviation());
1763              epStartIdx += ep.Size;
1764            }
1765            inverseStandardDeviation[i] = 1.0 / stdevs.Average();
1766          }
1767        } else
1768          this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
[16600]1769        this.episodes = episodes;
1770        this.numericIntegrationSteps = numericIntegrationSteps;
1771        this.latentVariables = latentVariables;
1772        this.odeSolver = odeSolver;
[16601]1773        this.nodeValueLookup = new NodeValueLookup(trees);
[16604]1774        this.rows = episodes.SelectMany(ep => Enumerable.Range(ep.Start, ep.Size)).ToArray();
[16600]1775      }
1776    }
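
    // Hypothetical construction sketch for OptimizationData (all identifiers assumed; shown as a
    // comment because trees, problemData, and targetValues are produced elsewhere):
    //   var optData = new OptimizationData(trees, new[] { "y" }, inputVariables, problemData,
    //     targetValues, new[] { new IntRange(0, 100) },
    //     numericIntegrationSteps: 10, latentVariables: new string[0], odeSolver: "HeuristicLab");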
[15964]1777
[16601]1778    public class NodeValueLookup {
1779      private readonly Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> node2val = new Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>>();
1780      private readonly Dictionary<string, List<ISymbolicExpressionTreeNode>> name2nodes = new Dictionary<string, List<ISymbolicExpressionTreeNode>>();
[16603]1781      private readonly ConstantTreeNode[] constantNodes;
1782      private readonly Vector[] constantGradientVectors;
[16601]1783
[16603]1784      // private readonly Dictionary<int, ISymbolicExpressionTreeNode> paramIdx2node = new Dictionary<int, ISymbolicExpressionTreeNode>();
1785
[16601]1786      public double NodeValue(ISymbolicExpressionTreeNode node) => node2val[node].Item1;
1787      public Vector NodeGradient(ISymbolicExpressionTreeNode node) => node2val[node].Item2;
1788
1789      public NodeValueLookup(ISymbolicExpressionTree[] trees) {
1790
[16603]1791        this.constantNodes = trees.SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>()).ToArray();
1792        constantGradientVectors = new Vector[constantNodes.Length];
[16604]1793        for (int paramIdx = 0; paramIdx < constantNodes.Length; paramIdx++) {
[16603]1794          constantGradientVectors[paramIdx] = Vector.CreateIndicator(length: constantNodes.Length, idx: paramIdx);
1795
1796          var node = constantNodes[paramIdx];
1797          node2val[node] = Tuple.Create(node.Value, constantGradientVectors[paramIdx]);
[16601]1798        }
1799
1800        foreach (var tree in trees) {
1801          foreach (var node in tree.IterateNodesPrefix().Where(IsVariableNode)) {
1802            var varName = GetVariableName(node);
1803            if (!name2nodes.TryGetValue(varName, out List<ISymbolicExpressionTreeNode> nodes)) {
1804              nodes = new List<ISymbolicExpressionTreeNode>();
1805              name2nodes.Add(varName, nodes);
1806            }
1807            nodes.Add(node);
[16602]1808            SetVariableValue(varName, 0.0);  // this value is updated in the prediction loop
[16601]1809          }
1810        }
1811      }
1812
[16603]1813      public int ParameterCount => constantNodes.Length;
[16601]1814
1815      public void SetVariableValue(string variableName, double val) {
1816        SetVariableValue(variableName, val, Vector.Zero);
1817      }
1818      public Tuple<double, Vector> GetVariableValue(string variableName) {
1819        return node2val[name2nodes[variableName].First()];
1820      }
1821      public void SetVariableValue(string variableName, double val, Vector dVal) {
1822        if (name2nodes.TryGetValue(variableName, out List<ISymbolicExpressionTreeNode> nodes)) {
1823          nodes.ForEach(n => node2val[n] = Tuple.Create(val, dVal));
1824        } else {
[16602]1825          var fakeNode = new VariableTreeNode(new Variable());
[16610]1826          fakeNode.Weight = 1.0;
1827          fakeNode.VariableName = variableName;
[16601]1828          var newNodeList = new List<ISymbolicExpressionTreeNode>();
1829          newNodeList.Add(fakeNode);
1830          name2nodes.Add(variableName, newNodeList);
1831          node2val[fakeNode] = Tuple.Create(val, dVal);
1832        }
1833      }
1834
1835      internal void UpdateParamValues(double[] x) {
[16603]1836        for (int i = 0; i < x.Length; i++) {
1837          constantNodes[i].Value = x[i];
1838          node2val[constantNodes[i]] = Tuple.Create(x[i], constantGradientVectors[i]);
[16601]1839        }
1840      }
1841    }
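
    // Hypothetical usage sketch for NodeValueLookup (identifiers assumed):
    //   var lookup = new NodeValueLookup(trees);
    //   lookup.SetVariableValue("x", 1.5);        // sets the value and a zero gradient for all "x" nodes
    //   lookup.UpdateParamValues(newParamValues); // writes optimized constant values back into the trees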
[15964]1842  }
1843}