
source: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs @ 16225

Last change on this file since 16225 was 16225, checked in by gkronber, 6 years ago

#2925: first working version of CVODES integration

File size: 55.5 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using HeuristicLab.Analysis;
using HeuristicLab.Collections;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.Instances;
using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;

namespace HeuristicLab.Problems.DynamicalSystemsModelling {

  public class Vector {
    public readonly static Vector Zero = new Vector(new double[0]);

    public static Vector operator +(Vector a, Vector b) {
      if (a == Zero) return b;
      if (b == Zero) return a;
      Debug.Assert(a.arr.Length == b.arr.Length);
      var res = new double[a.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = a.arr[i] + b.arr[i];
      return new Vector(res);
    }
    public static Vector operator -(Vector a, Vector b) {
      if (b == Zero) return a;
      if (a == Zero) return -b;
      Debug.Assert(a.arr.Length == b.arr.Length);
      var res = new double[a.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = a.arr[i] - b.arr[i];
      return new Vector(res);
    }
    public static Vector operator -(Vector v) {
      if (v == Zero) return Zero;
      var res = new double[v.arr.Length]; // allocate a fresh array instead of negating the operand in place
      for (int i = 0; i < res.Length; i++)
        res[i] = -v.arr[i];
      return new Vector(res);
    }

    public static Vector operator *(double s, Vector v) {
      if (v == Zero) return Zero;
      if (s == 0.0) return Zero;
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = s * v.arr[i];
      return new Vector(res);
    }

    public static Vector operator *(Vector v, double s) {
      return s * v;
    }
    public static Vector operator *(Vector u, Vector v) {
      if (v == Zero) return Zero;
      if (u == Zero) return Zero;
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = u.arr[i] * v.arr[i];
      return new Vector(res);
    }
    public static Vector operator /(double s, Vector v) {
      if (s == 0.0) return Zero;
      if (v == Zero) throw new ArgumentException("Division by zero vector");
      var res = new double[v.arr.Length];
      for (int i = 0; i < res.Length; i++)
        res[i] = s / v.arr[i]; // elementwise s / v[i]
      return new Vector(res);
    }
    public static Vector operator /(Vector v, double s) {
      return v * (1.0 / s); // parentheses matter: (v * 1.0) / s would re-enter this operator and recurse forever
    }

    public static Vector Sin(Vector s) {
      var res = new double[s.arr.Length];
      for (int i = 0; i < res.Length; i++) res[i] = Math.Sin(s.arr[i]);
      return new Vector(res);
    }
    public static Vector Cos(Vector s) {
      var res = new double[s.arr.Length];
      for (int i = 0; i < res.Length; i++) res[i] = Math.Cos(s.arr[i]);
      return new Vector(res);
    }

    private readonly double[] arr; // backing array

    public Vector(double[] v) {
      this.arr = v;
    }

    public void CopyTo(double[] target) {
      Debug.Assert(arr.Length <= target.Length);
      Array.Copy(arr, target, arr.Length);
    }
  }

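  // Illustrative usage sketch (added for clarity; not part of the original algorithm):
  // Vector is the gradient half of a (value, gradient) pair used for forward-mode
  // auto-differentiation. Each parameter θ_i is seeded with a unit gradient vector, and
  // the operators above propagate d(value)/dθ through arithmetic. The example evaluates
  // f = θ0 * x + θ1 together with its gradient (x, 1).
  internal static class VectorForwardGradientSketch {
    public static Tuple<double, Vector> Eval(double x, double[] theta) {
      // seed gradients: dθ0/dθ = (1, 0), dθ1/dθ = (0, 1)
      var g0 = new double[theta.Length]; g0[0] = 1.0;
      var g1 = new double[theta.Length]; g1[1] = 1.0;
      var p0 = Tuple.Create(theta[0], new Vector(g0));
      var p1 = Tuple.Create(theta[1], new Vector(g1));
      // product rule for θ0 * x; the plain input x carries gradient Vector.Zero
      var prod = Tuple.Create(p0.Item1 * x, p0.Item2 * x);
      // sum rule for (θ0 * x) + θ1
      return Tuple.Create(prod.Item1 + p1.Item1, prod.Item2 + p1.Item2);
      // e.g. x = 3, theta = { 2, 5 }: value = 11, gradient = [3, 1]
    }
  }
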
  [Item("Dynamical Systems Modelling Problem", "TODO")]
  [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
  [StorableClass]
  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {

    // CVODES types
    public enum MultistepMethod : int { CV_ADAMS = 1, CV_BDF = 2 };
    public enum NonlinearSolverIteration : int { CV_NEWTON = 1, CV_FUNCTIONAL = 2 };


    /* itask */
    public const int CV_NORMAL = 1;
    public const int CV_ONE_STEP = 2;

    /* ism */
    public const int CV_SIMULTANEOUS = 1;
    public const int CV_STAGGERED = 2;
    public const int CV_STAGGERED1 = 3;

    /* DQtype */
    public const int CV_CENTERED = 1;
    public const int CV_FORWARD = 2;

    /* interp */
    public const int CV_HERMITE = 1;
    public const int CV_POLYNOMIAL = 2;

    /*
     * ----------------------------------------
     * CVODES return flags
     * ----------------------------------------
     */

    public const int CV_SUCCESS = 0;
    public const int CV_TSTOP_RETURN = 1;
    public const int CV_ROOT_RETURN = 2;

    public const int CV_WARNING = 99;

    public const int CV_TOO_MUCH_WORK = -1;
    public const int CV_TOO_MUCH_ACC = -2;
    public const int CV_ERR_FAILURE = -3;
    public const int CV_CONV_FAILURE = -4;

    public const int CV_LINIT_FAIL = -5;
    public const int CV_LSETUP_FAIL = -6;
    public const int CV_LSOLVE_FAIL = -7;
    public const int CV_RHSFUNC_FAIL = -8;
    public const int CV_FIRST_RHSFUNC_ERR = -9;
    public const int CV_REPTD_RHSFUNC_ERR = -10;
    public const int CV_UNREC_RHSFUNC_ERR = -11;
    public const int CV_RTFUNC_FAIL = -12;
    public const int CV_CONSTR_FAIL = -13;

    public const int CV_MEM_FAIL = -20;
    public const int CV_MEM_NULL = -21;
    public const int CV_ILL_INPUT = -22;
    public const int CV_NO_MALLOC = -23;
    public const int CV_BAD_K = -24;
    public const int CV_BAD_T = -25;
    public const int CV_BAD_DKY = -26;
    public const int CV_TOO_CLOSE = -27;

    public const int CV_NO_QUAD = -30;
    public const int CV_QRHSFUNC_FAIL = -31;
    public const int CV_FIRST_QRHSFUNC_ERR = -32;
    public const int CV_REPTD_QRHSFUNC_ERR = -33;
    public const int CV_UNREC_QRHSFUNC_ERR = -34;

    public const int CV_NO_SENS = -40;
    public const int CV_SRHSFUNC_FAIL = -41;
    public const int CV_FIRST_SRHSFUNC_ERR = -42;
    public const int CV_REPTD_SRHSFUNC_ERR = -43;
    public const int CV_UNREC_SRHSFUNC_ERR = -44;

    public const int CV_BAD_IS = -45;

    public const int CV_NO_QUADSENS = -50;
    public const int CV_QSRHSFUNC_FAIL = -51;
    public const int CV_FIRST_QSRHSFUNC_ERR = -52;
    public const int CV_REPTD_QSRHSFUNC_ERR = -53;
    public const int CV_UNREC_QSRHSFUNC_ERR = -54;

    /*
     * ----------------------------------------
     * CVODEA return flags
     * ----------------------------------------
     */

    public const int CV_NO_ADJ = -101;
    public const int CV_NO_FWD = -102;
    public const int CV_NO_BCK = -103;
    public const int CV_BAD_TB0 = -104;
    public const int CV_REIFWD_FAIL = -105;
    public const int CV_FWD_FAIL = -106;
    public const int CV_GETY_BADT = -107;

    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public delegate int CVRhsFunc(
        double t, // realtype
        IntPtr y, // N_Vector
        IntPtr ydot, // N_Vector
        IntPtr user_data
      );

    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public delegate int CVDlsJacFunc(
      double t,
      IntPtr y, // N_Vector
      IntPtr fy, // N_Vector
      IntPtr Jac, // SUNMatrix
      IntPtr user_data,
      IntPtr tmp1, // N_Vector
      IntPtr tmp2, // N_Vector
      IntPtr tmp3 // N_Vector
      );


    // returns a void* to the cvodes memory block if successful, otherwise NULL
    [DllImport("sundials_cvodes.dll", EntryPoint = "CVodeCreate", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr CVodeCreate(MultistepMethod lmm, NonlinearSolverIteration iter);

    [DllImport("sundials_cvodes.dll", EntryPoint = "CVodeInit", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int CVodeInit(
      IntPtr cvode_mem, // pointer returned by CVodeCreate
      CVRhsFunc f,
      double t0, // realtype, the initial value of t
      IntPtr y0 // N_Vector, the initial value of y
    );

    [DllImport("sundials_cvodes.dll", EntryPoint = "CVodeSStolerances", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int CVodeSStolerances(
      IntPtr cvode_mem,
      double reltol,
      double abstol
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "CVDlsSetLinearSolver", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int CVDlsSetLinearSolver(
      IntPtr cvode_mem,
      IntPtr linearSolver, // SUNLinearSolver
      IntPtr j // SUNMatrix
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "CVDlsSetJacFn", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int CVDlsSetJacFn(
      IntPtr cvode_mem,
      CVDlsJacFunc jacFunc
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "CVode", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int CVode(
      IntPtr cvode_mem,
      double tout, // next time at which a solution is desired
      IntPtr yout, // N_Vector
      ref double tret, // the time reached by the solver (output)
      int itask // flag indicating the job of the solver for the next step.
      );

    // the C signature is void CVodeFree(void** cvode_mem), so the pointer must be passed by reference
    [DllImport("sundials_cvodes.dll", EntryPoint = "CVodeFree", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern void CVodeFree(ref IntPtr cvode_mem);

    #region matrix
    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNDenseMatrix", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr SUNDenseMatrix(long m, long n);

    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNMatDestroy", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern void SUNMatDestroy(IntPtr A);
    #endregion

    #region linear solver
    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNDenseLinearSolver", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr SUNDenseLinearSolver(
      IntPtr y, // N_Vector
      IntPtr A // SUNMatrix
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNLinSolInitialize", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int SUNLinSolInitialize(IntPtr linearSolver);

    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNLinSolSetup", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int SUNLinSolSetup(
      IntPtr linearSolver,
      IntPtr A // SUNMatrix
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNLinSolSolve", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int SUNLinSolSolve(
      IntPtr linearSolver,
      IntPtr A, // SUNMatrix
      IntPtr x, // N_Vector
      IntPtr b, // N_Vector
      double tol
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "SUNLinSolFree", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern int SUNLinSolFree(IntPtr linearSolver);

    #endregion

    #region N_Vector
    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VNew_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr N_VNew_Serial(long vec_length);

    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VDestroy_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern void N_VDestroy_Serial(IntPtr vec);

    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VPrint_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern void N_VPrint_Serial(IntPtr vec);

    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VConst_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern void N_VConst_Serial(double c, IntPtr vec);

    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VL1Norm_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern double N_VL1Norm_Serial(IntPtr vec);

    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VMake_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern IntPtr N_VMake_Serial(long vec_length, double[] v_data);

    /// Performs the operation z = c*x
    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VScale", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public static extern void N_VScale(double c,
      IntPtr x, // N_Vector
      IntPtr z // N_Vector
      );

    [DllImport("sundials_cvodes.dll", EntryPoint = "N_VGetArrayPointer_Serial", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)]
    public unsafe static extern double* N_VGetArrayPointer_Serial(IntPtr vec);
    #endregion

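    // Illustrative CVODES call sequence (a sketch based solely on the P/Invoke declarations
    // above; it is not wired into the algorithm). It integrates the scalar ODE y' = -y from
    // t = 0 to t = 1. Caveats for production use: the CVRhsFunc delegate must be kept alive
    // while CVODES can call it (hence the static field), and the managed array handed to
    // N_VMake_Serial should be pinned because the native side keeps a pointer to it.
    private static readonly CVRhsFunc expDecayRhs = ExpDecayRhs;

    private static unsafe int ExpDecayRhs(double t, IntPtr y, IntPtr ydot, IntPtr user_data) {
      double* yArr = N_VGetArrayPointer_Serial(y);
      double* ydotArr = N_VGetArrayPointer_Serial(ydot);
      ydotArr[0] = -yArr[0]; // y' = -y
      return 0; // 0 = success; a negative value would signal an unrecoverable error to CVODES
    }

    private static double CvodesUsageSketch() {
      IntPtr cvodeMem = CVodeCreate(MultistepMethod.CV_BDF, NonlinearSolverIteration.CV_NEWTON);
      var yData = new double[] { 1.0 };     // initial condition y(0) = 1
      IntPtr y = N_VMake_Serial(1, yData);  // wrap the managed buffer as an N_Vector
      CVodeInit(cvodeMem, expDecayRhs, 0.0, y);
      CVodeSStolerances(cvodeMem, 1e-8, 1e-8);
      IntPtr jac = SUNDenseMatrix(1, 1);
      IntPtr linSolver = SUNDenseLinearSolver(y, jac);
      CVDlsSetLinearSolver(cvodeMem, linSolver, jac); // dense solver; without CVDlsSetJacFn the Jacobian is approximated by difference quotients
      double tReached = 0.0;
      int flag = CVode(cvodeMem, 1.0, y, ref tReached, CV_NORMAL); // advance to t = 1
      double y1 = flag == CV_SUCCESS ? yData[0] : double.NaN;      // expect y(1) ≈ exp(-1) ≈ 0.3679
      N_VDestroy_Serial(y);
      SUNLinSolFree(linSolver);
      SUNMatDestroy(jac);
      CVodeFree(ref cvodeMem);
      return y1;
    }
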
    #region parameter names
    private const string ProblemDataParameterName = "Data";
    private const string TargetVariablesParameterName = "Target variables";
    private const string FunctionSetParameterName = "Function set";
    private const string MaximumLengthParameterName = "Size limit";
    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
    private const string TrainingEpisodesParameterName = "Training episodes";
    private const string OptimizeParametersForEpisodesParameterName = "Optimize parameters for episodes";
    #endregion

    #region Parameter Properties
    IParameter IDataAnalysisProblem.ProblemDataParameter { get { return ProblemDataParameter; } }

    public IValueParameter<IRegressionProblemData> ProblemDataParameter {
      get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> TargetVariablesParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[TargetVariablesParameterName]; }
    }
    public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> FunctionSetParameter {
      get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[FunctionSetParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumLengthParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumberOfLatentVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
    }
    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> OptimizeParametersForEpisodesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[OptimizeParametersForEpisodesParameterName]; }
    }
    #endregion

    #region Properties
    public IRegressionProblemData ProblemData {
      get { return ProblemDataParameter.Value; }
      set { ProblemDataParameter.Value = value; }
    }
    IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    public ReadOnlyCheckedItemCollection<StringValue> TargetVariables {
      get { return TargetVariablesParameter.Value; }
    }

    public ReadOnlyCheckedItemCollection<StringValue> FunctionSet {
      get { return FunctionSetParameter.Value; }
    }

    public int MaximumLength {
      get { return MaximumLengthParameter.Value.Value; }
    }
    public int MaximumParameterOptimizationIterations {
      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
    }
    public int NumberOfLatentVariables {
      get { return NumberOfLatentVariablesParameter.Value.Value; }
    }
    public int NumericIntegrationSteps {
      get { return NumericIntegrationStepsParameter.Value.Value; }
    }
    public IEnumerable<IntRange> TrainingEpisodes {
      get { return TrainingEpisodesParameter.Value; }
    }
    public bool OptimizeParametersForEpisodes {
      get { return OptimizeParametersForEpisodesParameter.Value.Value; }
    }

    #endregion

    public event EventHandler ProblemDataChanged;

    public override bool Maximization {
      get { return false; } // we minimize NMSE
    }

    #region item cloning and persistence
    // persistence
    [StorableConstructor]
    private Problem(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(OptimizeParametersForEpisodesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select whether parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      }
      RegisterEventHandlers();
    }

    // cloning
    private Problem(Problem original, Cloner cloner)
      : base(original, cloner) {
      RegisterEventHandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new Problem(this, cloner); }
    #endregion

    public Problem()
      : base() {
      var targetVariables = new CheckedItemCollection<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
      var functions = CreateFunctionSet();
      Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system. Use the CSV import functionality to import data.", new RegressionProblemData()));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(TargetVariablesParameterName, "Target variables (overrides the setting in ProblemData)", targetVariables));
      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximum allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for the optimization of parameters (using L-BFGS). More iterations make the algorithm slower; too few iterations might prevent convergence of the optimization scheme. Default = 100", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "The number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps make the algorithm slower; fewer steps reduce the accuracy of the numeric integration scheme.", new IntValue(10)));
      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training; each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
      Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select whether parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
      RegisterEventHandlers();
      InitAllParameters();

      // TODO: do not clear the selection of target variables when the input variables are changed (keep selected target variables)
      // TODO: the UI hangs when selecting / deselecting input variables because the encoding is updated on each item
      // TODO: use the training range as the default training episode

    }

    public override double Evaluate(Individual individual, IRandom random) {
      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from the individual

      if (OptimizeParametersForEpisodes) {
        int eIdx = 0;
        double totalNMSE = 0.0;
        int totalSize = 0;
        foreach (var episode in TrainingEpisodes) {
          double[] optTheta;
          double nmse;
          OptimizeForEpisodes(trees, random, new[] { episode }, out optTheta, out nmse);
          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
          eIdx++;
          totalNMSE += nmse * episode.Size;
          totalSize += episode.Size;
        }
        return totalNMSE / totalSize;
      } else {
        double[] optTheta;
        double nmse;
        OptimizeForEpisodes(trees, random, TrainingEpisodes, out optTheta, out nmse);
        individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analyze method
        return nmse;
      }
    }

    private void OptimizeForEpisodes(ISymbolicExpressionTree[] trees, IRandom random, IEnumerable<IntRange> episodes, out double[] optTheta, out double nmse) {
      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
      var targetValues = new double[rows.Length, targetVars.Length];

      // collect values of all target variables
      var colIdx = 0;
      foreach (var targetVar in targetVars) {
        int rowIdx = 0;
        foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
          targetValues[rowIdx, colIdx] = value;
          rowIdx++;
        }
        colIdx++;
      }

      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();

      foreach (var tree in trees) {
        foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
          nodeIdx.Add(node, nodeIdx.Count);
        }
      }

      var theta = nodeIdx.Select(_ => random.NextDouble() * 2.0 - 1.0).ToArray(); // init params randomly from Unif(-1,1)

      optTheta = new double[0];
      if (theta.Length > 0) {
        alglib.minlbfgsstate state;
        alglib.minlbfgsreport report;
        alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
          new object[] { trees, targetVars, problemData, nodeIdx, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables }); // TODO: create a type
        alglib.minlbfgsresults(state, out optTheta, out report);

        /*
         * L-BFGS algorithm results

          INPUT PARAMETERS:
              State   -   algorithm state

          OUTPUT PARAMETERS:
              X       -   array[0..N-1], solution
              Rep     -   optimization report:
                          * Rep.TerminationType completion code:
                              * -7    gradient verification failed.
                                      See MinLBFGSSetGradientCheck() for more information.
                              * -2    rounding errors prevent further improvement.
                                      X contains best point found.
                              * -1    incorrect parameters were specified
                              *  1    relative function improvement is no more than
                                      EpsF.
                              *  2    relative step is no more than EpsX.
                              *  4    gradient norm is no more than EpsG
                              *  5    MaxIts steps were taken
                              *  7    stopping conditions are too stringent,
                                      further improvement is impossible
                          * Rep.IterationsCount contains iterations count
                          * NFEV contains the number of function evaluations
         */
        if (report.terminationtype < 0) { nmse = 10E6; return; }
      }

      // perform evaluation for optimal theta to get quality value
      double[] grad = new double[optTheta.Length];
      nmse = double.NaN;
      EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
        new object[] { trees, targetVars, problemData, nodeIdx, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables });
      if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
    }

    private static void EvaluateObjectiveAndGradient(double[] x, ref double f, double[] grad, object obj) {
      var trees = (ISymbolicExpressionTree[])((object[])obj)[0];
      var targetVariables = (string[])((object[])obj)[1];
      var problemData = (IRegressionProblemData)((object[])obj)[2];
      var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[3];
      var targetValues = (double[,])((object[])obj)[4];
      var episodes = (IntRange[])((object[])obj)[5];
      var numericIntegrationSteps = (int)((object[])obj)[6];
      var latentVariables = (string[])((object[])obj)[7];

      var predicted = Integrate(
        trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
        problemData.Dataset,
        problemData.AllowedInputVariables.ToArray(),
        targetVariables,
        latentVariables,
        episodes,
        nodeIdx,
        x, numericIntegrationSteps).ToArray();

      // for normalized MSE = 1/variance(t) * MSE(t, pred)
      // TODO: Perf. (by standardization of target variables before evaluation of all trees)
      var invVar = Enumerable.Range(0, targetVariables.Length)
        .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
        .Select(vec => vec.Variance())
        .Select(v => 1.0 / v)
        .ToArray();

      // objective function is NMSE
      f = 0.0;
      int n = predicted.Length;
      double invN = 1.0 / n;
      var g = Vector.Zero;
      int r = 0;
      foreach (var y_pred in predicted) {
        for (int c = 0; c < y_pred.Length; c++) {
          var y_pred_f = y_pred[c].Item1;
          var y = targetValues[r, c];

          var res = (y - y_pred_f);
          var ressq = res * res;
          f += ressq * invN * invVar[c];
          g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c];
        }
        r++;
      }

      g.CopyTo(grad);
    }
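
    // Gradient recap for the objective above: with f = Σ_{r,c} invVar[c]/n * (y_rc - ŷ_rc)²,
    // the chain rule gives ∂f/∂θ = Σ_{r,c} invVar[c]/n * (-2)(y_rc - ŷ_rc) * ∂ŷ_rc/∂θ,
    // where ∂ŷ_rc/∂θ is the sensitivity vector (Item2) produced by Integrate;
    // this is exactly the accumulation performed in g above.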

    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);

      if (!results.ContainsKey("Prediction (training)")) {
        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Prediction (test)")) {
        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
      }
      if (!results.ContainsKey("Models")) {
        results.Add(new Result("Models", typeof(VariableCollection)));
      }

      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from the individual

      // TODO: extract common functionality from Evaluate and Analyze
      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
      foreach (var tree in trees) {
        foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
          nodeIdx.Add(node, nodeIdx.Count);
        }
      }
      var problemData = ProblemData;
      var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees

      var trainingList = new ItemList<DataTable>();

      if (OptimizeParametersForEpisodes) {
        var eIdx = 0;
        var trainingPredictions = new List<Tuple<double, Vector>[][]>();
        foreach (var episode in TrainingEpisodes) {
          var episodes = new[] { episode };
          var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see Evaluate
          var trainingPrediction = Integrate(
                                   trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
                                   problemData.Dataset,
                                   problemData.AllowedInputVariables.ToArray(),
                                   targetVars,
                                   latentVariables,
                                   episodes,
                                   nodeIdx,
                                   optTheta,
                                   NumericIntegrationSteps).ToArray();
          trainingPredictions.Add(trainingPrediction);
          eIdx++;
        }

        // only for actual target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPredictions.SelectMany(arr => arr.Select(row => row[colIdx].Item1)).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        results["Prediction (training)"].Value = trainingList.AsReadOnly();


        var models = new VariableCollection();

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
        }
        results["Models"].Value = models;
      } else {
        var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see Evaluate
        var trainingPrediction = Integrate(
                                   trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
                                   problemData.Dataset,
                                   problemData.AllowedInputVariables.ToArray(),
                                   targetVars,
                                   latentVariables,
                                   TrainingEpisodes,
                                   nodeIdx,
                                   optTheta,
                                   NumericIntegrationSteps).ToArray();
        // only for actual target values
        var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
          trainingDataTable.Rows.Add(actualValuesRow);
          trainingDataTable.Rows.Add(predictedValuesRow);
          trainingList.Add(trainingDataTable);
        }
        // TODO: DRY for training and test
        var testList = new ItemList<DataTable>();
        var testRows = ProblemData.TestIndices.ToArray();
        var testPrediction = Integrate(
         trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
         problemData.Dataset,
         problemData.AllowedInputVariables.ToArray(),
         targetVars,
         latentVariables,
         new IntRange[] { ProblemData.TestPartition },
         nodeIdx,
         optTheta,
         NumericIntegrationSteps).ToArray();

        for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
          var targetVar = targetVars[colIdx];
          var testDataTable = new DataTable(targetVar + " prediction (test)");
          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
          testDataTable.Rows.Add(actualValuesRow);
          testDataTable.Rows.Add(predictedValuesRow);
          testList.Add(testDataTable);
        }

        results["Prediction (training)"].Value = trainingList.AsReadOnly();
        results["Prediction (test)"].Value = testList.AsReadOnly();
        #region simplification of models
        // TODO: the dependency on HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
        var models = new VariableCollection();    // to store target var names and the original version of the tree

        foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
          var targetVarName = tup.Item1;
          var tree = tup.Item2;

          // when we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
          int nextParIdx = 0;
          var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta, ref nextParIdx));

          // var shownTree = (SymbolicExpressionTree)tree.Clone();
          // var constantsNodeOrig = tree.IterateNodesPrefix().Where(IsConstantNode);
          // var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode);
          //
          // foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) {
          //   double constantsVal = optTheta[nodeIdx[n.original]];
          //
          //   ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal };
          //
          //   var parentNode = n.shown.Parent;
          //   int replacementIndex = parentNode.IndexOfSubtree(n.shown);
          //   parentNode.RemoveSubtree(replacementIndex);
          //   parentNode.InsertSubtree(replacementIndex, replacementNode);
          // }

          var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
          origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
          models.Add(origTreeVar);
          var simplifiedTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(simplified)");
          simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
          models.Add(simplifiedTreeVar);

        }
        results["Models"].Value = models;
        #endregion
      }
    }

    private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
      ISymbolicExpressionTreeNode translatedNode = null;
      if (n.Symbol is StartSymbol) {
        translatedNode = new StartSymbol().CreateTreeNode();
      } else if (n.Symbol is ProgramRootSymbol) {
        translatedNode = new ProgramRootSymbol().CreateTreeNode();
      } else if (n.Symbol.Name == "+") {
        translatedNode = new Addition().CreateTreeNode();
      } else if (n.Symbol.Name == "-") {
        translatedNode = new Subtraction().CreateTreeNode();
      } else if (n.Symbol.Name == "*") {
        translatedNode = new Multiplication().CreateTreeNode();
      } else if (n.Symbol.Name == "%") {
        translatedNode = new Division().CreateTreeNode();
      } else if (n.Symbol.Name == "sin") {
        translatedNode = new Sine().CreateTreeNode();
      } else if (n.Symbol.Name == "cos") {
        translatedNode = new Cosine().CreateTreeNode();
      } else if (IsConstantNode(n)) {
        var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
        constNode.Value = parameterValues[nextParIdx];
        nextParIdx++;
        translatedNode = constNode;
      } else {
        // assume a variable name
        var varName = n.Symbol.Name;
        var varNode = (VariableTreeNode)new Variable().CreateTreeNode();
        varNode.Weight = 1.0;
        varNode.VariableName = varName;
        translatedNode = varNode;
      }
      foreach (var child in n.Subtrees) {
        translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
      }
      return translatedNode;
    }

    #region interpretation

    // the following uses auto-diff to calculate the gradient w.r.t. the parameters forward in time.
    // this is basically the method described in Gronwall T. Note on the derivatives with respect to a parameter of the solutions of a system of differential equations. Ann. Math. 1919;20:292–296.

    // a comparison of three potential calculation methods for the gradient is given in:
    // Sengupta, B., Friston, K. J., & Penny, W. D. (2014). Efficient gradient computation for dynamical models. Neuroimage, 98(100), 521–527. http://doi.org/10.1016/j.neuroimage.2014.04.040
    // "Our comparison establishes that the adjoint method is computationally more efficient for numerical estimation of parametric gradients
    // for state-space models - both linear and non-linear, as in the case of a dynamical causal model (DCM)"

    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
    /*
     * SUNDIALS: SUite of Nonlinear and DIfferential/ALgebraic Equation Solvers
     * CVODES
     * CVODES is a solver for stiff and nonstiff ODE systems (initial value problem) given in explicit
     * form y' = f(t,y,p) with sensitivity analysis capabilities (both forward and adjoint modes). CVODES
     * is a superset of CVODE and hence all options available to CVODE (with the exception of the FCVODE
     * interface module) are also available for CVODES. Both integration methods (Adams-Moulton and BDF)
     * and the corresponding nonlinear iteration methods, as well as all linear solver and preconditioner
     * modules, are available for the integration of the original ODEs, the sensitivity systems, or the
     * adjoint system. Depending on the number of model parameters and the number of functional outputs,
     * one of two sensitivity methods is more appropriate. The forward sensitivity analysis (FSA) method
     * is mostly suitable when the gradients of many outputs (for example the entire solution vector) with
     * respect to relatively few parameters are needed. In this approach, the model is differentiated with
     * respect to each parameter in turn to yield an additional system of the same size as the original
     * one, the result of which is the solution sensitivity. The gradient of any output function depending
     * on the solution can then be directly obtained from these sensitivities by applying the chain rule
     * of differentiation. The adjoint sensitivity analysis (ASA) method is more practical than the
     * forward approach when the number of parameters is large and the gradients of only few output
     * functionals are needed. In this approach, the solution sensitivities need not be computed
     * explicitly. Instead, for each output functional of interest, an additional system, adjoint to the
     * original one, is formed and solved. The solution of the adjoint system can then be used to evaluate
     * the gradient of the output functional with respect to any set of model parameters. The FSA module
     * in CVODES implements a simultaneous corrector method as well as two flavors of staggered corrector
     * methods - for the case when sensitivity right hand sides are generated all at once or separated for
     * each model parameter. The ASA module provides the infrastructure required for the backward
     * integration in time of systems of differential equations dependent on the solution of the original
     * ODEs. It employs a checkpointing scheme for efficient interpolation of forward solutions during the
     * backward integration.
     */
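    // Concretely, the forward (Gronwall) approach implemented below augments the state y(t)
    // with its sensitivities s(t) = ∂y(t)/∂θ, which obey the sensitivity ODE
    //   s'(t) = ∂f/∂y * s(t) + ∂f/∂θ.
    // InterpretRec evaluates f and this total derivative jointly via (value, gradient) pairs,
    // and Integrate advances both with the same explicit Euler step:
    //   y_{t+h} = y_t + h * f(y_t, θ),   s_{t+h} = s_t + h * (∂f/∂y * s_t + ∂f/∂θ).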
    private static IEnumerable<Tuple<double, Vector>[]> Integrate(
      ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
      Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx, double[] parameterValues, int numericIntegrationSteps = 100) {

      int NUM_STEPS = numericIntegrationSteps;
      double h = 1.0 / NUM_STEPS;

      foreach (var episode in episodes) {
        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);
        // return the first value as stored in the dataset
        yield return targetVariables
          .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
          .ToArray();

        // integrate forward starting with known values for the target in t0

        var variableValues = new Dictionary<string, Tuple<double, Vector>>();
        var t0 = rows.First();
        foreach (var varName in inputVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        foreach (var varName in targetVariables) {
          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
        }
        // add value entries for latent variables which are also integrated
        foreach (var latentVar in latentVariables) {
          variableValues.Add(latentVar, Tuple.Create(0.0, Vector.Zero)); // we don't have observations for latent variables -> assume zero as the starting value
        }
        var calculatedVariables = targetVariables.Concat(latentVariables); // TODO: must coincide with the order of trees in the encoding

        foreach (var t in rows.Skip(1)) {
          for (int step = 0; step < NUM_STEPS; step++) {
            var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
            foreach (var tup in trees.Zip(calculatedVariables, Tuple.Create)) {
              var tree = tup.Item1;
              var targetVarName = tup.Item2;
              // skip programRoot and startSymbol
              var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), variableValues, nodeIdx, parameterValues);
              deltaValues.Add(targetVarName, res);
            }

            // update variableValues for the next step
            foreach (var kvp in deltaValues) {
              var oldVal = variableValues[kvp.Key];
              variableValues[kvp.Key] = Tuple.Create(
                oldVal.Item1 + h * kvp.Value.Item1,
                oldVal.Item2 + h * kvp.Value.Item2
              );
            }
          }

          // only return the target variables for calculation of errors
          yield return targetVariables
            .Select(targetVar => variableValues[targetVar])
            .ToArray();

          // update for the next time step
          foreach (var varName in inputVariables) {
            variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
          }
        }
      }
    }

    private static Tuple<double, Vector> InterpretRec(
      ISymbolicExpressionTreeNode node,
      Dictionary<string, Tuple<double, Vector>> variableValues,
      Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx,
      double[] parameterValues
        ) {

      switch (node.Symbol.Name) {
        case "+": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues); // TODO: capture all parameters in a state type for interpretation
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            return Tuple.Create(l.Item1 + r.Item1, l.Item2 + r.Item2);
          }
        case "*": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            return Tuple.Create(l.Item1 * r.Item1, l.Item2 * r.Item1 + l.Item1 * r.Item2); // product rule
          }

        case "-": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            return Tuple.Create(l.Item1 - r.Item1, l.Item2 - r.Item2);
          }
        case "%": {
            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

            // protected division
            if (r.Item1.IsAlmost(0.0)) {
              return Tuple.Create(0.0, Vector.Zero);
            } else {
              return Tuple.Create(
                l.Item1 / r.Item1,
                l.Item1 * -1.0 / (r.Item1 * r.Item1) * r.Item2 + 1.0 / r.Item1 * l.Item2 // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'
                );
            }
          }
        case "sin": {
            var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            return Tuple.Create(
              Math.Sin(x.Item1),
              Math.Cos(x.Item1) * x.Item2 // chain rule: (sin f)' = cos(f) * f'; the scalar cos(f) scales the gradient vector
            );
          }
        case "cos": {
            var x = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
            return Tuple.Create(
              Math.Cos(x.Item1),
              -Math.Sin(x.Item1) * x.Item2 // chain rule: (cos f)' = -sin(f) * f'
            );
          }
        default: {
            // distinguish other cases
            if (IsConstantNode(node)) {
              var vArr = new double[parameterValues.Length]; // backing array for the gradient vector
              vArr[nodeIdx[node]] = 1.0;
              var g = new Vector(vArr);
              return Tuple.Create(parameterValues[nodeIdx[node]], g);
            } else {
              // assume a variable name
              var varName = node.Symbol.Name;
              return variableValues[varName];
            }
          }
      }
    }
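
    // Worked example for InterpretRec (illustrative): for the tree "θ0 * x" with
    // parameterValues = { 2.0 }, variableValues["x"] = (3.0, Vector.Zero) and
    // nodeIdx mapping the θ0 node to index 0, the "*" case returns
    //   value    = 2.0 * 3.0 = 6.0
    //   gradient = [1.0] * 3.0 + 2.0 * Zero = [3.0]    // i.e. d(θ0*x)/dθ0 = x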
    #endregion

    #region events
    /*
     * Dependencies between parameters:
     *
     * ProblemData
     *    |
     *    V
     * TargetVariables   FunctionSet    MaximumLength    NumberOfLatentVariables
     *               |   |                 |                   |
     *               V   V                 |                   |
     *             Grammar <---------------+-------------------
     *                |
     *                V
     *            Encoding
     */
    private void RegisterEventHandlers() {
      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;

      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;

      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;

      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;

      NumberOfLatentVariablesParameter.Value.ValueChanged += NumLatentVariablesChanged;
    }

    private void NumLatentVariablesChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void MaximumLengthChanged(object sender, EventArgs e) {
      UpdateGrammarAndEncoding();
    }

    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
    }

    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
      UpdateGrammarAndEncoding();
    }

    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
    }

    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
      UpdateGrammarAndEncoding();
    }

    private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
      ProblemDataParameter.Value.Changed += ProblemData_Changed;
      OnProblemDataChanged();
      OnReset();
    }

    private void ProblemData_Changed(object sender, EventArgs e) {
      OnProblemDataChanged();
      OnReset();
    }

    private void OnProblemDataChanged() {
      UpdateTargetVariables();        // implicitly updates other dependent parameters
      var handler = ProblemDataChanged;
      if (handler != null) handler(this, EventArgs.Empty);
    }

    #endregion

    #region helper

    private void InitAllParameters() {
      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
    }

    private ReadOnlyCheckedItemCollection<StringValue> CreateFunctionSet() {
      var l = new CheckedItemCollection<StringValue>();
      l.Add(new StringValue("+").AsReadOnly());
      l.Add(new StringValue("*").AsReadOnly());
      l.Add(new StringValue("%").AsReadOnly());
      l.Add(new StringValue("-").AsReadOnly());
      l.Add(new StringValue("sin").AsReadOnly());
      l.Add(new StringValue("cos").AsReadOnly());
      return l.AsReadOnly();
    }

    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name.StartsWith("θ");
    }
    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
      return n.Symbol.Name.StartsWith("λ");
    }


    private void UpdateTargetVariables() {
      var currentlySelectedVariables = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();

      var newVariablesList = new CheckedItemCollection<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
      foreach (var matchingItem in matchingItems) {
        newVariablesList.SetItemCheckedState(matchingItem, true);
      }
      TargetVariablesParameter.Value = newVariablesList;
    }

    private void UpdateGrammarAndEncoding() {
      var encoding = new MultiEncoding();
      var g = CreateGrammar();
      foreach (var targetVar in TargetVariables.CheckedItems) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
      }
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
      }
      Encoding = encoding;
    }

    private ISymbolicExpressionGrammar CreateGrammar() {
      // whenever ProblemData is changed we create a new grammar with the necessary symbols
      var g = new SimpleSymbolicExpressionGrammar();
      g.AddSymbols(FunctionSet.CheckedItems.Select(i => i.Value).ToArray(), 2, 2);

      // TODO
      //g.AddSymbols(new[] {
      //  "exp",
      //  "log", // log( <expr> ) // TODO: init a theta to ensure the value is always positive
      //  "exp_minus" // exp((-1) * <expr>)
      //}, 1, 1);

      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
        g.AddTerminalSymbol(variableName);

      // generate symbols for numeric parameters for which the value is optimized using AutoDiff
      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
      var numericConstantsFactor = 2.0;
      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
      }

      // generate symbols for latent variables
      for (int i = 1; i <= NumberOfLatentVariables; i++) {
        g.AddTerminalSymbol("λ" + i); // latent variable (integrated like a target variable but without observed data)
      }

      return g;
    }

    #endregion

    #region Import & Export
    public void Load(IRegressionProblemData data) {
      Name = data.Name;
      Description = data.Description;
      ProblemData = data;
    }

    public IRegressionProblemData Export() {
      return ProblemData;
    }
    #endregion

  }
}