Changeset 16153


Ignore:
Timestamp:
09/17/18 20:17:05 (13 months ago)
Author:
gkronber
Message:

#2925 added support for multiple training episodes, added simplification of models, fixed a bug in the normalization based on target variable variance

File:
1 edited

Legend:

Unmodified
Added
Removed
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs

    r16152 r16153  
    3636using HeuristicLab.Problems.DataAnalysis.Symbolic;
    3737using HeuristicLab.Problems.Instances;
     38using Variable = HeuristicLab.Problems.DataAnalysis.Symbolic.Variable;
    3839
    3940namespace HeuristicLab.Problems.DynamicalSystemsModelling {
     
    4243
    4344    public static Vector operator +(Vector a, Vector b) {
    44       if (a == Zero) return b;
    45       if (b == Zero) return a;
     45      if(a == Zero) return b;
     46      if(b == Zero) return a;
    4647      Debug.Assert(a.arr.Length == b.arr.Length);
    4748      var res = new double[a.arr.Length];
    48       for (int i = 0; i < res.Length; i++)
     49      for(int i = 0; i < res.Length; i++)
    4950        res[i] = a.arr[i] + b.arr[i];
    5051      return new Vector(res);
    5152    }
    5253    public static Vector operator -(Vector a, Vector b) {
    53       if (b == Zero) return a;
    54       if (a == Zero) return -b;
     54      if(b == Zero) return a;
     55      if(a == Zero) return -b;
    5556      Debug.Assert(a.arr.Length == b.arr.Length);
    5657      var res = new double[a.arr.Length];
    57       for (int i = 0; i < res.Length; i++)
     58      for(int i = 0; i < res.Length; i++)
    5859        res[i] = a.arr[i] - b.arr[i];
    5960      return new Vector(res);
    6061    }
    6162    public static Vector operator -(Vector v) {
    62       if (v == Zero) return Zero;
    63       for (int i = 0; i < v.arr.Length; i++)
     63      if(v == Zero) return Zero;
     64      for(int i = 0; i < v.arr.Length; i++)
    6465        v.arr[i] = -v.arr[i];
    6566      return v;
     
    6768
    6869    public static Vector operator *(double s, Vector v) {
    69       if (v == Zero) return Zero;
    70       if (s == 0.0) return Zero;
     70      if(v == Zero) return Zero;
     71      if(s == 0.0) return Zero;
    7172      var res = new double[v.arr.Length];
    72       for (int i = 0; i < res.Length; i++)
     73      for(int i = 0; i < res.Length; i++)
    7374        res[i] = s * v.arr[i];
    7475      return new Vector(res);
     
    7879    }
    7980    public static Vector operator /(double s, Vector v) {
    80       if (s == 0.0) return Zero;
    81       if (v == Zero) throw new ArgumentException("Division by zero vector");
     81      if(s == 0.0) return Zero;
     82      if(v == Zero) throw new ArgumentException("Division by zero vector");
    8283      var res = new double[v.arr.Length];
    83       for (int i = 0; i < res.Length; i++)
     84      for(int i = 0; i < res.Length; i++)
    8485        res[i] = 1.0 / v.arr[i];
    8586      return new Vector(res);
     
    115116    private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
    116117    private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
     118    private const string TrainingEpisodesParameterName = "Training episodes";
    117119    #endregion
    118120
     
    140142    public IFixedValueParameter<IntValue> NumericIntegrationStepsParameter {
    141143      get { return (IFixedValueParameter<IntValue>)Parameters[NumericIntegrationStepsParameterName]; }
     144    }
     145    public IValueParameter<ItemList<IntRange>> TrainingEpisodesParameter {
     146      get { return (IValueParameter<ItemList<IntRange>>)Parameters[TrainingEpisodesParameterName]; }
    142147    }
    143148    #endregion
     
    170175      get { return NumericIntegrationStepsParameter.Value.Value; }
    171176    }
    172 
    173     #endregion                                                                                     
     177    public IEnumerable<IntRange> TrainingEpisodes {
     178      get { return TrainingEpisodesParameter.Value; }
     179    }
     180
     181    #endregion
    174182
    175183    public event EventHandler ProblemDataChanged;
     
    207215      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
    208216      Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps makes the algorithm slower, less steps worsens the accuracy of the numeric integration scheme.", new IntValue(10)));
     217      Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
    209218
    210219      RegisterEventHandlers();
    211220      InitAllParameters();
    212221
    213       // TODO: do not clear selection of target variables when the input variables are changed
     222      // TODO: do not clear selection of target variables when the input variables are changed (keep selected target variables)
    214223      // TODO: UI hangs when selecting / deselecting input variables because the encoding is updated on each item
     224
    215225    }
    216226
     
    227237      // collect values of all target variables
    228238      var colIdx = 0;
    229       foreach (var targetVar in targetVars) {
     239      foreach(var targetVar in targetVars) {
    230240        int rowIdx = 0;
    231         foreach (var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
     241        foreach(var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
    232242          targetValues[rowIdx, colIdx] = value;
    233243          rowIdx++;
     
    238248      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
    239249
    240       foreach (var tree in trees) {
    241         foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
     250      foreach(var tree in trees) {
     251        foreach(var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
    242252          nodeIdx.Add(node, nodeIdx.Count);
    243253        }
     
    247257
    248258      double[] optTheta = new double[0];
    249       if (theta.Length > 0) {
     259      if(theta.Length > 0) {
    250260        alglib.minlbfgsstate state;
    251261        alglib.minlbfgsreport report;
     
    253263        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
    254264        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
    255           new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows, NumericIntegrationSteps, latentVariables }); //TODO: create a type
     265          new object[] { trees, targetVars, problemData, nodeIdx, targetValues, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables }); //TODO: create a type
    256266        alglib.minlbfgsresults(state, out optTheta, out report);
    257267
     
    282292                          * NFEV contains number of function calculations
    283293         */
    284         if (report.terminationtype < 0) return double.MaxValue;
     294        if(report.terminationtype < 0) return double.MaxValue;
    285295      }
    286296
     
    289299      double optQuality = double.NaN;
    290300      EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad,
    291         new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows, NumericIntegrationSteps, latentVariables });
    292       if (double.IsNaN(optQuality) || double.IsInfinity(optQuality)) return 10E6; // return a large value (TODO: be consistent by using NMSE)
     301        new object[] { trees, targetVars, problemData, nodeIdx, targetValues, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables });
     302      if(double.IsNaN(optQuality) || double.IsInfinity(optQuality)) return 10E6; // return a large value (TODO: be consistent by using NMSE)
    293303
    294304      individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
     
    302312      var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[3];
    303313      var targetValues = (double[,])((object[])obj)[4];
    304       var rows = (int[])((object[])obj)[5];
     314      var episodes = (IntRange[])((object[])obj)[5];
    305315      var numericIntegrationSteps = (int)((object[])obj)[6];
    306316      var latentVariables = (string[])((object[])obj)[7];
     
    312322        targetVariables,
    313323        latentVariables,
    314         rows,
    315         nodeIdx,                // TODO: is it Ok to use rows here ?
     324        episodes,
     325        nodeIdx,
    316326        x, numericIntegrationSteps).ToArray();
    317327
    318328
    319329      // for normalized MSE = 1/variance(t) * MSE(t, pred)
    320       // TODO: Perf. (by standardization of target variables before evaluation of all trees)
     330      // TODO: Perf. (by standardization of target variables before evaluation of all trees)     
    321331      var invVar = Enumerable.Range(0, targetVariables.Length)
    322         .Select(c => rows.Select(row => targetValues[row, c])) // column vectors
     332        .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
    323333        .Select(vec => vec.Variance())
    324334        .Select(v => 1.0 / v)
     
    331341      var g = Vector.Zero;
    332342      int r = 0;
    333       foreach (var y_pred in predicted) {
    334         for (int c = 0; c < y_pred.Length; c++) {
     343      foreach(var y_pred in predicted) {
     344        for(int c = 0; c < y_pred.Length; c++) {
    335345
    336346          var y_pred_f = y_pred[c].Item1;
     
    351361      base.Analyze(individuals, qualities, results, random);
    352362
    353       if (!results.ContainsKey("Prediction (training)")) {
     363      if(!results.ContainsKey("Prediction (training)")) {
    354364        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
    355365      }
    356       if (!results.ContainsKey("Prediction (test)")) {
     366      if(!results.ContainsKey("Prediction (test)")) {
    357367        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
    358368      }
    359       if (!results.ContainsKey("Models")) {
    360         results.Add(new Result("Models", typeof(ReadOnlyItemList<ISymbolicExpressionTree>)));
     369      if(!results.ContainsKey("Models")) {
     370        results.Add(new Result("Models", typeof(VariableCollection)));
    361371      }
    362372
     
    368378
    369379
    370       foreach (var tree in trees) {
    371         foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
     380      foreach(var tree in trees) {
     381        foreach(var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
    372382          nodeIdx.Add(node, nodeIdx.Count);
    373383        }
     
    378388
    379389      var trainingList = new ItemList<DataTable>();
    380       var trainingRows = ProblemData.TrainingIndices.ToArray();
    381390      var trainingPrediction = Integrate(
    382391       trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
     
    385394       targetVars,
    386395       latentVariables,
    387        trainingRows,
     396       TrainingEpisodes,
    388397       nodeIdx,
    389398       optTheta,
    390399       NumericIntegrationSteps).ToArray();
    391400
    392       for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
     401      // only for actual target values
     402      var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
     403      for(int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
    393404        var targetVar = targetVars[colIdx];
    394405        var trainingDataTable = new DataTable(targetVar + " prediction (training)");
     
    409420       targetVars,
    410421       latentVariables,
    411        testRows,
     422       new IntRange[] { ProblemData.TestPartition },
    412423       nodeIdx,
    413424       optTheta,
    414425       NumericIntegrationSteps).ToArray();
    415426
    416       for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
     427      for(int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
    417428        var targetVar = targetVars[colIdx];
    418429        var testDataTable = new DataTable(targetVar + " prediction (test)");
     
    429440      #region simplification of models
    430441      // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
    431       var modelList = new ItemList<ISymbolicExpressionTree>();
    432       foreach (var tree in trees) {
    433         var shownTree = (ISymbolicExpressionTree)tree.Clone();
    434         var constantsNodeOrig = tree.IterateNodesPrefix().Where(IsConstantNode);
    435         var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode);
    436 
    437         foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) {
    438           double constantsVal = optTheta[nodeIdx[n.original]];
    439 
    440           ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal };
    441 
    442           var parentNode = n.shown.Parent;
    443           int replacementIndex = parentNode.IndexOfSubtree(n.shown);
    444           parentNode.RemoveSubtree(replacementIndex);
    445           parentNode.InsertSubtree(replacementIndex, replacementNode);
    446         }
    447 
    448         modelList.Add(shownTree);
    449       }
    450       results["Models"].Value = modelList.AsReadOnly();
     442      var models = new VariableCollection();    // to store target var names and original version of tree
     443
     444      foreach(var tup in targetVars.Zip(trees, Tuple.Create)) {
     445        var targetVarName = tup.Item1;
     446        var tree = tup.Item2;
     447
     448        // when we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
     449        int nextParIdx = 0;
     450        var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta, ref nextParIdx));
     451
     452        // var shownTree = (SymbolicExpressionTree)tree.Clone();
     453        // var constantsNodeOrig = tree.IterateNodesPrefix().Where(IsConstantNode);
     454        // var constantsNodeShown = shownTree.IterateNodesPrefix().Where(IsConstantNode);
     455        //
     456        // foreach (var n in constantsNodeOrig.Zip(constantsNodeShown, (original, shown) => new { original, shown })) {
     457        //   double constantsVal = optTheta[nodeIdx[n.original]];
     458        //
     459        //   ConstantTreeNode replacementNode = new ConstantTreeNode(new Constant()) { Value = constantsVal };
     460        //
     461        //   var parentNode = n.shown.Parent;
     462        //   int replacementIndex = parentNode.IndexOfSubtree(n.shown);
     463        //   parentNode.RemoveSubtree(replacementIndex);
     464        //   parentNode.InsertSubtree(replacementIndex, replacementNode);
     465        // }
     466
     467        var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
     468        origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
     469        models.Add(origTreeVar);
     470        var simplifiedTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(simplified)");
     471        simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
     472        models.Add(simplifiedTreeVar);
     473
     474      }
     475      results["Models"].Value = models;
    451476      #endregion
    452477    }
    453478
     479    private ISymbolicExpressionTreeNode TranslateTreeNode(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
     480      ISymbolicExpressionTreeNode translatedNode = null;
     481      if(n.Symbol is StartSymbol) {
     482        translatedNode = new StartSymbol().CreateTreeNode();
     483      } else if(n.Symbol is ProgramRootSymbol) {
     484        translatedNode = new ProgramRootSymbol().CreateTreeNode();
     485      } else if(n.Symbol.Name == "+") {
     486        translatedNode = new Addition().CreateTreeNode();
     487      } else if(n.Symbol.Name == "-") {
     488        translatedNode = new Subtraction().CreateTreeNode();
     489      } else if(n.Symbol.Name == "*") {
     490        translatedNode = new Multiplication().CreateTreeNode();
     491      } else if(n.Symbol.Name == "%") {
     492        translatedNode = new Division().CreateTreeNode();
     493      } else if(IsConstantNode(n)) {
     494        var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
     495        constNode.Value = parameterValues[nextParIdx];
     496        nextParIdx++;
     497        translatedNode = constNode;
     498      } else {
     499        // assume a variable name
     500        var varName = n.Symbol.Name;
     501        var varNode = (VariableTreeNode)new Variable().CreateTreeNode();
     502        varNode.Weight = 1.0;
     503        varNode.VariableName = varName;
     504        translatedNode = varNode;
     505      }
     506      foreach(var child in n.Subtrees) {
     507        translatedNode.AddSubtree(TranslateTreeNode(child, parameterValues, ref nextParIdx));
     508      }
     509      return translatedNode;
     510    }
    454511
    455512    #region interpretation
    456513    private static IEnumerable<Tuple<double, Vector>[]> Integrate(
    457       ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<int> rows,
     514      ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
    458515      Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx, double[] parameterValues, int numericIntegrationSteps = 100) {
    459516
    460       int NUM_STEPS = numericIntegrationSteps ;
     517      int NUM_STEPS = numericIntegrationSteps;
    461518      double h = 1.0 / NUM_STEPS;
    462519
    463       // return first value as stored in the dataset
    464       yield return targetVariables
    465         .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
    466         .ToArray();
    467 
    468       // integrate forward starting with known values for the target in t0
    469 
    470       var variableValues = new Dictionary<string, Tuple<double, Vector>>();
    471       var t0 = rows.First();
    472       foreach (var varName in inputVariables) {
    473         variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
    474       }
    475       foreach (var varName in targetVariables) {
    476         variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
    477       }
    478       // add value entries for latent variables which are also integrated
    479       foreach(var latentVar in latentVariables) {
    480         variableValues.Add(latentVar, Tuple.Create(0.0, Vector.Zero)); // we don't have observations for latent variables -> assume zero as starting value
    481       }
    482       var calculatedVariables = targetVariables.Concat(latentVariables); // TODO: must coincide with the order of trees in the encoding
    483 
    484       foreach (var t in rows.Skip(1)) {
    485         for (int step = 0; step < NUM_STEPS; step++) {
    486           var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
    487           foreach (var tup in trees.Zip(calculatedVariables, Tuple.Create)) {
    488             var tree = tup.Item1;
    489             var targetVarName = tup.Item2;
    490             // skip programRoot and startSymbol
    491             var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), variableValues, nodeIdx, parameterValues);
    492             deltaValues.Add(targetVarName, res);
     520      foreach(var episode in episodes) {
     521        var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);
     522        // return first value as stored in the dataset
     523        yield return targetVariables
     524          .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
     525          .ToArray();
     526
     527        // integrate forward starting with known values for the target in t0
     528
     529        var variableValues = new Dictionary<string, Tuple<double, Vector>>();
     530        var t0 = rows.First();
     531        foreach(var varName in inputVariables) {
     532          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
     533        }
     534        foreach(var varName in targetVariables) {
     535          variableValues.Add(varName, Tuple.Create(dataset.GetDoubleValue(varName, t0), Vector.Zero));
     536        }
     537        // add value entries for latent variables which are also integrated
     538        foreach(var latentVar in latentVariables) {
     539          variableValues.Add(latentVar, Tuple.Create(0.0, Vector.Zero)); // we don't have observations for latent variables -> assume zero as starting value
     540        }
     541        var calculatedVariables = targetVariables.Concat(latentVariables); // TODO: must coincide with the order of trees in the encoding
     542
     543        foreach(var t in rows.Skip(1)) {
     544          for(int step = 0; step < NUM_STEPS; step++) {
     545            var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
     546            foreach(var tup in trees.Zip(calculatedVariables, Tuple.Create)) {
     547              var tree = tup.Item1;
     548              var targetVarName = tup.Item2;
     549              // skip programRoot and startSymbol
     550              var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), variableValues, nodeIdx, parameterValues);
     551              deltaValues.Add(targetVarName, res);
     552            }
     553
     554            // update variableValues for next step
     555            foreach(var kvp in deltaValues) {
     556              var oldVal = variableValues[kvp.Key];
     557              variableValues[kvp.Key] = Tuple.Create(
     558                oldVal.Item1 + h * kvp.Value.Item1,
     559                oldVal.Item2 + h * kvp.Value.Item2
     560              );
     561            }
    493562          }
    494563
    495           // update variableValues for next step
    496           foreach (var kvp in deltaValues) {
    497             var oldVal = variableValues[kvp.Key];
    498             variableValues[kvp.Key] = Tuple.Create(
    499               oldVal.Item1 + h * kvp.Value.Item1,
    500               oldVal.Item2 + h * kvp.Value.Item2
    501             );
     564          // only return the target variables for calculation of errors
     565          yield return targetVariables
     566            .Select(targetVar => variableValues[targetVar])
     567            .ToArray();
     568
     569          // update for next time step
     570          foreach(var varName in inputVariables) {
     571            variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
    502572          }
    503         }
    504 
    505         // only return the target variables for calculation of errors
    506         yield return targetVariables
    507           .Select(targetVar => variableValues[targetVar])
    508           .ToArray();
    509 
    510         // update for next time step
    511         foreach (var varName in inputVariables) {
    512           variableValues[varName] = Tuple.Create(dataset.GetDoubleValue(varName, t), Vector.Zero);
    513573        }
    514574      }
     
    522582        ) {
    523583
    524       switch (node.Symbol.Name) {
     584      switch(node.Symbol.Name) {
    525585        case "+": {
    526586            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues); // TODO capture all parameters into a state type for interpretation
     
    547607
    548608            // protected division
    549             if (r.Item1.IsAlmost(0.0)) {
     609            if(r.Item1.IsAlmost(0.0)) {
    550610              return Tuple.Create(0.0, Vector.Zero);
    551611            } else {
     
    558618        default: {
    559619            // distinguish other cases
    560             if (IsConstantNode(node)) {
     620            if(IsConstantNode(node)) {
    561621              var vArr = new double[parameterValues.Length]; // backing array for vector
    562622              vArr[nodeIdx[node]] = 1.0;
     
    590650    private void RegisterEventHandlers() {
    591651      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
    592       if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
     652      if(ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
    593653
    594654      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
    595       if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
     655      if(TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
    596656
    597657      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
    598       if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
     658      if(FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
    599659
    600660      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
     
    641701      UpdateTargetVariables();        // implicitly updates other dependent parameters
    642702      var handler = ProblemDataChanged;
    643       if (handler != null) handler(this, EventArgs.Empty);
     703      if(handler != null) handler(this, EventArgs.Empty);
    644704    }
    645705
     
    674734      var newVariablesList = new CheckedItemCollection<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
    675735      var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
    676       foreach (var matchingItem in matchingItems) {
     736      foreach(var matchingItem in matchingItems) {
    677737        newVariablesList.SetItemCheckedState(matchingItem, true);
    678738      }
     
    683743      var encoding = new MultiEncoding();
    684744      var g = CreateGrammar();
    685       foreach (var targetVar in TargetVariables.CheckedItems) {
     745      foreach(var targetVar in TargetVariables.CheckedItems) {
    686746        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar + "_tree", g, MaximumLength, MaximumLength)); // only limit by length
    687747      }
    688       for (int i = 1; i <= NumberOfLatentVariables; i++) {
     748      for(int i = 1; i <= NumberOfLatentVariables; i++) {
    689749        encoding = encoding.Add(new SymbolicExpressionTreeEncoding("λ" + i + "_tree", g, MaximumLength, MaximumLength));
    690750      }
     
    704764      //}, 1, 1);
    705765
    706       foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
     766      foreach(var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
    707767        g.AddTerminalSymbol(variableName);
    708768
     
    710770      // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
    711771      var numericConstantsFactor = 2.0;
    712       for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
     772      for(int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
    713773        g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
    714774      }
    715775
    716776      // generate symbols for latent variables
    717       for (int i = 1; i <= NumberOfLatentVariables; i++) {
     777      for(int i = 1; i <= NumberOfLatentVariables; i++) {
    718778        g.AddTerminalSymbol("λ" + i); // numeric parameter for which the value is optimized using AutoDiff
    719779      }
Note: See TracChangeset for help on using the changeset viewer.