
Changeset 15968


Timestamp: 06/20/18 14:33:02 (6 years ago)
Author: gkronber
Message: #2925: added analysis method to generate results for trees and line charts. Added support for multiple target variables. Bugfixing. Added scaling to calculate NMSE as objective

Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
Files: 4 edited

Legend:

  ' '  Unmodified (context)
  +    Added
  -    Removed
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3

    • Property svn:ignore set to
      Plugin.cs
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/HeuristicLab.Problems.DynamicalSystemsModelling-3.3.csproj

r15964 r15968
   </ItemGroup>
   <ItemGroup>
+    <ProjectReference Include="..\..\HeuristicLab.Analysis\3.3\HeuristicLab.Analysis-3.3.csproj">
+      <Project>{887425B4-4348-49ED-A457-B7D2C26DDBF9}</Project>
+      <Name>HeuristicLab.Analysis-3.3</Name>
+    </ProjectReference>
     <ProjectReference Include="..\..\HeuristicLab.Collections\3.3\HeuristicLab.Collections-3.3.csproj">
       <Project>{958B43BC-CC5C-4FA2-8628-2B3B01D890B6}</Project>
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs

r15964 r15968
 using System.Diagnostics;
 using System.Linq;
+using HeuristicLab.Analysis;
+using HeuristicLab.Collections;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
+using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     
   [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 900)]
   [StorableClass]
-  public sealed class Problem : SymbolicExpressionTreeProblem, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
+  public sealed class Problem : SingleObjectiveBasicProblem<MultiEncoding>, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {

     #region parameter names
-    private const string ProblemDataParameterName = "ProblemData";
+    private const string ProblemDataParameterName = "Data";
+    private const string TargetVariablesParameterName = "Target variables";
+    private const string FunctionSetParameterName = "Function set";
+    private const string MaximumLengthParameterName = "Size limit";
+    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
     #endregion

     
     public IValueParameter<IRegressionProblemData> ProblemDataParameter {
       get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
+    }
+    public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> TargetVariablesParameter {
+      get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[TargetVariablesParameterName]; }
+    }
+    public IValueParameter<ReadOnlyCheckedItemCollection<StringValue>> FunctionSetParameter {
+      get { return (IValueParameter<ReadOnlyCheckedItemCollection<StringValue>>)Parameters[FunctionSetParameterName]; }
+    }
+    public IFixedValueParameter<IntValue> MaximumLengthParameter {
+      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumLengthParameterName]; }
+    }
+    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
+      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
     }
     #endregion
     
     }
     IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }
-    #endregion
+
+    public ReadOnlyCheckedItemCollection<StringValue> TargetVariables {
+      get { return TargetVariablesParameter.Value; }
+    }
+
+    public ReadOnlyCheckedItemCollection<StringValue> FunctionSet {
+      get { return FunctionSetParameter.Value; }
+    }
+
+    public int MaximumLength {
+      get { return MaximumLengthParameter.Value.Value; }
+    }
+    public int MaximumParameterOptimizationIterations {
+      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
+    }
+    #endregion

     public event EventHandler ProblemDataChanged;
     
     public Problem()
       : base() {
+      var targetVariables = new CheckedItemCollection<StringValue>().AsReadOnly(); // HACK: it would be better to provide a new class derived from IDataAnalysisProblem
+      var functions = CreateFunctionSet();
       Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data captured from the dynamical system", new RegressionProblemData()));
-
-      // TODO: support multiple target variables
-
-      var g = new SimpleSymbolicExpressionGrammar(); // empty grammar is replaced in UpdateGrammar()
-      base.Encoding = new SymbolicExpressionTreeEncoding(g, 10, 5);         // small for testing
-
-      UpdateGrammar();
+      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(TargetVariablesParameterName, "Target variables (overrides setting in ProblemData)", targetVariables));
+      Parameters.Add(new ValueParameter<ReadOnlyCheckedItemCollection<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
+      Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression", new IntValue(20)));
+      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS)", new IntValue(100)));
+
       RegisterEventHandlers();
-    }
-
-
-    public override double Evaluate(ISymbolicExpressionTree tree, IRandom random) {
+      InitAllParameters();
+    }
+
+
+    public override double Evaluate(Individual individual, IRandom random) {
+      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
+
       var problemData = ProblemData;
       var rows = ProblemData.TrainingIndices.ToArray();
-      var target = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
+      var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
+      var targetValues = new double[rows.Length,targetVars.Length];
+
+      // collect values of all target variables
+      var colIdx = 0;
+      foreach(var targetVar in targetVars) {
+        int rowIdx = 0;
+        foreach(var value in problemData.Dataset.GetDoubleValues(targetVar, rows)) {
+          targetValues[rowIdx, colIdx] = value;
+          rowIdx++;
+        }
+        colIdx++;
+      }

       var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
-
-      foreach(var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
-        nodeIdx.Add(node, nodeIdx.Count);
+
+      foreach (var tree in trees) {
+        foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
+          nodeIdx.Add(node, nodeIdx.Count);
+        }
       }

     
         alglib.minlbfgsreport report;
         alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
-        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, 100);
-        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null, new object[] { tree, problemData, nodeIdx });
+        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
+        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null, new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows }); //TODO: create a type
         alglib.minlbfgsresults(state, out optTheta, out report);

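Note: the hunk above replaces the hard-coded iteration limit (100) with the new MaximumParameterOptimizationIterations parameter and passes the extra state needed for multiple targets into the objective callback. For readers unfamiliar with the alglib calling pattern used here, a minimal, self-contained sketch on a toy quadratic (illustrative only, not part of the changeset; it only reuses the alglib calls that appear in the hunk above):

    using System;

    public static class LbfgsSketch {
      // Same delegate shape as EvaluateObjectiveAndGradient: value via ref, gradient via array.
      private static void Objective(double[] x, ref double f, double[] grad, object obj) {
        // toy objective f(x) = (x0 - 3)^2 + (x1 + 1)^2
        f = (x[0] - 3) * (x[0] - 3) + (x[1] + 1) * (x[1] + 1);
        grad[0] = 2 * (x[0] - 3);
        grad[1] = 2 * (x[1] + 1);
      }

      public static double[] Run(int maxIterations) {
        var theta = new[] { 0.0, 0.0 };                               // initial parameter values
        alglib.minlbfgsstate state;
        alglib.minlbfgsreport report;
        alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxIterations);  // stop on the iteration limit only
        alglib.minlbfgsoptimize(state, Objective, null, null);        // no extra payload needed here
        double[] optTheta;
        alglib.minlbfgsresults(state, out optTheta, out report);
        return optTheta;                                              // approximately { 3.0, -1.0 }
      }
    }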
     
       double[] grad = new double[optTheta.Length];
       double optQuality = double.NaN;
-      EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad, new object[] { tree, problemData, nodeIdx});
+      EvaluateObjectiveAndGradient(optTheta, ref optQuality, grad, new object[] { trees, targetVars, problemData, nodeIdx, targetValues, rows });
       if (double.IsNaN(optQuality) || double.IsInfinity(optQuality)) return 10E6; // return a large value (TODO: be consistent by using NMSE)
-      // TODO: write back values
+
+      individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
       return optQuality;
     }

     private static void EvaluateObjectiveAndGradient(double[] x, ref double f, double[] grad, object obj) {
-      var tree = (ISymbolicExpressionTree)((object[])obj)[0];
-      var problemData = (IRegressionProblemData)((object[])obj)[1];
-      var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[2];
-
+      var trees = (ISymbolicExpressionTree[])((object[])obj)[0];
+      var targetVariables = (string[])((object[])obj)[1];
+      var problemData = (IRegressionProblemData)((object[])obj)[2];
+      var nodeIdx = (Dictionary<ISymbolicExpressionTreeNode, int>)((object[])obj)[3];
+      var targetValues = (double[,])((object[])obj)[4];
+      var rows = (int[])((object[])obj)[5];

       var predicted = Integrate(
-        new[] { tree },  // we assume tree contains an expression for the change of the target variable over time y'(t)
+        trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
         problemData.Dataset,
         problemData.AllowedInputVariables.ToArray(),
-        new[] { problemData.TargetVariable },
-        problemData.TrainingIndices,
-        nodeIdx,
+        targetVariables,
+        rows,
+        nodeIdx,                // TODO: is it Ok to use rows here ?
         x).ToArray();

-      // objective function is MSE
+
+      // for normalized MSE = 1/variance(t) * MSE(t, pred)
+      var invVar = Enumerable.Range(0, targetVariables.Length)
+        .Select(c => rows.Select(row => targetValues[row, c])) // colums vectors
+        .Select(vec => vec.Variance())
+        .Select(v => 1.0 / v)
+        .ToArray();
+
+      // objective function is NMSE
       f = 0.0;
       int n = predicted.Length;
       double invN = 1.0 / n;
       var g = Vector.Zero;
-      foreach(var pair in predicted.Zip(problemData.TargetVariableTrainingValues, Tuple.Create)) {
-        var y_pred = pair.Item1;
-        var y = pair.Item2;
-
-        var res = (y - y_pred.Item1);
-        var ressq = res * res;
-        f += ressq * invN;
-        g += -2.0 * res * y_pred.Item2 * invN;
+      int r = 0;
+      foreach (var y_pred in predicted) {
+        // TODO NMSE to put the same weight on each target regardless of the value range;
+        for(int c = 0;c<y_pred.Length;c++) {
+
+          var y_pred_f = y_pred[c].Item1;
+          var y = targetValues[r,c];
+
+          var res = (y - y_pred_f);
+          var ressq = res * res;
+          f += ressq * invN * invVar[c];
+          g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c];
+        }
+        r++;
       }

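Note: the objective changes here from a plain MSE to a normalized MSE: each target's squared error is weighted by the inverse variance of that target over the training rows, so targets with different value ranges contribute comparably. A standalone sketch of the same computation on plain arrays (illustrative only; the changeset itself uses HeuristicLab's Variance() extension and accumulates the gradient g alongside the function value):

    using System.Linq;

    public static class ScaledNmseSketch {
      // f = (1/N) * sum over rows r and targets c of (y[r,c] - yPred[r,c])^2 / Var(y[.,c])
      public static double ScaledNmse(double[,] y, double[,] yPred) {
        int n = y.GetLength(0), numTargets = y.GetLength(1);
        double invN = 1.0 / n;
        // inverse variance per target column, mirroring the invVar array above
        var invVar = Enumerable.Range(0, numTargets)
          .Select(c => Enumerable.Range(0, n).Select(r => y[r, c]).ToArray())
          .Select(col => 1.0 / Variance(col))
          .ToArray();
        double f = 0.0;
        for (int r = 0; r < n; r++)
          for (int c = 0; c < numTargets; c++) {
            var res = y[r, c] - yPred[r, c];
            f += res * res * invN * invVar[c];
          }
        return f;
      }

      // population variance; HeuristicLab's Variance() extension may use a different divisor
      private static double Variance(double[] xs) {
        double mean = xs.Average();
        return xs.Select(x => (x - mean) * (x - mean)).Sum() / xs.Length;
      }
    }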
     
     }

-
-    private static IEnumerable<Tuple<double, Vector>> Integrate(
+    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
+      base.Analyze(individuals, qualities, results, random);
+
+      if (!results.ContainsKey("Prediction (training)")) {
+        results.Add(new Result("Prediction (training)", typeof(ReadOnlyItemList<DataTable>)));
+      }
+      if (!results.ContainsKey("Prediction (test)")) {
+        results.Add(new Result("Prediction (test)", typeof(ReadOnlyItemList<DataTable>)));
+      }
+      if (!results.ContainsKey("Models")) {
+        results.Add(new Result("Models", typeof(ReadOnlyItemList<ISymbolicExpressionTree>)));
+      }
+
+      // TODO extract common functionality from Evaluate and Analyze
+      var bestIndividualAndQuality = this.GetBestIndividual(individuals, qualities);
+      var optTheta = ((DoubleArray) bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see evaluate
+      var trees = bestIndividualAndQuality.Item1.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
+      var nodeIdx = new Dictionary<ISymbolicExpressionTreeNode, int>();
+
+
+      foreach (var tree in trees) {
+        foreach (var node in tree.Root.IterateNodesPrefix().Where(n => IsConstantNode(n))) {
+          nodeIdx.Add(node, nodeIdx.Count);
+        }
+      }
+      var problemData = ProblemData;
+      var targetVars = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
+
+      var trainingList = new ItemList<DataTable>();
+      var trainingRows = ProblemData.TrainingIndices.ToArray();
+      var trainingPrediction = Integrate(
+       trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
+       problemData.Dataset,
+       problemData.AllowedInputVariables.ToArray(),
+       targetVars,
+       trainingRows,
+       nodeIdx,
+       optTheta).ToArray();
+
+      for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
+        var targetVar = targetVars[colIdx];
+        var trainingDataTable = new DataTable(targetVar+ " prediction (training)");
+        var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
+        var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+        trainingDataTable.Rows.Add(actualValuesRow);
+        trainingDataTable.Rows.Add(predictedValuesRow);
+        trainingList.Add(trainingDataTable);
+      }
+
+      // TODO: DRY for training and test
+      var testList = new ItemList<DataTable>();
+      var testRows = ProblemData.TestIndices.ToArray();
+      var testPrediction = Integrate(
+       trees,  // we assume trees contain expressions for the change of each target variable over time y'(t)
+       problemData.Dataset,
+       problemData.AllowedInputVariables.ToArray(),
+       targetVars,
+       testRows,
+       nodeIdx,
+       optTheta).ToArray();
+
+      for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
+        var targetVar = targetVars[colIdx];
+        var testDataTable = new DataTable(targetVar + " prediction (test)");
+        var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
+        var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+        testDataTable.Rows.Add(actualValuesRow);
+        testDataTable.Rows.Add(predictedValuesRow);
+        testList.Add(testDataTable);
+      }
+
+      results["Prediction (training)"].Value = trainingList.AsReadOnly();
+      results["Prediction (test)"].Value = testList.AsReadOnly();
+      results["Models"].Value = new ItemList<ISymbolicExpressionTree>(trees).AsReadOnly();
+    }
+
+
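Note: the new Analyze method publishes, per checked target variable, a DataTable with the measured values and the predicted values (for the training and test partitions) plus the best individual's trees under "Models". A hypothetical consumer of these results (not part of the changeset; it reuses the result keys and types from the hunk above, and assumes they can be enumerated as shown):

    using System;
    using HeuristicLab.Analysis;
    using HeuristicLab.Core;
    using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    using HeuristicLab.Optimization;

    public static class ResultsInspectionSketch {
      public static void Print(ResultCollection results) {
        var trainingTables = (ReadOnlyItemList<DataTable>)results["Prediction (training)"].Value;
        foreach (DataTable table in trainingTables) {
          // each table holds one row with the measured target values and one row with the predictions
          foreach (DataRow row in table.Rows)
            Console.WriteLine("{0}: {1}", table.Name, row.Name);
        }
        var models = (ReadOnlyItemList<ISymbolicExpressionTree>)results["Models"].Value;
        Console.WriteLine("{0} model trees (one per checked target variable)", models.Count);
      }
    }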
+    #region interpretation
+    private static IEnumerable<Tuple<double, Vector>[]> Integrate(
       ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, IEnumerable<int> rows,
       Dictionary<ISymbolicExpressionTreeNode, int> nodeIdx, double[] parameterValues) {
     

       // return first value as stored in the dataset
-      yield return Tuple.Create(dataset.GetDoubleValue(targetVariables.First(), rows.First()), Vector.Zero);
+
+      yield return targetVariables
+        .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
+        .ToArray();

       // integrate forward starting with known values for the target in t0
     
         }

-        // yield target values
-        foreach (var varName in targetVariables) {
-          yield return variableValues[varName];
-        }
+        yield return targetVariables
+          .Select(targetVar => variableValues[targetVar])
+          .ToArray();

         // update for next time step
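Note: the hunks above show only fragments of Integrate. With this change it yields, per row, one array with a (value, gradient) tuple per target variable, starting from the values stored in the dataset at t0 and stepping forward in time. A simplified, self-contained sketch of that forward-integration pattern (illustrative only; the real method also propagates gradient Vectors for AutoDiff, evaluates the evolved trees via InterpretRec, and reads input variables from the dataset):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    public static class ForwardIntegrationSketch {
      // One "change" function per target variable stands in for the evolved trees (y_i'(t)).
      public static IEnumerable<double[]> Integrate(
          double[] initialValues, Func<double[], double>[] changeExpressions, int steps) {
        var y = (double[])initialValues.Clone();
        yield return (double[])y.Clone();                   // first row: the known values at t0
        for (int t = 0; t < steps; t++) {
          var dy = changeExpressions.Select(f => f(y)).ToArray();
          for (int i = 0; i < y.Length; i++) y[i] += dy[i]; // Euler-style update with step size 1
          yield return (double[])y.Clone();                 // predicted target values for this row
        }
      }
    }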
     
           }
         case "*": {
-            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx,parameterValues);
+            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
             var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

     

         case "-": {
-            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx,parameterValues);
+            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
             var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

     
           }
         case "%": {
-            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx,parameterValues);
+            var l = InterpretRec(node.GetSubtree(0), variableValues, nodeIdx, parameterValues);
             var r = InterpretRec(node.GetSubtree(1), variableValues, nodeIdx, parameterValues);

     
       }
     }
-
+    #endregion

     #region events
+    /*
+     * Dependencies between parameters:
+     *
+     * ProblemData
+     *    |
+     *    V
+     * TargetVariables   FunctionSet    MaximumLength
+     *               |   |                 |
+     *               V   V                 |
+     *             Grammar <---------------+
+     *                |
+     *                V
+     *            Encoding
+     */
     private void RegisterEventHandlers() {
-      ProblemDataParameter.ValueChanged += new EventHandler(ProblemDataParameter_ValueChanged);
-      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += new EventHandler(ProblemData_Changed);
+      ProblemDataParameter.ValueChanged += ProblemDataParameter_ValueChanged;
+      if (ProblemDataParameter.Value != null) ProblemDataParameter.Value.Changed += ProblemData_Changed;
+
+      TargetVariablesParameter.ValueChanged += TargetVariablesParameter_ValueChanged;
+      if (TargetVariablesParameter.Value != null) TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
+
+      FunctionSetParameter.ValueChanged += FunctionSetParameter_ValueChanged;
+      if (FunctionSetParameter.Value != null) FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
+
+      MaximumLengthParameter.Value.ValueChanged += MaximumLengthChanged;
+    }
+
+    private void MaximumLengthChanged(object sender, EventArgs e) {
+      UpdateGrammarAndEncoding();
+    }
+
+    private void FunctionSetParameter_ValueChanged(object sender, EventArgs e) {
+      FunctionSetParameter.Value.CheckedItemsChanged += CheckedFunctionsChanged;
+    }
+
+    private void CheckedFunctionsChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
+      UpdateGrammarAndEncoding();
+    }
+
+    private void TargetVariablesParameter_ValueChanged(object sender, EventArgs e) {
+      TargetVariablesParameter.Value.CheckedItemsChanged += CheckedTargetVariablesChanged;
+    }
+
+    private void CheckedTargetVariablesChanged(object sender, CollectionItemsChangedEventArgs<StringValue> e) {
+      UpdateGrammarAndEncoding();
     }

     private void ProblemDataParameter_ValueChanged(object sender, EventArgs e) {
-      ProblemDataParameter.Value.Changed += new EventHandler(ProblemData_Changed);
+      ProblemDataParameter.Value.Changed += ProblemData_Changed;
       OnProblemDataChanged();
       OnReset();
     

     private void ProblemData_Changed(object sender, EventArgs e) {
+      OnProblemDataChanged();
       OnReset();
     }

     private void OnProblemDataChanged() {
-      UpdateGrammar();
+      UpdateTargetVariables();        // implicitly updates other dependent parameters

       var handler = ProblemDataChanged;
     
     }

-    private void UpdateGrammar() {
+    #endregion
+
+    #region  helper
+
+    private void InitAllParameters() {
+      UpdateTargetVariables(); // implicitly updates the grammar and the encoding
+    }
+
+    private ReadOnlyCheckedItemCollection<StringValue> CreateFunctionSet() {
+      var l = new CheckedItemCollection<StringValue>();
+      l.Add(new StringValue("+").AsReadOnly());
+      l.Add(new StringValue("*").AsReadOnly());
+      l.Add(new StringValue("%").AsReadOnly());
+      l.Add(new StringValue("-").AsReadOnly());
+      return l.AsReadOnly();
+    }
+
+    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
+      return n.Symbol.Name.StartsWith("θ");
+    }
+
+
+    private void UpdateTargetVariables() {
+      var currentlySelectedVariables = TargetVariables.CheckedItems.Select(i => i.Value).ToArray();
+
+      var newVariablesList = new CheckedItemCollection<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
+      var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
+      foreach (var matchingItem in matchingItems) {
+        newVariablesList.SetItemCheckedState(matchingItem, true);
+      }
+      TargetVariablesParameter.Value = newVariablesList;
+    }
+
+    private void UpdateGrammarAndEncoding() {
+      var encoding = new MultiEncoding();
+      var g = CreateGrammar();
+      foreach (var targetVar in TargetVariables.CheckedItems) {
+        encoding = encoding.Add(new SymbolicExpressionTreeEncoding(targetVar+"_tree",g, MaximumLength, MaximumLength)); // only limit by length
+      }
+      Encoding = encoding;
+    }
+
+    private ISymbolicExpressionGrammar CreateGrammar() {
       // whenever ProblemData is changed we create a new grammar with the necessary symbols
       var g = new SimpleSymbolicExpressionGrammar();
-      g.AddSymbols(new[] {
-        "+",
-        "*",
-//        "%", // % is protected division 1/0 := 0 // removed for testing
-        "-",
-      }, 2, 2);
+      g.AddSymbols(FunctionSet.CheckedItems.Select(i => i.Value).ToArray(), 2, 2);

       // TODO
     
       //}, 1, 1);

-      foreach (var variableName in ProblemData.AllowedInputVariables)
-        g.AddTerminalSymbol(variableName);
-      foreach (var variableName in new string[] { ProblemData.TargetVariable }) // TODO: multiple target variables
+      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value)))
         g.AddTerminalSymbol(variableName);

     
       // we generate multiple symbols to balance the probability for selecting a numeric parameter in the generation of random trees
       var numericConstantsFactor = 2.0;
-      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + 1); i++) {
+      for (int i = 0; i < numericConstantsFactor * (ProblemData.AllowedInputVariables.Count() + TargetVariables.CheckedItems.Count()); i++) {
         g.AddTerminalSymbol("θ" + i); // numeric parameter for which the value is optimized using AutoDiff
       }
-      Encoding.Grammar = g;
-    }
+      return g;
+    }
+
     #endregion

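Note: the helper region above ties the new parameters together: UpdateTargetVariables refreshes the selectable targets from the dataset, and UpdateGrammarAndEncoding rebuilds the grammar and a MultiEncoding with one SymbolicExpressionTreeEncoding per checked target variable, following the dependency diagram in the events region. A hypothetical usage snippet (not part of the changeset; the namespace is assumed to match the project name, and the snippet only reuses members introduced above):

    using System.Linq;
    using HeuristicLab.Problems.DynamicalSystemsModelling;

    public static class TargetSelectionSketch {
      public static void CheckFirstVariable(Problem problem) {
        // checking a variable raises CheckedItemsChanged, which rebuilds the grammar and the
        // encoding (one symbolic expression tree per checked target variable)
        var targets = problem.TargetVariablesParameter.Value;
        var firstVar = targets.First();                // assumes the dataset has at least one variable
        targets.SetItemCheckedState(firstVar, true);
      }
    }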
     
     #endregion

-
-    #region  helper
-
-    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
-      return n.Symbol.Name.StartsWith("θ");
-    }
-
-    #endregion
-
   }
 }
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Properties

    • Property svn:ignore set to
      AssemblyInfo.cs