Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
12/19/18 07:43:36 (6 years ago)
Author:
gkronber
Message:

#2925: solution class and solution view

File:
1 edited

Legend:

Unmodified
Added
Removed
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs

    r16398 r16399  
    198198    public override double Evaluate(Individual individual, IRandom random) {
    199199      var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
    200       // write back optimized parameters to tree nodes instead of the separate OptTheta variable
    201       // retrieve optimized parameters from nodes?
    202 
     200                                                                                                      // write back optimized parameters to tree nodes instead of the separate OptTheta variable
     201                                                                                                      // retrieve optimized parameters from nodes?
     202
     203      var problemData = ProblemData;
     204      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
     205      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
    203206      if (OptimizeParametersForEpisodes) {
    204207        int eIdx = 0;
     
    208211          double[] optTheta;
    209212          double nmse;
    210           OptimizeForEpisodes(trees, random, new[] { episode }, out optTheta, out nmse);
     213          OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
    211214          individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
    212215          eIdx++;
     
    218221        double[] optTheta;
    219222        double nmse;
    220         OptimizeForEpisodes(trees, random, TrainingEpisodes, out optTheta, out nmse);
     223        OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
    221224        individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
    222225        return nmse;
     
    224227    }
    225228
    226     private void OptimizeForEpisodes(
     229    public static void OptimizeForEpisodes(
    227230      ISymbolicExpressionTree[] trees,
     231      IRegressionProblemData problemData,
     232      string[] targetVars,
     233      string[] latentVariables,
    228234      IRandom random,
    229235      IEnumerable<IntRange> episodes,
     236      int maxParameterOptIterations,
     237      int numericIntegrationSteps,
     238      string odeSolver,
    230239      out double[] optTheta,
    231240      out double nmse) {
    232241      var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
    233       var problemData = ProblemData;
    234       var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
    235       var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
    236242      var targetValues = new double[rows.Length, targetVars.Length];
    237243
     
    267273        alglib.minlbfgsreport report;
    268274        alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
    269         alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
     275        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
    270276        //alglib.minlbfgssetgradientcheck(state, 1e-6);
    271277        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
    272           new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver }); //TODO: create a type
     278          new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver }); //TODO: create a type
    273279
    274280        alglib.minlbfgsresults(state, out optTheta, out report);
     
    307313      nmse = double.NaN;
    308314      EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
    309         new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver });
     315        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver });
    310316      if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
    311317    }
     
    385391        results.Add(new Result("Models", typeof(VariableCollection)));
    386392      }
    387       if(!results.ContainsKey("SNMSE")) {
     393      if (!results.ContainsKey("SNMSE")) {
    388394        results.Add(new Result("SNMSE", typeof(DoubleValue)));
     395      }
     396      if (!results.ContainsKey("Solution")) {
     397        results.Add(new Result("Solution", typeof(Solution)));
    389398      }
    390399
     
    517526        results["Prediction (training)"].Value = trainingList.AsReadOnly();
    518527        results["Prediction (test)"].Value = testList.AsReadOnly();
     528
     529
    519530        #region simplification of models
    520531        // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
    521532        var models = new VariableCollection();    // to store target var names and original version of tree
    522533
     534        var optimizedTrees = new List<ISymbolicExpressionTree>();
    523535        int nextParIdx = 0;
     536        for (int idx = 0; idx < trees.Length; idx++) {
     537          var tree = trees[idx];
     538          optimizedTrees.Add(new SymbolicExpressionTree(FixParameters(tree.Root, optTheta.ToArray(), ref nextParIdx)));
     539        }
     540        var ds = problemData.Dataset;
     541        var newVarNames = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
     542        var allVarNames = ds.DoubleVariables.Concat(newVarNames);
     543        var newVarValues = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
     544        var allVarValues = ds.DoubleVariables.Select(varName => ds.GetDoubleValues(varName).ToList())
     545          .Concat(Enumerable.Range(0, nextParIdx).Select(i => Enumerable.Repeat(optTheta[i], ds.Rows).ToList()))
     546          .ToList();
     547        var newDs = new Dataset(allVarNames, allVarValues);
     548        var newProblemData = new RegressionProblemData(newDs, problemData.AllowedInputVariables.Concat(newVarValues).ToArray(), problemData.TargetVariable);
     549        results["Solution"].Value = new Solution(optimizedTrees.ToArray(),
     550                   // optTheta,
     551                   newProblemData,
     552                   targetVars,
     553                   latentVariables,
     554                   TrainingEpisodes,
     555                   OdeSolver,
     556                   NumericIntegrationSteps);
     557
     558
     559        nextParIdx = 0;
    524560        for (int idx = 0; idx < trees.Length; idx++) {
    525561          var varName = string.Empty;
     
    558594
    559595        }
     596
    560597        results["Models"].Value = models;
    561598        #endregion
     
    576613    // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
    577614
    578     private static IEnumerable<Tuple<double, Vector>[]> Integrate(
     615    public static IEnumerable<Tuple<double, Vector>[]> Integrate(
    579616      ISymbolicExpressionTree[] trees, IDataset dataset,
    580617      string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
     
    9991036
    10001037
    1001         foreach(var node in nodeValues.Keys.ToArray()) {
    1002           if(node.SubtreeCount == 0 && !IsConstantNode(node)) {
     1038        foreach (var node in nodeValues.Keys.ToArray()) {
     1039          if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
    10031040            // update values for (latent) variables
    10041041            var varName = node.Symbol.Name;
     
    11681205
    11691206    private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
    1170       return n.Symbol.Name.StartsWith("θ");
     1207      return n.Symbol.Name[0] == 'θ';
    11711208    }
    11721209    private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
    1173       return n.Symbol.Name.StartsWith("λ");
     1210      return n.Symbol.Name[0] == 'λ';
    11741211    }
    11751212    private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
     
    12321269
    12331270      return g;
     1271    }
     1272
     1273
     1274
     1275
     1276
     1277    private ISymbolicExpressionTreeNode FixParameters(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
     1278      ISymbolicExpressionTreeNode translatedNode = null;
     1279      if (n.Symbol is StartSymbol) {
     1280        translatedNode = new StartSymbol().CreateTreeNode();
     1281      } else if (n.Symbol is ProgramRootSymbol) {
     1282        translatedNode = new ProgramRootSymbol().CreateTreeNode();
     1283      } else if (n.Symbol.Name == "+") {
     1284        translatedNode = new SimpleSymbol("+", 2).CreateTreeNode();
     1285      } else if (n.Symbol.Name == "-") {
     1286        translatedNode = new SimpleSymbol("-", 2).CreateTreeNode();
     1287      } else if (n.Symbol.Name == "*") {
     1288        translatedNode = new SimpleSymbol("*", 2).CreateTreeNode();
     1289      } else if (n.Symbol.Name == "%") {
     1290        translatedNode = new SimpleSymbol("%", 2).CreateTreeNode();
     1291      } else if (n.Symbol.Name == "sin") {
     1292        translatedNode = new SimpleSymbol("sin", 1).CreateTreeNode();
     1293      } else if (n.Symbol.Name == "cos") {
     1294        translatedNode = new SimpleSymbol("cos", 1).CreateTreeNode();
     1295      } else if (n.Symbol.Name == "sqr") {
     1296        translatedNode = new SimpleSymbol("sqr", 1).CreateTreeNode();
     1297      } else if (IsConstantNode(n)) {
     1298        translatedNode = new SimpleSymbol("c_" + nextParIdx, 0).CreateTreeNode();
     1299        nextParIdx++;
     1300      } else {
     1301        translatedNode = new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();
     1302      }
     1303      foreach (var child in n.Subtrees) {
     1304        translatedNode.AddSubtree(FixParameters(child, parameterValues, ref nextParIdx));
     1305      }
     1306      return translatedNode;
    12341307    }
    12351308
Note: See TracChangeset for help on using the changeset viewer.