
Changeset 16893


Timestamp: 05/04/19 08:41:14
Author: gkronber
Message: #2925: Allow separate configuration of the number of constant-optimization steps for pre-tuning and for the full ODE. Allow a weighted combination of fitness using the pre-tuning NMSE and the full-ODE NMSE.
Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
Files: 4 edited
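
In short: the previous single parameter-optimization iteration limit is split into separate limits for the pre-tuning phase (fitting individual equations to numerically estimated derivatives) and for the full-ODE phase (fitting via numeric integration), and the fitness becomes a weighted sum of the two NMSE values. A minimal sketch of that combination, with hypothetical stand-ins for PreTuneParameters and OptimizeParameters (both weights default to 0.5, as in the diff below):

    using System;

    public static class WeightedFitnessSketch {
      // Hypothetical stand-in for the combination introduced in this changeset:
      // nmse = pretuningWeight * pretuningNMSE + odeWeight * odeNMSE.
      public static double CombinedNmse(
          Func<int, double> preTuneNmse,   // stand-in for PreTuneParameters, given its own iteration limit
          Func<int, double> odeNmse,       // stand-in for OptimizeParameters, given its own iteration limit
          int maxPretuningIters = 100,
          int maxOdeIters = 100,
          double pretuningErrorWeight = 0.5,
          double odeErrorWeight = 0.5) {
        double nmse = pretuningErrorWeight * preTuneNmse(maxPretuningIters);
        nmse += odeErrorWeight * odeNmse(maxOdeIters);
        return nmse;
      }
    }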

Legend:

  @@ -a,b +c,d @@  hunk header: b lines starting at line a in the old revision, d lines starting at line c in the new revision
   unmodified (prefixed with a space)
  +added
  -removed
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3

      Property svn:ignore changed:
           Plugin.cs
          +sundials-x64
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs (diff r16786 → r16893)
@@ -51,5 +51,6 @@
     private const string FunctionSetParameterName = "Function set";
     private const string MaximumLengthParameterName = "Size limit";
-    private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
+    private const string MaximumPretuningParameterOptimizationIterationsParameterName = "Max. pre-tuning parameter optimization iterations";
+    private const string MaximumOdeParameterOptimizationIterationsParameterName = "Max. ODE parameter optimization iterations";
     private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
     private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
     
@@ -75,6 +76,9 @@
     }
 
-    public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
-      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
+    public IFixedValueParameter<IntValue> MaximumPretuningParameterOptimizationIterationsParameter {
+      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumPretuningParameterOptimizationIterationsParameterName]; }
+    }
+    public IFixedValueParameter<IntValue> MaximumOdeParameterOptimizationIterationsParameter {
+      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumOdeParameterOptimizationIterationsParameterName]; }
     }
     public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
     
@@ -92,4 +96,10 @@
     public IConstrainedValueParameter<StringValue> OdeSolverParameter {
       get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
+    }
+    public IFixedValueParameter<DoubleValue> PretuningErrorWeight {
+      get { return (IFixedValueParameter<DoubleValue>)Parameters["Pretuning NMSE weight"]; }
+    }
+    public IFixedValueParameter<DoubleValue> OdeErrorWeight {
+      get { return (IFixedValueParameter<DoubleValue>)Parameters["ODE NMSE weight"]; }
     }
     #endregion
     
@@ -113,6 +123,9 @@
       get { return MaximumLengthParameter.Value.Value; }
     }
-    public int MaximumParameterOptimizationIterations {
-      get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
+    public int MaximumPretuningParameterOptimizationIterations {
+      get { return MaximumPretuningParameterOptimizationIterationsParameter.Value.Value; }
+    }
+    public int MaximumOdeParameterOptimizationIterations {
+      get { return MaximumOdeParameterOptimizationIterationsParameter.Value.Value; }
     }
     public int NumberOfLatentVariables {
     
@@ -155,4 +168,21 @@
         Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
       }
+      int iters = 100;
+      if (Parameters.ContainsKey("Max. parameter optimization iterations")) {
+        iters = ((IFixedValueParameter<IntValue>)Parameters["Max. parameter optimization iterations"]).Value.Value;
+      }
+      if (!Parameters.ContainsKey(MaximumPretuningParameterOptimizationIterationsParameterName)) {
+        Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(iters)));
+      }
+      if (!Parameters.ContainsKey(MaximumOdeParameterOptimizationIterationsParameterName)) {
+        Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(iters)));
+      }
+
+      if (!Parameters.ContainsKey("Pretuning NMSE weight"))
+        Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+      if (!Parameters.ContainsKey("ODE NMSE weight"))
+        Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+
+
       RegisterEventHandlers();
     }
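
The hunk above back-fills the two new iteration parameters from the legacy "Max. parameter optimization iterations" value, so previously stored runs keep their configured limit. A simplified, hypothetical illustration of the same migration pattern, using a plain dictionary in place of HeuristicLab's parameter collection:

    using System.Collections.Generic;

    public static class ParameterMigrationSketch {
      // If the legacy key is present, its value seeds both new keys;
      // otherwise the documented default (100) is used. Existing values
      // for the new keys are never overwritten.
      public static void Migrate(IDictionary<string, int> parameters) {
        int iters = parameters.TryGetValue("Max. parameter optimization iterations", out var legacy)
          ? legacy : 100;
        if (!parameters.ContainsKey("Max. pre-tuning parameter optimization iterations"))
          parameters["Max. pre-tuning parameter optimization iterations"] = iters;
        if (!parameters.ContainsKey("Max. ODE parameter optimization iterations"))
          parameters["Max. ODE parameter optimization iterations"] = iters;
      }
    }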
     
@@ -174,9 +204,12 @@
       Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
       Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
-      Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
+      Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
+      Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
       Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
       Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps makes the algorithm slower, less steps worsens the accuracy of the numeric integration scheme.", new IntValue(10)));
       Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
       Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
+      Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+      Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
 
       var solversStr = new string[] { "HeuristicLab" /* , "CVODES" */};
     
@@ -207,5 +240,5 @@
         foreach (var episode in TrainingEpisodes) {
           // double[] optTheta;
-          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
+          double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations);
           // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
           eIdx++;
     
@@ -216,5 +249,6 @@
       } else {
         // double[] optTheta;
-        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
+        double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations,
+          PretuningErrorWeight.Value.Value, OdeErrorWeight.Value.Value);
         // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
         return nmse;
     
@@ -229,7 +263,13 @@
       IRandom random,
       IEnumerable<IntRange> episodes,
-      int maxParameterOptIterations,
+      int maxPretuningParameterOptIterations,
       int numericIntegrationSteps,
-      string odeSolver) {
+      string odeSolver,
+      int maxOdeParameterOptIterations,
+      double pretuningErrorWeight = 0.5,
+      double odeErrorWeight = 0.5
+      ) {
+
+
 
       // extract constants from trees (without trees for latent variables)
     
@@ -240,5 +280,5 @@
 
       // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
-      double nmse = PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxParameterOptIterations,
+      double nmse = pretuningErrorWeight * PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxPretuningParameterOptIterations,
         initialTheta, out double[] pretunedParameters);
 
     
@@ -250,5 +290,5 @@
 
       // optimize parameters using integration of f(x,y) to calculate y(t)
-      nmse = OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
+      nmse += odeErrorWeight * OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxOdeParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
         out double[] optTheta);
       // var optTheta = pretunedParameters;
     
@@ -285,6 +325,6 @@
       var latentTrees = trees.Take(latentVariables.Length).ToArray();
 
-      {
-        // first calculate values of latent variables by integration
+      // first calculate values of latent variables by integration
+      if(latentVariables.Length > 0) {
         var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
         var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "HeuristicLab");
     
@@ -334,6 +374,6 @@
         var paramCount = myState.nodeValueLookup.ParameterCount;
 
-        optTheta = new double[0];
-        if (initialTheta[treeIdx].Length > 0) {
+        optTheta = initialTheta[treeIdx];
+        if (initialTheta[treeIdx].Length > 0 && maxParameterOptIterations > -1) {
           try {
             alglib.minlmstate state;
     
@@ -398,5 +438,5 @@
       optTheta = initialTheta;
 
-      if (initialTheta.Length > 0) {
+      if (initialTheta.Length > 0 && maxParameterOptIterations > -1) {
         var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
         var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
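
The last two hunks also tighten the guards around the optimization calls: optimization now runs only when there are parameters to tune and the iteration limit is non-negative, so a limit of -1 effectively disables that phase and keeps the initial parameter values. A hypothetical sketch of the guard's semantics:

    using System;

    public static class IterationGuardSketch {
      // Mirrors the new condition: optimize only for a non-empty parameter
      // vector and a non-negative iteration limit; otherwise keep the
      // initial values unchanged.
      public static double[] OptimizeOrKeep(
          double[] initialTheta, int maxIterations,
          Func<double[], double[]> optimize) {
        if (initialTheta.Length > 0 && maxIterations > -1)
          return optimize(initialTheta);
        return initialTheta;
      }
    }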
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Solution.cs (diff r16663 → r16893)
@@ -6,8 +6,6 @@
 using HeuristicLab.Data;
 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
-using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
 using HeuristicLab.Problems.DataAnalysis;
 using HeuristicLab.Problems.DataAnalysis.Symbolic;
-using HeuristicLab.Random;
 using HEAL.Attic;
 
     
@@ -20,6 +18,4 @@
       get { return trees; }
     }
-    // [Storable]
-    // private double[] theta;
 
     [Storable]
     
@@ -59,6 +55,4 @@
       this.trees = new ISymbolicExpressionTree[original.trees.Length];
       for (int i = 0; i < trees.Length; i++) this.trees[i] = cloner.Clone(original.trees[i]);
-      // this.theta = new double[original.theta.Length];
-      // Array.Copy(original.theta, this.theta, this.theta.Length);
       this.problemData = cloner.Clone(original.problemData);
       this.targetVars = original.TargetVariables.ToArray();
     
@@ -89,7 +83,4 @@
     public IEnumerable<double[]> Predict(IntRange episode, int forecastHorizon) {
       var forecastEpisode = new IntRange(episode.Start, episode.End + forecastHorizon);
-      //
-      // var random = new FastRandom(12345);
-      // snmse = Problem.OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { forecastEpisode }, 100, numericIntegrationSteps, odeSolver);
 
       var inputVariables = trees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
     
@@ -99,9 +90,4 @@
 
       var optimizationData = new Problem.OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, new[] { forecastEpisode }, numericIntegrationSteps, latentVariables, odeSolver);
-      //
-      //
-      // var theta = Problem.ExtractParametersFromTrees(trees);
-
-
       var fi = new double[forecastEpisode.Size * targetVars.Length];
       var jac = new double[forecastEpisode.Size * targetVars.Length, optimizationData.nodeValueLookup.ParameterCount];
  • branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/sundials

      Property svn:ignore set to:
          cvodes-3.2.0