Changeset 16893 for branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling
Timestamp: 05/04/19 08:41:14 (6 years ago)
Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
Files: 4 edited
Legend: unmodified lines are shown with a leading space, added lines with "+", removed lines with "-".
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
  Property svn:ignore changed:
     Plugin.cs
    +sundials-x64
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
(r16786 → r16893)

 private const string FunctionSetParameterName = "Function set";
 private const string MaximumLengthParameterName = "Size limit";
-private const string MaximumParameterOptimizationIterationsParameterName = "Max. parameter optimization iterations";
+private const string MaximumPretuningParameterOptimizationIterationsParameterName = "Max. pre-tuning parameter optimization iterations";
+private const string MaximumOdeParameterOptimizationIterationsParameterName = "Max. ODE parameter optimization iterations";
 private const string NumberOfLatentVariablesParameterName = "Number of latent variables";
 private const string NumericIntegrationStepsParameterName = "Steps for numeric integration";
…
 }

-public IFixedValueParameter<IntValue> MaximumParameterOptimizationIterationsParameter {
-  get { return (IFixedValueParameter<IntValue>)Parameters[MaximumParameterOptimizationIterationsParameterName]; }
+public IFixedValueParameter<IntValue> MaximumPretuningParameterOptimizationIterationsParameter {
+  get { return (IFixedValueParameter<IntValue>)Parameters[MaximumPretuningParameterOptimizationIterationsParameterName]; }
+}
+public IFixedValueParameter<IntValue> MaximumOdeParameterOptimizationIterationsParameter {
+  get { return (IFixedValueParameter<IntValue>)Parameters[MaximumOdeParameterOptimizationIterationsParameterName]; }
 }
 public IFixedValueParameter<IntValue> NumberOfLatentVariablesParameter {
…
 public IConstrainedValueParameter<StringValue> OdeSolverParameter {
   get { return (IConstrainedValueParameter<StringValue>)Parameters[OdeSolverParameterName]; }
+}
+public IFixedValueParameter<DoubleValue> PretuningErrorWeight {
+  get { return (IFixedValueParameter<DoubleValue>)Parameters["Pretuning NMSE weight"]; }
+}
+public IFixedValueParameter<DoubleValue> OdeErrorWeight {
+  get { return (IFixedValueParameter<DoubleValue>)Parameters["ODE NMSE weight"]; }
 }
 #endregion
…
   get { return MaximumLengthParameter.Value.Value; }
 }
-public int MaximumParameterOptimizationIterations {
-  get { return MaximumParameterOptimizationIterationsParameter.Value.Value; }
+public int MaximumPretuningParameterOptimizationIterations {
+  get { return MaximumPretuningParameterOptimizationIterationsParameter.Value.Value; }
+}
+public int MaximumOdeParameterOptimizationIterations {
+  get { return MaximumOdeParameterOptimizationIterationsParameter.Value.Value; }
 }
 public int NumberOfLatentVariables {
…
   Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
 }
+int iters = 100;
+if (Parameters.ContainsKey("Max. parameter optimization iterations")) {
+  iters = ((IFixedValueParameter<IntValue>)Parameters["Max. parameter optimization iterations"]).Value.Value;
+}
+if (!Parameters.ContainsKey(MaximumPretuningParameterOptimizationIterationsParameterName)) {
+  Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(iters)));
+}
+if (!Parameters.ContainsKey(MaximumOdeParameterOptimizationIterationsParameterName)) {
+  Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(iters)));
+}
+
+if (!Parameters.ContainsKey("Pretuning NMSE weight"))
+  Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+if (!Parameters.ContainsKey("ODE NMSE weight"))
+  Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+
 RegisterEventHandlers();
 }
…
 Parameters.Add(new ValueParameter<ReadOnlyCheckedItemList<StringValue>>(FunctionSetParameterName, "The list of allowed functions", functions));
 Parameters.Add(new FixedValueParameter<IntValue>(MaximumLengthParameterName, "The maximally allowed length of each expression. Set to a small value (5 - 25). Default = 10", new IntValue(10)));
-Parameters.Add(new FixedValueParameter<IntValue>(MaximumParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
+Parameters.Add(new FixedValueParameter<IntValue>(MaximumPretuningParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of parameters of individual equations for numerical derivatives (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
+Parameters.Add(new FixedValueParameter<IntValue>(MaximumOdeParameterOptimizationIterationsParameterName, "The maximum number of iterations for optimization of the full ODE parameters (using L-BFGS). More iterations makes the algorithm slower, fewer iterations might prevent convergence in the optimization scheme. Default = 100", new IntValue(100)));
 Parameters.Add(new FixedValueParameter<IntValue>(NumberOfLatentVariablesParameterName, "Latent variables (unobserved variables) allow us to produce expressions which are integrated up and can be used in other expressions. They are handled similarly to target variables in forward simulation / integration. The difference to target variables is that there are no data to which the calculated values of latent variables are compared. Set to a small value (0 .. 5) as necessary (default = 0)", new IntValue(0)));
 Parameters.Add(new FixedValueParameter<IntValue>(NumericIntegrationStepsParameterName, "Number of steps in the numeric integration that are taken from one row to the next (set to 1 to 100). More steps makes the algorithm slower, less steps worsens the accuracy of the numeric integration scheme.", new IntValue(10)));
 Parameters.Add(new ValueParameter<ItemList<IntRange>>(TrainingEpisodesParameterName, "A list of ranges that should be used for training, each range represents an independent episode. This overrides the TrainingSet parameter in ProblemData.", new ItemList<IntRange>()));
 Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeParametersForEpisodesParameterName, "Flag to select if parameters should be optimized globally or for each episode individually.", new BoolValue(false)));
+Parameters.Add(new FixedValueParameter<DoubleValue>("Pretuning NMSE weight", "For fitness weighting", new DoubleValue(0.5)));
+Parameters.Add(new FixedValueParameter<DoubleValue>("ODE NMSE weight", "For fitness weighting", new DoubleValue(0.5)));

 var solversStr = new string[] { "HeuristicLab" /* , "CVODES" */};
…
 foreach (var episode in TrainingEpisodes) {
   // double[] optTheta;
-  double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
+  double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations);
   // individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
   eIdx++;
…
 } else {
   // double[] optTheta;
-  double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver);
+  double nmse = OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumPretuningParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, MaximumOdeParameterOptimizationIterations,
+    PretuningErrorWeight.Value.Value, OdeErrorWeight.Value.Value);
   // individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
   return nmse;
…
   IRandom random,
   IEnumerable<IntRange> episodes,
-  int maxParameterOptIterations,
+  int maxPretuningParameterOptIterations,
   int numericIntegrationSteps,
-  string odeSolver) {
+  string odeSolver,
+  int maxOdeParameterOptIterations,
+  double pretuningErrorWeight = 0.5,
+  double odeErrorWeight = 0.5
+  ) {

 // extract constants from trees (without trees for latent variables)
…
 // optimize parameters by fitting f(x,y) to calculated differences dy/dt(t)
-double nmse = PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxParameterOptIterations,
+double nmse = pretuningErrorWeight * PreTuneParameters(trees, problemData, targetVars, latentVariables, random, episodes, maxPretuningParameterOptIterations,
   initialTheta, out double[] pretunedParameters);
…
 // optimize parameters using integration of f(x,y) to calculate y(t)
-nmse = OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
+nmse += odeErrorWeight * OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxOdeParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
   out double[] optTheta);
 // var optTheta = pretunedParameters;
…
 var latentTrees = trees.Take(latentVariables.Length).ToArray();

-{
-  // first calculate values of latent variables by integration
+// first calculate values of latent variables by integration
+if(latentVariables.Length > 0) {
   var inputVariables = targetVars.Concat(latentTrees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))).Except(latentVariables).Distinct();
   var myState = new OptimizationData(latentTrees, targetVars, inputVariables.ToArray(), problemData, null, episodes.ToArray(), 10, latentVariables, "HeuristicLab");
…
 var paramCount = myState.nodeValueLookup.ParameterCount;

-optTheta = new double[0];
-if (initialTheta[treeIdx].Length > 0) {
+optTheta = initialTheta[treeIdx];
+if (initialTheta[treeIdx].Length > 0 && maxParameterOptIterations > -1) {
   try {
     alglib.minlmstate state;
…
 optTheta = initialTheta;

-if (initialTheta.Length > 0) {
+if (initialTheta.Length > 0 && maxParameterOptIterations > -1) {
   var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
   var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
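Taken together, the Problem.cs changes split the single "Max. parameter optimization iterations" setting into separate budgets for the pre-tuning stage (fitting each equation to numeric derivatives) and the ODE stage (fitting the fully integrated system), and the returned fitness becomes a weighted sum of the two NMSE values instead of the ODE error alone. A minimal sketch of that combination, where preTuneNmse/odeNmse are hypothetical stand-ins for the PreTuneParameters/OptimizeParameters calls above (not the HeuristicLab API):

    using System;

    static class TwoStageFitnessSketch {
      // Sketch only: mirrors the weighting introduced in OptimizeForEpisodes in r16893.
      public static double CombinedNmse(
          Func<int, double> preTuneNmse,  // stage 1: fit each equation to numeric derivatives dy/dt
          Func<int, double> odeNmse,      // stage 2: fit the integrated system y(t), warm-started from stage 1
          int maxPretuningIters, int maxOdeIters,
          double pretuningErrorWeight = 0.5, double odeErrorWeight = 0.5) {
        double nmse = pretuningErrorWeight * preTuneNmse(maxPretuningIters);
        nmse += odeErrorWeight * odeNmse(maxOdeIters);
        return nmse; // with the default weights (0.5 / 0.5) this averages the two errors
      }
    }

Note also the relaxed guards in the last two hunks: optTheta now starts from the initial (or pre-tuned) parameters rather than an empty array, and an iteration budget below 0 skips the corresponding optimization stage entirely.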
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Solution.cs
(r16663 → r16893)

 using HeuristicLab.Data;
 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
-using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
 using HeuristicLab.Problems.DataAnalysis;
 using HeuristicLab.Problems.DataAnalysis.Symbolic;
-using HeuristicLab.Random;
 using HEAL.Attic;
…
   get { return trees; }
 }
-// [Storable]
-// private double[] theta;

 [Storable]
…
 this.trees = new ISymbolicExpressionTree[original.trees.Length];
 for (int i = 0; i < trees.Length; i++) this.trees[i] = cloner.Clone(original.trees[i]);
-// this.theta = new double[original.theta.Length];
-// Array.Copy(original.theta, this.theta, this.theta.Length);
 this.problemData = cloner.Clone(original.problemData);
 this.targetVars = original.TargetVariables.ToArray();
…
 public IEnumerable<double[]> Predict(IntRange episode, int forecastHorizon) {
   var forecastEpisode = new IntRange(episode.Start, episode.End + forecastHorizon);
-  //
-  // var random = new FastRandom(12345);
-  // snmse = Problem.OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { forecastEpisode }, 100, numericIntegrationSteps, odeSolver);

   var inputVariables = trees.SelectMany(t => t.IterateNodesPrefix().OfType<VariableTreeNode>().Select(n => n.VariableName))
…
   var optimizationData = new Problem.OptimizationData(trees, targetVars, inputVariables.ToArray(), problemData, null, new[] { forecastEpisode }, numericIntegrationSteps, latentVariables, odeSolver);
-  //
-  //
-  // var theta = Problem.ExtractParametersFromTrees(trees);
-
-
   var fi = new double[forecastEpisode.Size * targetVars.Length];
   var jac = new double[forecastEpisode.Size * targetVars.Length, optimizationData.nodeValueLookup.ParameterCount];
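The Solution.cs hunks are cleanup only (unused usings and commented-out theta handling removed); the Predict behavior is unchanged. For orientation, a hypothetical call with made-up episode bounds, where solution is an instance of this class:

    // Predict internally extends the episode by the horizon:
    // forecastEpisode = new IntRange(episode.Start, episode.End + forecastHorizon)
    var episode = new IntRange(0, 100);  // rows the model was fitted on
    IEnumerable<double[]> forecast = solution.Predict(episode, forecastHorizon: 10);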
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/sundials
  Property svn:ignore set to cvodes-3.2.0