Changeset 16399 for branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
- Timestamp: 12/19/18 07:43:36 (6 years ago)
- File: 1 edited

Legend:
- Unmodified
- Added
- Removed
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
Changes from r16398 to r16399:

…
     public override double Evaluate(Individual individual, IRandom random) {
       var trees = individual.Values.Select(v => v.Value).OfType<ISymbolicExpressionTree>().ToArray(); // extract all trees from individual
       // write back optimized parameters to tree nodes instead of the separate OptTheta variable
       // retreive optimized parameters from nodes?
 
+      var problemData = ProblemData;
+      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
+      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
       if (OptimizeParametersForEpisodes) {
         int eIdx = 0;
…
           double[] optTheta;
           double nmse;
-          OptimizeForEpisodes(trees, random, new[] { episode }, out optTheta, out nmse);
+          OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { episode }, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
           individual["OptTheta_" + eIdx] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
           eIdx++;
…
         double[] optTheta;
         double nmse;
-        OptimizeForEpisodes(trees, random, TrainingEpisodes, out optTheta, out nmse);
+        OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, TrainingEpisodes, MaximumParameterOptimizationIterations, NumericIntegrationSteps, OdeSolver, out optTheta, out nmse);
         individual["OptTheta"] = new DoubleArray(optTheta); // write back optimized parameters so that we can use them in the Analysis method
         return nmse;
…
     }
 
-    private void OptimizeForEpisodes(
+    public static void OptimizeForEpisodes(
       ISymbolicExpressionTree[] trees,
+      IRegressionProblemData problemData,
+      string[] targetVars,
+      string[] latentVariables,
       IRandom random,
       IEnumerable<IntRange> episodes,
+      int maxParameterOptIterations,
+      int numericIntegrationSteps,
+      string odeSolver,
       out double[] optTheta,
       out double nmse) {
       var rows = episodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)).ToArray();
-      var problemData = ProblemData;
-      var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
-      var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
       var targetValues = new double[rows.Length, targetVars.Length];
…
       alglib.minlbfgsreport report;
       alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
-      alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, MaximumParameterOptimizationIterations);
+      alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
       //alglib.minlbfgssetgradientcheck(state, 1e-6);
       alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
-        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver }); //TODO: create a type
+        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver }); //TODO: create a type
 
       alglib.minlbfgsresults(state, out optTheta, out report);
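The central refactoring in the hunks above turns OptimizeForEpisodes from a private instance method into a public static one: everything it previously pulled from instance state (ProblemData, the checked target variables, the latent-variable names, the optimizer and integrator settings) is now passed in explicitly, so the optimization can be driven without a Problem instance. A minimal call-site sketch under assumed values; the trees, problemData, variable names, seed, and solver id below are placeholders, not part of the changeset:

    // Hypothetical caller (sketch). Assumes `trees` and `problemData` are already
    // set up; target/latent names must match the symbols used in the trees.
    double[] optTheta;
    double nmse;
    Problem.OptimizeForEpisodes(
        trees,                            // ISymbolicExpressionTree[], one per ODE right-hand side
        problemData,                      // IRegressionProblemData with the dataset
        new[] { "y1", "y2" },             // targetVars (assumed names)
        new[] { "λ1" },                   // latentVariables, following the grammar's λi convention
        new MersenneTwister(1234u),       // any HeuristicLab IRandom implementation
        new[] { new IntRange(0, 100) },   // training episode(s)
        10,                               // maxParameterOptIterations (assumed setting)
        10,                               // numericIntegrationSteps (assumed setting)
        "HeuristicLab",                   // odeSolver id (assumed value)
        out optTheta,
        out nmse);
    Console.WriteLine("training NMSE: " + nmse);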
…
       nmse = double.NaN;
       EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
-        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver });
+        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver });
       if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
…
         results.Add(new Result("Models", typeof(VariableCollection)));
       }
       if (!results.ContainsKey("SNMSE")) {
         results.Add(new Result("SNMSE", typeof(DoubleValue)));
+      }
+      if (!results.ContainsKey("Solution")) {
+        results.Add(new Result("Solution", typeof(Solution)));
       }
…
       results["Prediction (training)"].Value = trainingList.AsReadOnly();
       results["Prediction (test)"].Value = testList.AsReadOnly();
+
+
       #region simplification of models
       // TODO the dependency of HeuristicLab.Problems.DataAnalysis.Symbolic is not ideal
       var models = new VariableCollection(); // to store target var names and original version of tree
 
+      var optimizedTrees = new List<ISymbolicExpressionTree>();
       int nextParIdx = 0;
+      for (int idx = 0; idx < trees.Length; idx++) {
+        var tree = trees[idx];
+        optimizedTrees.Add(new SymbolicExpressionTree(FixParameters(tree.Root, optTheta.ToArray(), ref nextParIdx)));
+      }
+      var ds = problemData.Dataset;
+      var newVarNames = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
+      var allVarNames = ds.DoubleVariables.Concat(newVarNames);
+      var newVarValues = Enumerable.Range(0, nextParIdx).Select(i => "c_" + i).ToArray();
+      var allVarValues = ds.DoubleVariables.Select(varName => ds.GetDoubleValues(varName).ToList())
+        .Concat(Enumerable.Range(0, nextParIdx).Select(i => Enumerable.Repeat(optTheta[i], ds.Rows).ToList()))
+        .ToList();
+      var newDs = new Dataset(allVarNames, allVarValues);
+      var newProblemData = new RegressionProblemData(newDs, problemData.AllowedInputVariables.Concat(newVarValues).ToArray(), problemData.TargetVariable);
+      results["Solution"].Value = new Solution(optimizedTrees.ToArray(),
+        // optTheta,
+        newProblemData,
+        targetVars,
+        latentVariables,
+        TrainingEpisodes,
+        OdeSolver,
+        NumericIntegrationSteps);
+
+      nextParIdx = 0;
       for (int idx = 0; idx < trees.Length; idx++) {
         var varName = string.Empty;
…
 
       }
+
       results["Models"].Value = models;
       #endregion
…
       // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes
 
-    private static IEnumerable<Tuple<double, Vector>[]> Integrate(
+    public static IEnumerable<Tuple<double, Vector>[]> Integrate(
       ISymbolicExpressionTree[] trees, IDataset dataset,
       string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
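The large added block above is what populates the new "Solution" result: FixParameters (added at the end of this changeset) rewrites every θ parameter node into a pseudo-variable c_i, and the dataset is widened by one constant column per parameter so the rewritten trees can evaluate c_i like any ordinary input. A self-contained sketch of that column construction; the parameter values and row count are assumed here, not taken from the changeset:

    using System.Collections.Generic;
    using System.Linq;

    double[] optTheta = { 0.5, -1.2 };   // assumed optimizer output
    int rows = 100;                      // assumed dataset length
    var constNames = Enumerable.Range(0, optTheta.Length)
        .Select(i => "c_" + i)           // matches the names FixParameters emits
        .ToArray();
    var constColumns = Enumerable.Range(0, optTheta.Length)
        .Select(i => Enumerable.Repeat(optTheta[i], rows).ToList())
        .ToList();                       // one constant column per parameter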
…
 
 
       foreach (var node in nodeValues.Keys.ToArray()) {
         if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
           // update values for (latent) variables
           var varName = node.Symbol.Name;
…
     private static bool IsConstantNode(ISymbolicExpressionTreeNode n) {
-      return n.Symbol.Name.StartsWith("θ");
+      return n.Symbol.Name[0] == 'θ';
     }
     private static bool IsLatentVariableNode(ISymbolicExpressionTreeNode n) {
-      return n.Symbol.Name.StartsWith("λ");
+      return n.Symbol.Name[0] == 'λ';
     }
     private static bool IsVariableNode(ISymbolicExpressionTreeNode n) {
…
 
       return g;
+    }
+
+    private ISymbolicExpressionTreeNode FixParameters(ISymbolicExpressionTreeNode n, double[] parameterValues, ref int nextParIdx) {
+      ISymbolicExpressionTreeNode translatedNode = null;
+      if (n.Symbol is StartSymbol) {
+        translatedNode = new StartSymbol().CreateTreeNode();
+      } else if (n.Symbol is ProgramRootSymbol) {
+        translatedNode = new ProgramRootSymbol().CreateTreeNode();
+      } else if (n.Symbol.Name == "+") {
+        translatedNode = new SimpleSymbol("+", 2).CreateTreeNode();
+      } else if (n.Symbol.Name == "-") {
+        translatedNode = new SimpleSymbol("-", 2).CreateTreeNode();
+      } else if (n.Symbol.Name == "*") {
+        translatedNode = new SimpleSymbol("*", 2).CreateTreeNode();
+      } else if (n.Symbol.Name == "%") {
+        translatedNode = new SimpleSymbol("%", 2).CreateTreeNode();
+      } else if (n.Symbol.Name == "sin") {
+        translatedNode = new SimpleSymbol("sin", 1).CreateTreeNode();
+      } else if (n.Symbol.Name == "cos") {
+        translatedNode = new SimpleSymbol("cos", 1).CreateTreeNode();
+      } else if (n.Symbol.Name == "sqr") {
+        translatedNode = new SimpleSymbol("sqr", 1).CreateTreeNode();
+      } else if (IsConstantNode(n)) {
+        translatedNode = new SimpleSymbol("c_" + nextParIdx, 0).CreateTreeNode();
+        nextParIdx++;
+      } else {
+        translatedNode = new SimpleSymbol(n.Symbol.Name, n.SubtreeCount).CreateTreeNode();
+      }
+      foreach (var child in n.Subtrees) {
+        translatedNode.AddSubtree(FixParameters(child, parameterValues, ref nextParIdx));
+      }
+      return translatedNode;
     }
 
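A note on the changed leaf-node predicates: checking Name[0] against 'θ' or 'λ' gives the same result as the previous StartsWith call for these names, because both Greek letters occupy a single UTF-16 code unit, while avoiding a culture-sensitive string comparison on every node visit. A minimal check (sketch, not from the changeset):

    Console.WriteLine("θ3".StartsWith("θ", StringComparison.Ordinal)); // True
    Console.WriteLine("θ3"[0] == 'θ');                                 // True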