Changeset 16329 for branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
- Timestamp: 11/27/18 08:21:13 (6 years ago)
- File: 1 edited
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
--- branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs (r16275)
+++ branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs (r16329)

@@ -259 +259 @@
       }
       // init params randomly from Unif(-1e-5, 1e-5)
-      var theta = paramNodes.Select(_ => random.NextDouble() * 2.0e-2 - 1.0e-2).ToArray();
+      // theta contains parameter values for trees and then the initial values for latent variables (a separate vector for each episode)
+      // inital values for latent variables are also optimized
+      var theta = new double[paramNodes.Count + latentVariables.Length * episodes.Count()];
+      for (int i = 0; i < theta.Length; i++)
+        theta[i] = random.NextDouble() * 2.0e-2 - 1.0e-2;

       optTheta = new double[0];

@@ -355 +359 @@
       int r = 0;
       foreach (var y_pred in predicted) {
-        for (int c = 0; c < y_pred.Length; c++) {
+        // y_pred contains the predicted values for target variables first and then predicted values for latent variables
+        for (int c = 0; c < targetVariables.Length; c++) {

           var y_pred_f = y_pred[c].Item1;

@@ -413 +418 @@
       }

-      // only for actual target values
+      // only for target values
       var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
       for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {

@@ -450 +455 @@
         OdeSolver,
         NumericIntegrationSteps).ToArray();
-      // only for actual target values
+      // for target values and latent variables
       var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start));
-      for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
-        var targetVar = targetVars[colIdx];
-        var trainingDataTable = new DataTable(targetVar + " prediction (training)");
-        var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
-        var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
-        trainingDataTable.Rows.Add(actualValuesRow);
-        trainingDataTable.Rows.Add(predictedValuesRow);
-        trainingList.Add(trainingDataTable);
+      for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
+        // is target variable
+        if (colIdx < targetVars.Length) {
+          var targetVar = targetVars[colIdx];
+          var trainingDataTable = new DataTable(targetVar + " prediction (training)");
+          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, trainingRows));
+          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+          trainingDataTable.Rows.Add(actualValuesRow);
+          trainingDataTable.Rows.Add(predictedValuesRow);
+          trainingList.Add(trainingDataTable);
+        } else {
+          var latentVar = latentVariables[colIdx - targetVars.Length];
+          var trainingDataTable = new DataTable(latentVar + " prediction (training)");
+          var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, trainingPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+          var emptyRow = new DataRow(latentVar);
+          trainingDataTable.Rows.Add(emptyRow);
+          trainingDataTable.Rows.Add(predictedValuesRow);
+          trainingList.Add(trainingDataTable);
+        }
       }
       // TODO: DRY for training and test

@@ -475 +491 @@
         NumericIntegrationSteps).ToArray();

-      for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
-        var targetVar = targetVars[colIdx];
-        var testDataTable = new DataTable(targetVar + " prediction (test)");
-        var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
-        var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
-        testDataTable.Rows.Add(actualValuesRow);
-        testDataTable.Rows.Add(predictedValuesRow);
-        testList.Add(testDataTable);
+      for (int colIdx = 0; colIdx < trees.Length; colIdx++) {
+        // is target variable
+        if (colIdx < targetVars.Length) {
+          var targetVar = targetVars[colIdx];
+          var testDataTable = new DataTable(targetVar + " prediction (test)");
+          var actualValuesRow = new DataRow(targetVar, "The values of " + targetVar, problemData.Dataset.GetDoubleValues(targetVar, testRows));
+          var predictedValuesRow = new DataRow(targetVar + " pred.", "Predicted values for " + targetVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+          testDataTable.Rows.Add(actualValuesRow);
+          testDataTable.Rows.Add(predictedValuesRow);
+          testList.Add(testDataTable);
+
+        } else {
+          var latentVar = latentVariables[colIdx - targetVars.Length];
+          var testDataTable = new DataTable(latentVar + " prediction (test)");
+          var predictedValuesRow = new DataRow(latentVar + " pred.", "Predicted values for " + latentVar, testPrediction.Select(arr => arr[colIdx].Item1).ToArray());
+          var emptyRow = new DataRow(latentVar);
+          testDataTable.Rows.Add(emptyRow);
+          testDataTable.Rows.Add(predictedValuesRow);
+          testList.Add(testDataTable);
+        }
       }

@@ -492 +520 @@

       int nextParIdx = 0;
-      foreach (var tup in targetVars.Zip(trees, Tuple.Create)) {
-        var targetVarName = tup.Item1;
-        var tree = tup.Item2;
+      for (int idx = 0; idx < trees.Length; idx++) {
+        var varName = string.Empty;
+        if (idx < targetVars.Length) {
+          varName = targetVars[idx];
+        } else {
+          varName = latentVariables[idx - targetVars.Length];
+        }
+        var tree = trees[idx];

         // when we reference HeuristicLab.Problems.DataAnalysis.Symbolic we can translate symbols
-        var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta, ref nextParIdx));
+        var shownTree = new SymbolicExpressionTree(TranslateTreeNode(tree.Root, optTheta.ToArray(),
+          ref nextParIdx));

         // var shownTree = (SymbolicExpressionTree)tree.Clone();

@@ -514 +548 @@
         // }

-        var origTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(original)");
+        var origTreeVar = new HeuristicLab.Core.Variable(varName + "(original)");
         origTreeVar.Value = (ISymbolicExpressionTree)tree.Clone();
         models.Add(origTreeVar);
-        var simplifiedTreeVar = new HeuristicLab.Core.Variable(targetVarName + "(simplified)");
+        var simplifiedTreeVar = new HeuristicLab.Core.Variable(varName + "(simplified)");
         simplifiedTreeVar.Value = TreeSimplifier.Simplify(shownTree);
         models.Add(simplifiedTreeVar);

@@ -541 +575 @@

     private static IEnumerable<Tuple<double, Vector>[]> Integrate(
-      ISymbolicExpressionTree[] trees, IDataset dataset, string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
+      ISymbolicExpressionTree[] trees, IDataset dataset,
+      string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes,
       double[] parameterValues,
       string odeSolver, int numericIntegrationSteps = 100) {

       // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver
+      var episodeIdx = 0;
       foreach (var episode in episodes) {
         var rows = Enumerable.Range(episode.Start, episode.End - episode.Start);
-        // return first value as stored in the dataset
-        yield return targetVariables
-          .Select(targetVar => Tuple.Create(dataset.GetDoubleValue(targetVar, rows.First()), Vector.Zero))
-          .ToArray();

         // integrate forward starting with known values for the target in t0

@@ -565 +596 @@
         }
         // add value entries for latent variables which are also integrated
+        // initial values are at the end of the parameter vector
+        // separete initial values for each episode
+        var initialValueIdx = parameterValues.Length - episodes.Count() * latentVariables.Length + episodeIdx * latentVariables.Length;
         foreach (var latentVar in latentVariables) {
-          variableValues.Add(latentVar, Tuple.Create(0.0, Vector.Zero)); // we don't have observations for latent variables -> assume zero as starting value TODO
-        }
+          var arr = new double[parameterValues.Length]; // backing array
+          arr[initialValueIdx] = 1.0;
+          var g = new Vector(arr);
+          variableValues.Add(latentVar,
+            Tuple.Create(parameterValues[initialValueIdx], g)); // we don't have observations for latent variables therefore we optimize the initial value for each episode
+          initialValueIdx++;
+        }
+
         var calculatedVariables = targetVariables.Concat(latentVariables).ToArray(); // TODO: must conincide with the order of trees in the encoding
+
+        // return first value as stored in the dataset
+        yield return calculatedVariables
+          .Select(calcVarName => variableValues[calcVarName])
+          .ToArray();

         var prevT = rows.First(); // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.

@@ -582 +627 @@
           //if (variableValues.Count == targetVariables.Length) {
           // only return the target variables for calculation of errors
-          var res = targetVariables
+          var res = calculatedVariables
             .Select(targetVar => variableValues[targetVar])
             .ToArray();

@@ -597 +642 @@
           }
         }
+        episodeIdx++;
       }
     }

@@ -1012 +1058 @@
             );
           }
+        case "sqr": {
+            var x = InterpretRec(node.GetSubtree(0), nodeValues);
+            return Tuple.Create(
+              x.Item1 * x.Item1,
+              2.0 * x.Item1 * x.Item2
+            );
+          }
         default: {
             return nodeValues[node]; // value and gradient for constants and variables must be set by the caller

@@ -1106 +1159 @@
       l.Add(new StringValue("sin").AsReadOnly());
      l.Add(new StringValue("cos").AsReadOnly());
+      l.Add(new StringValue("sqr").AsReadOnly());
       return l.AsReadOnly();
     }

@@ -1195 +1249 @@
       } else if (n.Symbol.Name == "cos") {
         translatedNode = new Cosine().CreateTreeNode();
+      } else if (n.Symbol.Name == "sqr") {
+        translatedNode = new Square().CreateTreeNode();
       } else if (IsConstantNode(n)) {
         var constNode = (ConstantTreeNode)new Constant().CreateTreeNode();
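Editor's note: the central change in this revision is that the initial values of the latent variables are appended to the parameter vector theta (one block per training episode) and optimized together with the tree parameters, and Integrate seeds their gradient with a unit vector at the corresponding index. The following stand-alone sketch is not part of the changeset; it only illustrates the assumed layout and the index formula, with made-up sizes and variable names, and a plain double[] in place of HeuristicLab's Vector type.

using System;

// Hypothetical sketch of the theta layout used in r16329:
// theta = [tree parameters | latent init values (episode 0) | latent init values (episode 1) | ...]
class ThetaLayoutSketch {
  static void Main() {
    int numTreeParams = 5;   // parameters appearing in the trees (paramNodes.Count)
    int numLatent = 2;       // number of latent variables
    int numEpisodes = 3;     // number of training episodes

    var theta = new double[numTreeParams + numLatent * numEpisodes];
    var rand = new Random(1234);
    for (int i = 0; i < theta.Length; i++)
      theta[i] = rand.NextDouble() * 2.0e-2 - 1.0e-2;   // Unif(-1e-2, 1e-2), as in the changeset code

    int episodeIdx = 1; // e.g. the second episode
    // index of the first latent initial value for this episode (same formula as in Integrate)
    int initialValueIdx = theta.Length - numEpisodes * numLatent + episodeIdx * numLatent;

    for (int latent = 0; latent < numLatent; latent++, initialValueIdx++) {
      double value = theta[initialValueIdx];

      // gradient of this initial value w.r.t. theta is a unit vector:
      // d(theta[k]) / d(theta[j]) = 1 if j == k, else 0
      var gradient = new double[theta.Length];
      gradient[initialValueIdx] = 1.0;

      Console.WriteLine($"latent {latent}: value = {value:G4}, gradient nonzero at index {initialValueIdx}");
    }
  }
}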
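The other addition is a new "sqr" symbol in the grammar, the interpreter, and the tree translation (mapped to Square). Its gradient case follows the chain rule: for f(x) = x², the derivative with respect to each parameter is 2·x·x′. A minimal, self-contained illustration of that value/gradient pair, again with a plain double[] standing in for HeuristicLab's Vector and invented names:

using System;
using System.Linq;

static class SqrSketch {
  // forward-mode rule for f(x) = x^2: value = x*x, gradient = 2*x*dx (chain rule)
  static Tuple<double, double[]> Sqr(Tuple<double, double[]> x) {
    return Tuple.Create(
      x.Item1 * x.Item1,
      x.Item2.Select(dxi => 2.0 * x.Item1 * dxi).ToArray()
    );
  }

  static void Main() {
    // x = 3 with gradient (1, 0): x is the first of two parameters
    var x = Tuple.Create(3.0, new[] { 1.0, 0.0 });
    var y = Sqr(x);
    Console.WriteLine($"value = {y.Item1}, gradient = ({string.Join(", ", y.Item2)})"); // value = 9, gradient = (6, 0)
  }
}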