Changeset 16600
- Timestamp:
- 02/12/19 13:59:15 (6 years ago)
- Location:
- branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files:
-
- 3 edited
Legend:
- Unmodified
- Added
- Removed
-
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r16599 r16600 250 250 var targetValues = new double[rows.Length, targetVars.Length]; 251 251 252 252 253 253 // collect values of all target variables 254 254 var colIdx = 0; … … 275 275 var theta = new double[paramNodes.Count + latentVariables.Length * episodes.Count()]; 276 276 for (int i = 0; i < theta.Length; i++) 277 theta[i] = random.NextDouble() * 2.0e- 2 - 1.0e-2;277 theta[i] = random.NextDouble() * 2.0e-1 - 1.0e-1; 278 278 279 279 optTheta = new double[0]; … … 284 284 alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations); 285 285 alglib.minlmsetgradientcheck(state, 1.0e-3); 286 //TODO: create a type 287 var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver }; 288 alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState); 286 var myState = new OptimizationData(trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver); 287 alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState); 289 288 290 289 alglib.minlmresults(state, out optTheta, out report); … … 330 329 nmse = targetValues.Length; 331 330 } 332 331 333 332 } 334 333 … … 342 341 alglib.ndimensional_jac jac; 343 342 344 private static void EvaluateObjectiveVector(double[] x, double[] fi, object obj) { 345 EvaluateObjectiveVectorAndJacobian(x, fi, null, obj); 346 } 347 348 private static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object obj) { 349 var trees = (ISymbolicExpressionTree[])((object[])obj)[0]; 350 var targetVariables = (string[])((object[])obj)[1]; 351 var problemData = (IRegressionProblemData)((object[])obj)[2]; 352 var targetValues = (double[,])((object[])obj)[3]; 353 var episodes = (IntRange[])((object[])obj)[4]; 354 var numericIntegrationSteps = (int)((object[])obj)[5]; 355 var latentVariables = 
(string[])((object[])obj)[6]; 356 var odeSolver = (string)((object[])obj)[7]; 343 public static void EvaluateObjectiveVector(double[] x, double[] fi, object optimizationData) { EvaluateObjectiveVector(x, fi, (OptimizationData)optimizationData); } // for alglib 344 public static void EvaluateObjectiveVector(double[] x, double[] fi, OptimizationData optimizationData) { 345 EvaluateObjectiveVectorAndJacobian(x, fi, null, optimizationData); 346 } 347 348 public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object optimizationData) { EvaluateObjectiveVectorAndJacobian(x, fi, jac, (OptimizationData)optimizationData); } // for alglib 349 public static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, OptimizationData optimizationData) { 357 350 358 351 359 352 Tuple<double, Vector>[][] predicted = null; // one array of predictions for each episode 360 // TODO: stop integration early for diverging solutions 361 predicted = Integrate( 362 trees, // we assume trees contain expressions for the change of each target variable over time y'(t) 363 problemData.Dataset, 364 problemData.AllowedInputVariables.ToArray(), 365 targetVariables, 366 latentVariables, 367 episodes, 368 x, 369 odeSolver, 370 numericIntegrationSteps).ToArray(); 353 predicted = Integrate(optimizationData, x).ToArray(); 371 354 372 355 // clear all result data structures … … 376 359 } 377 360 378 if (predicted.Length != targetValues.GetLength(0)) {361 if (predicted.Length != optimizationData.targetValues.GetLength(0)) { 379 362 return; 380 363 } … … 398 381 foreach (var y_pred in predicted) { 399 382 // y_pred contains the predicted values for target variables first and then predicted values for latent variables 400 for (int c = 0; c < targetVariables.Length; c++) {383 for (int c = 0; c < optimizationData.targetVariables.Length; c++) { 401 384 402 385 var y_pred_f = y_pred[c].Item1; 403 var y = targetValues[r, c];386 var y = 
optimizationData.targetValues[r, c]; 404 387 405 388 fi[i] = (y - y_pred_f); … … 451 434 var episodes = new[] { episode }; 452 435 var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta_" + eIdx]).ToArray(); // see evaluate 453 var trainingPrediction = Integrate( 454 trees, // we assume trees contain expressions for the change of each target variable over time y'(t) 455 problemData.Dataset, 456 problemData.AllowedInputVariables.ToArray(), 457 targetVars, 458 latentVariables, 459 episodes, 460 optTheta, 461 OdeSolver, 462 NumericIntegrationSteps).ToArray(); 436 var optimizationData = new OptimizationData(trees, targetVars, problemData, null, episodes, NumericIntegrationSteps, latentVariables, OdeSolver); 437 var trainingPrediction = Integrate(optimizationData, optTheta).ToArray(); 463 438 trainingPredictions.Add(trainingPrediction); 464 439 eIdx++; … … 492 467 } else { 493 468 var optTheta = ((DoubleArray)bestIndividualAndQuality.Item1["OptTheta"]).ToArray(); // see evaluate 494 var trainingPrediction = Integrate( 495 trees, // we assume trees contain expressions for the change of each target variable over time dy/dt 496 problemData.Dataset, 497 problemData.AllowedInputVariables.ToArray(), 498 targetVars, 499 latentVariables, 500 TrainingEpisodes, 501 optTheta, 502 OdeSolver, 503 NumericIntegrationSteps).ToArray(); 469 var optimizationData = new OptimizationData(trees, targetVars, problemData, null, TrainingEpisodes.ToArray(), NumericIntegrationSteps, latentVariables, OdeSolver); 470 var trainingPrediction = Integrate(optimizationData, optTheta).ToArray(); 504 471 // for target values and latent variables 505 472 var trainingRows = TrainingEpisodes.SelectMany(e => Enumerable.Range(e.Start, e.End - e.Start)); … … 564 531 var testList = new ItemList<DataTable>(); 565 532 var testRows = ProblemData.TestIndices.ToArray(); 566 var testPrediction = Integrate( 567 trees, // we assume trees contain expressions for the change of each target variable over time 
y'(t) 568 problemData.Dataset, 569 problemData.AllowedInputVariables.ToArray(), 570 targetVars, 571 latentVariables, 572 new IntRange[] { ProblemData.TestPartition }, 573 optTheta, 574 OdeSolver, 575 NumericIntegrationSteps).ToArray(); 533 var testOptimizationData = new OptimizationData(trees, targetVars, problemData, null, new IntRange[] { ProblemData.TestPartition }, NumericIntegrationSteps, latentVariables, OdeSolver); 534 var testPrediction = Integrate(testOptimizationData, optTheta).ToArray(); 576 535 577 536 for (int colIdx = 0; colIdx < trees.Length; colIdx++) { … … 686 645 // for a solver with the necessary features see: https://computation.llnl.gov/projects/sundials/cvodes 687 646 688 public static IEnumerable<Tuple<double, Vector>[]> Integrate( 689 ISymbolicExpressionTree[] trees, IDataset dataset, 690 string[] inputVariables, string[] targetVariables, string[] latentVariables, IEnumerable<IntRange> episodes, 691 double[] parameterValues, 692 string odeSolver, int numericIntegrationSteps = 100) { 647 public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData, double[] parameterValues) { 648 649 var trees = optimizationData.trees; 650 var dataset = optimizationData.problemData.Dataset; 651 var inputVariables = optimizationData.problemData.AllowedInputVariables.ToArray(); 652 var targetVariables = optimizationData.targetVariables; 653 var latentVariables = optimizationData.latentVariables; 654 var episodes = optimizationData.episodes; 655 var odeSolver = optimizationData.odeSolver; 656 var numericIntegrationSteps = optimizationData.numericIntegrationSteps; 657 693 658 694 659 // TODO: numericIntegrationSteps is only relevant for the HeuristicLab solver 695 660 var episodeIdx = 0; 696 foreach (var episode in episodes) {661 foreach (var episode in optimizationData.episodes) { 697 662 var rows = Enumerable.Range(episode.Start, episode.End - episode.Start); 698 663 … … 1129 1094 ISymbolicExpressionTreeNode node, 1130 1095 
Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> nodeValues, // contains value and gradient vector for a node (variables and constants only) 1131 out double f,1132 out Vector g1096 out double z, 1097 out Vector dz 1133 1098 ) { 1134 double f l, fr;1135 Vector gl, gr;1099 double f, g; 1100 Vector df, dg; 1136 1101 switch (node.Symbol.Name) { 1137 1102 case "+": { 1138 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1139 InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);1140 f = fl + fr;1141 g =Vector.AddTo(gl, gr);1103 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1104 InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg); 1105 z = f + g; 1106 dz = df + dg; // Vector.AddTo(gl, gr); 1142 1107 break; 1143 1108 } 1144 1109 case "*": { 1145 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1146 InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);1147 f = fl * fr;1148 g =Vector.AddTo(gl.Scale(fr), gr.Scale(fl)); // f'*g + f*g'1110 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1111 InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg); 1112 z = f * g; 1113 dz = df * g + f * dg; // Vector.AddTo(gl.Scale(fr), gr.Scale(fl)); // f'*g + f*g' 1149 1114 break; 1150 1115 } … … 1152 1117 case "-": { 1153 1118 if (node.SubtreeCount == 1) { 1154 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1155 f = -fl;1156 g = gl.Scale(-1.0);1119 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1120 z = -f; 1121 dz = -df; 1157 1122 } else { 1158 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1159 InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);1160 1161 f = fl - fr;1162 g = Vector.Subtract(gl, gr);1123 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1124 InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg); 1125 1126 z = f - g; 1127 dz = df - dg; 1163 1128 } 1164 1129 break; 1165 1130 } 1166 1131 case "%": { 1167 
InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1168 InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);1132 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1133 InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg); 1169 1134 1170 1135 // protected division 1171 if ( fr.IsAlmost(0.0)) {1172 f= 0;1173 g= Vector.Zero;1136 if (g.IsAlmost(0.0)) { 1137 z = 0; 1138 dz = Vector.Zero; 1174 1139 } else { 1175 f = fl / fr;1176 g = Vector.AddTo(gr.Scale(fl * -1.0 / (fr * fr)), gl.Scale(1.0 / fr)); // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'1140 z = f / g; 1141 dz = -f / (g * g) * dg + df / g; // Vector.AddTo(dg.Scale(f * -1.0 / (g * g)), df.Scale(1.0 / g)); // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f' 1177 1142 } 1178 1143 break; 1179 1144 } 1180 1145 case "sin": { 1181 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1182 f = Math.Sin(fl);1183 g = gl.Scale(Math.Cos(fl));1146 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1147 z = Math.Sin(f); 1148 dz = Math.Cos(f) * df; 1184 1149 break; 1185 1150 } 1186 1151 case "cos": { 1187 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1188 f = Math.Cos(fl);1189 g = gl.Scale(-Math.Sin(fl));1152 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1153 z = Math.Cos(f); 1154 dz = -Math.Sin(f) * df; 1190 1155 break; 1191 1156 } 1192 1157 case "sqr": { 1193 InterpretRec(node.GetSubtree(0), nodeValues, out f l, out gl);1194 f = fl * fl;1195 g = gl.Scale(2.0 * fl);1158 InterpretRec(node.GetSubtree(0), nodeValues, out f, out df); 1159 z = f * f; 1160 dz = 2.0 * f * df; 1196 1161 break; 1197 1162 } 1198 1163 default: { 1199 1164 var t = nodeValues[node]; 1200 f= t.Item1;1201 g= Vector.CreateNew(t.Item2);1165 z = t.Item1; 1166 dz = Vector.CreateNew(t.Item2); 1202 1167 break; 1203 1168 } … … 1455 1420 return ProblemData; 1456 1421 } 1422 1423 public class OptimizationData { 1424 public readonly ISymbolicExpressionTree[] 
trees; 1425 public readonly string[] targetVariables; 1426 public readonly IRegressionProblemData problemData; 1427 public readonly double[,] targetValues; 1428 public readonly IntRange[] episodes; 1429 public readonly int numericIntegrationSteps; 1430 public readonly string[] latentVariables; 1431 public readonly string odeSolver; 1432 1433 public OptimizationData(ISymbolicExpressionTree[] trees, string[] targetVars, IRegressionProblemData problemData, double[,] targetValues, IntRange[] episodes, int numericIntegrationSteps, string[] latentVariables, string odeSolver) { 1434 this.trees = trees; 1435 this.targetVariables = targetVars; 1436 this.problemData = problemData; 1437 this.targetValues = targetValues; 1438 this.episodes = episodes; 1439 this.numericIntegrationSteps = numericIntegrationSteps; 1440 this.latentVariables = latentVariables; 1441 this.odeSolver = odeSolver; 1442 } 1443 } 1457 1444 #endregion 1458 1445 -
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Solution.cs
r16400 r16600 91 91 var random = new FastRandom(12345); 92 92 Problem.OptimizeForEpisodes(trees, problemData, targetVars, latentVariables, random, new[] { forecastEpisode }, 100, numericIntegrationSteps, odeSolver, out optL0, out snmse); 93 var predictions = Problem.Integrate( 94 trees, 95 problemData.Dataset, 96 problemData.AllowedInputVariables.ToArray(), 97 targetVars, 98 latentVariables, 99 new[] { forecastEpisode }, 100 optL0, 101 odeSolver, 102 numericIntegrationSteps).ToArray(); 93 var optimizationData = new Problem.OptimizationData(trees, targetVars, problemData, null, new[] { forecastEpisode }, numericIntegrationSteps, latentVariables, odeSolver); 94 var predictions = Problem.Integrate(optimizationData, optL0).ToArray(); 103 95 return predictions.Select(p => p.Select(pi => pi.Item1).ToArray()).ToArray(); 104 96 } -
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Vector.cs
r16599 r16600 16 16 } 17 17 18 public static Vector AddTo(Vector a, Vector b) {19 if (b == Zero) return a;20 if (a == Zero) {21 var vArr = new double[b.Length];22 b.CopyTo(vArr);23 return new Vector(vArr);24 } else {25 Trace.Assert(a.arr.Length == b.arr.Length);26 for (int i = 0; i < a.arr.Length; i++)27 a.arr[i] += b.arr[i];28 return a;29 }30 }18 // public static Vector AddTo(Vector a, Vector b) { 19 // if (b == Zero) return a; 20 // if (a == Zero) { 21 // var vArr = new double[b.Length]; 22 // b.CopyTo(vArr); 23 // return new Vector(vArr); 24 // } else { 25 // Trace.Assert(a.arr.Length == b.arr.Length); 26 // for (int i = 0; i < a.arr.Length; i++) 27 // a.arr[i] += b.arr[i]; 28 // return a; 29 // } 30 // } 31 31 32 32 … … 38 38 } 39 39 Trace.Assert(a.arr.Length == b.arr.Length); 40 var res = new double[a.arr.Length]; 40 41 for (int i = 0; i < a.arr.Length; i++) 41 a.arr[i] -=b.arr[i];42 return a;42 res[i] = a.arr[i] - b.arr[i]; 43 return new Vector(res); 43 44 } 44 45 45 46 public static Vector operator -(Vector a, Vector b) { 46 if (b == Zero) return a; 47 if (a == Zero) return -b; 48 Trace.Assert(a.arr.Length == b.arr.Length); 49 var res = new double[a.arr.Length]; 50 for (int i = 0; i < res.Length; i++) 51 res[i] = a.arr[i] - b.arr[i]; 52 return new Vector(res); 47 return Subtract(a, b); 53 48 } 54 49 public static Vector operator -(Vector v) { 55 if (v == Zero) return Zero; 56 for (int i = 0; i < v.arr.Length; i++) 57 v.arr[i] = -v.arr[i]; 58 return v; 50 return v.Scale(-1.0); 59 51 } 60 52 61 53 public static Vector operator *(double s, Vector v) { 62 if (v == Zero) return Zero; 63 if (s == 0.0) return Zero; 64 var res = new double[v.arr.Length]; 65 for (int i = 0; i < res.Length; i++) 66 res[i] = s * v.arr[i]; 67 return new Vector(res); 54 return v.Scale(s); 68 55 } 69 56 70 57 public Vector Scale(double s) { 71 if (this != Zero) { 72 for (int i = 0; i < arr.Length; i++) { 73 arr[i] *= s; 74 } 58 if (this == Zero) return Zero; 59 60 var res = new 
double[arr.Length]; 61 for (int i = 0; i < arr.Length; i++) { 62 res[i] = arr[i] * s; 75 63 } 64 return new Vector(res); 76 65 77 return this;78 66 } 79 67 … … 81 69 return s * v; 82 70 } 71 83 72 public static Vector operator *(Vector u, Vector v) { 84 73 if (v == Zero) return Zero; … … 89 78 return new Vector(res); 90 79 } 80 91 81 public static Vector operator /(double s, Vector v) { 92 82 if (s == 0.0) return Zero; … … 94 84 var res = new double[v.arr.Length]; 95 85 for (int i = 0; i < res.Length; i++) 96 res[i] = 1.0/ v.arr[i];86 res[i] = s / v.arr[i]; 97 87 return new Vector(res); 98 88 } 89 99 90 public static Vector operator /(Vector v, double s) { 100 return v * 1.0 / s;91 return v.Scale(1.0 / s); 101 92 } 102 93
Note: See TracChangeset for help on using the changeset viewer.