Changeset 16597 for branches/2925_AutoDiffForDynamicalModels
- Timestamp: 02/11/19 14:15:47 (6 years ago)
- Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files: 4 edited
Legend (as rendered below):
- Unmodified lines have no prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/OdeParameterIdentification.cs
(r16329 → r16597)

      #region nonlinear regression
      protected override void Run(CancellationToken cancellationToken) {
-       IRegressionSolution bestSolution = null;
        if (SetSeedRandomly) Seed = (new System.Random()).Next();
        var rand = new MersenneTwister((uint)Seed);
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
(r16399 → r16597)

      alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
      alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
-     // alglib.minlbfgssetgradientcheck(state, 1e-6);
+     // alglib.minlbfgssetgradientcheck(state, 1e-4);
      alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver }); //TODO: create a type
…
       * NFEV contains number of function calculations
       */
-     if (report.terminationtype < 0) { nmse = 10E6; return; }
+     if (report.terminationtype < 0) { nmse = 10.0; return; }
…
      EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver });
-     if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10E6; return; } // return a large value (TODO: be consistent by using NMSE)
+     if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10.0; return; } // return a large value (TODO: be consistent by using NMSE)
…
      if (predicted.Length != targetValues.GetLength(0)) {
-       f = double.MaxValue;
+       f = 10.0; // TODO
        Array.Clear(grad, 0, grad.Length);
        return;
…
      // for normalized MSE = 1/variance(t) * MSE(t, pred)
      // TODO: Perf. (by standardization of target variables before evaluation of all trees)
-     var invVar = Enumerable.Range(0, targetVariables.Length)
-       .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
-       .Select(vec => vec.Variance())
-       .Select(v => 1.0 / v)
-       .ToArray();
+     // var invVar = Enumerable.Range(0, targetVariables.Length)
+     //   .Select(c => Enumerable.Range(0, targetValues.GetLength(0)).Select(row => targetValues[row, c])) // column vectors
+     //   .Select(vec => vec.StandardDeviation()) // TODO: variance of stddev
+     //   .Select(v => 1.0 / v)
+     //   .ToArray();
+
+     double[] invVar = Enumerable.Repeat(1.0, targetVariables.Length).ToArray();

      // objective function is NMSE
…
        var res = (y - y_pred_f);
        var ressq = res * res;
-       f += ressq * invN * invVar[c];
-       g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c];
+       f += ressq * invN * invVar[c] /* * Math.Exp(-0.2 * r) */;
+       g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c] /* * Math.Exp(-0.2 * r) */;
      }
      r++;
…
      if (!results.ContainsKey("Solution")) {
        results.Add(new Result("Solution", typeof(Solution)));
      }
+     if (!results.ContainsKey("Squared error and gradient")) {
+       results.Add(new Result("Squared error and gradient", typeof(DataTable)));
+     }
…
        trainingDataTable.Rows.Add(actualValuesRow);
        trainingDataTable.Rows.Add(predictedValuesRow);
+
+       for (int paramIdx = 0; paramIdx < optTheta.Length; paramIdx++) {
+         var paramSensitivityRow = new DataRow($"∂{targetVar}/∂θ{paramIdx}", $"Sensitivities of parameter {paramIdx}", trainingPrediction.Select(arr => arr[colIdx].Item2[paramIdx]).ToArray());
+         paramSensitivityRow.VisualProperties.SecondYAxis = true;
+         trainingDataTable.Rows.Add(paramSensitivityRow);
+       }
        trainingList.Add(trainingDataTable);
      } else {
…
        }
      }
+
+     var errorTable = new DataTable("Squared error and gradient");
+     var seRow = new DataRow("Squared error");
+     var gradientRows = Enumerable.Range(0, optTheta.Length).Select(i => new DataRow($"∂SE/∂θ{i}")).ToArray();
+     errorTable.Rows.Add(seRow);
+     foreach (var gRow in gradientRows) {
+       gRow.VisualProperties.SecondYAxis = true;
+       errorTable.Rows.Add(gRow);
+     }
+     var targetValues = targetVars.Select(v => problemData.Dataset.GetDoubleValues(v, trainingRows).ToArray()).ToArray();
+     int r = 0;
+     double invN = 1.0 / trainingRows.Count();
+     foreach (var y_pred in trainingPrediction) {
+       // calculate objective function gradient
+       double f_i = 0.0;
+       Vector g_i = Vector.CreateNew(new double[optTheta.Length]);
+       for (int colIdx = 0; colIdx < targetVars.Length; colIdx++) {
+         var y_pred_f = y_pred[colIdx].Item1;
+         var y = targetValues[colIdx][r];
+
+         var res = (y - y_pred_f);
+         var ressq = res * res;
+         f_i += ressq * invN /* * Math.Exp(-0.2 * r) */;
+         g_i = g_i - 2.0 * res * y_pred[colIdx].Item2 * invN /* * Math.Exp(-0.2 * r) */;
+       }
+       seRow.Values.Add(f_i);
+       for (int j = 0; j < g_i.Length; j++) gradientRows[j].Values.Add(g_i[j]);
+       r++;
+     }
+     results["Squared error and gradient"].Value = errorTable;
+
      // TODO: DRY for training and test
      var testList = new ItemList<DataTable>();
…
            IntegrateHL(trees, calculatedVariables, variableValues, parameterValues, numericIntegrationSteps);
          else if (odeSolver == "CVODES")
-           IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
+           throw new NotImplementedException();
+           // IntegrateCVODES(trees, calculatedVariables, variableValues, parameterValues, t - prevT);
          else throw new InvalidOperationException("Unknown ODE solver " + odeSolver);
          prevT = t;
…
    #region CVODES

+   /*
    /// <summary>
    /// Here we use CVODES to solve the ODE. Forward sensitivities are used to calculate the gradient for parameter optimization
…
      };
    }
+   */
    #endregion
…
    }

+   double[] deltaF = new double[calculatedVariables.Length];
+   Vector[] deltaG = new Vector[calculatedVariables.Length];

    double h = 1.0 / numericIntegrationSteps;
    for (int step = 0; step < numericIntegrationSteps; step++) {
-     var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
+     //var deltaValues = new Dictionary<string, Tuple<double, Vector>>();
      for (int i = 0; i < trees.Length; i++) {
        var tree = trees[i];
…
        // Root.GetSubtree(0).GetSubtree(0) skips programRoot and startSymbol
-       var res = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues);
-       deltaValues.Add(targetVarName, res);
+       double f; Vector g;
+       InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValues, out f, out g);
+       deltaF[i] = f;
+       deltaG[i] = g;
      }

      // update variableValues for next step, trapezoid integration
-     foreach (var kvp in deltaValues) {
-       var oldVal = variableValues[kvp.Key];
+     for (int i = 0; i < trees.Length; i++) {
+       var varName = calculatedVariables[i];
+       var oldVal = variableValues[varName];
        var newVal = Tuple.Create(
-         oldVal.Item1 + h * kvp.Value.Item1,
-         oldVal.Item2 + h * kvp.Value.Item2
+         oldVal.Item1 + h * deltaF[i],
+         oldVal.Item2 + deltaG[i].Scale(h)
        );
-       variableValues[kvp.Key] = newVal;
-     }
-
+       variableValues[varName] = newVal;
+     }
+
+     // TODO perf
      foreach (var node in nodeValues.Keys.ToArray()) {
        if (node.SubtreeCount == 0 && !IsConstantNode(node)) {
…
    }

-   private static Tuple<double, Vector> InterpretRec(
+   private static void InterpretRec(
      ISymbolicExpressionTreeNode node,
-     Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> nodeValues // contains value and gradient vector for a node (variables and constants only)
-   ) {
-
+     Dictionary<ISymbolicExpressionTreeNode, Tuple<double, Vector>> nodeValues, // contains value and gradient vector for a node (variables and constants only)
+     out double f,
+     out Vector g
+   ) {
+     double fl, fr;
+     Vector gl, gr;
      switch (node.Symbol.Name) {
        case "+": {
-         var l = InterpretRec(node.GetSubtree(0), nodeValues);
-         var r = InterpretRec(node.GetSubtree(1), nodeValues);
-
-         return Tuple.Create(l.Item1 + r.Item1, l.Item2 + r.Item2);
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
+         f = fl + fr;
+         g = Vector.AddTo(gl, gr);
+         break;
        }
        case "*": {
-         var l = InterpretRec(node.GetSubtree(0), nodeValues);
-         var r = InterpretRec(node.GetSubtree(1), nodeValues);
-
-         return Tuple.Create(l.Item1 * r.Item1, l.Item2 * r.Item1 + l.Item1 * r.Item2);
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
+         f = fl * fr;
+         g = Vector.AddTo(gl.Scale(fr), gr.Scale(fl)); // f'*g + f*g'
+         break;
        }

        case "-": {
-         var l = InterpretRec(node.GetSubtree(0), nodeValues);
-         var r = InterpretRec(node.GetSubtree(1), nodeValues);
-
-         return Tuple.Create(l.Item1 - r.Item1, l.Item2 - r.Item2);
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
+         f = fl - fr;
+         g = Vector.Subtract(gl, gr);
+         break;
        }
        case "%": {
-         var l = InterpretRec(node.GetSubtree(0), nodeValues);
-         var r = InterpretRec(node.GetSubtree(1), nodeValues);
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);

          // protected division
-         if (r.Item1.IsAlmost(0.0)) {
-           return Tuple.Create(0.0, Vector.Zero);
+         if (fr.IsAlmost(0.0)) {
+           f = 0;
+           g = Vector.Zero;
          } else {
-           return Tuple.Create(
-             l.Item1 / r.Item1,
-             l.Item1 * -1.0 / (r.Item1 * r.Item1) * r.Item2 + 1.0 / r.Item1 * l.Item2 // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'
-           );
+           f = fl / fr;
+           g = Vector.AddTo(gr.Scale(fl * -1.0 / (fr * fr)), gl.Scale(1.0 / fr)); // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'
          }
+         break;
        }
        case "sin": {
-         var x = InterpretRec(node.GetSubtree(0), nodeValues);
-         return Tuple.Create(
-           Math.Sin(x.Item1),
-           Vector.Cos(x.Item2) * x.Item2
-         );
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         f = Math.Sin(fl);
+         g = gl.Scale(Math.Cos(fl));
+         break;
        }
        case "cos": {
-         var x = InterpretRec(node.GetSubtree(0), nodeValues);
-         return Tuple.Create(
-           Math.Cos(x.Item1),
-           -Vector.Sin(x.Item2) * x.Item2
-         );
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         f = Math.Cos(fl);
+         g = gl.Scale(-Math.Sin(fl));
+         break;
        }
        case "sqr": {
-         var x = InterpretRec(node.GetSubtree(0), nodeValues);
-         return Tuple.Create(
-           x.Item1 * x.Item1,
-           2.0 * x.Item1 * x.Item2
-         );
+         InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+         f = fl * fl;
+         g = gl.Scale(2.0 * fl);
+         break;
        }
        default: {
-         return nodeValues[node]; // value and gradient for constants and variables must be set by the caller
+         var t = nodeValues[node];
+         f = t.Item1;
+         g = Vector.CreateNew(t.Item2);
+         break;
        }
      }
…
      var newVariablesList = new CheckedItemList<StringValue>(ProblemData.Dataset.VariableNames.Select(str => new StringValue(str).AsReadOnly()).ToArray()).AsReadOnly();
      var matchingItems = newVariablesList.Where(item => currentlySelectedVariables.Contains(item.Value)).ToArray();
-     foreach (var matchingItem in matchingItems) {
-       newVariablesList.SetItemCheckedState(matchingItem, true);
+     foreach (var item in newVariablesList) {
+       if (currentlySelectedVariables.Contains(item.Value)) {
+         newVariablesList.SetItemCheckedState(item, true);
+       } else {
+         newVariablesList.SetItemCheckedState(item, false);
+       }
      }
      TargetVariablesParameter.Value = newVariablesList;
…
      // whenever ProblemData is changed we create a new grammar with the necessary symbols
      var g = new SimpleSymbolicExpressionGrammar();
-     g.AddSymbols(FunctionSet.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray(), 2, 2);
-
-     // TODO
-     //g.AddSymbols(new[] {
-     //  "exp",
-     //  "log", // log( <expr> ) // TODO: init a theta to ensure the value is always positive
-     //  "exp_minus" // exp((-1) * <expr>
-     //}, 1, 1);
+     var unaryFunc = new string[] { "sin", "cos", "sqr" };
+     var binaryFunc = new string[] { "+", "-", "*", "%" };
+     foreach (var func in unaryFunc) {
+       if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 1, 1);
+     }
+     foreach (var func in binaryFunc) {
+       if (FunctionSet.CheckedItems.Any(ci => ci.Value.Value == func)) g.AddSymbol(func, 2, 2);
+     }

      foreach (var variableName in ProblemData.AllowedInputVariables.Union(TargetVariables.CheckedItems.Select(i => i.Value.Value)))
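Note on the objective: r16597 replaces the per-target inverse-variance weights with a constant 1.0, so the objective is temporarily a plain MSE rather than the NMSE the comment describes. For reference, a minimal sketch of the normalized objective that the commented-out code computes (illustrative names, not the HeuristicLab API):

    using System;
    using System.Linq;

    static class NmseSketch {
        // NMSE per target c: MSE(target_c, pred_c) / Var(target_c), summed over targets.
        // With invVar = 1.0 (as in r16597) this degenerates to a plain MSE sum.
        public static double Nmse(double[][] targets, double[][] predictions) {
            double total = 0.0;
            for (int c = 0; c < targets.Length; c++) {
                double mean = targets[c].Average();
                double invVar = 1.0 / targets[c].Select(t => (t - mean) * (t - mean)).Average();
                double mse = targets[c].Zip(predictions[c], (t, p) => (t - p) * (t - p)).Average();
                total += invVar * mse;
            }
            return total;
        }
    }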
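Note on IntegrateHL: the rewritten loop first evaluates the derivative value f and its parameter gradient g for every calculated variable (deltaF/deltaG), then advances state and sensitivities together with step size h. The comment in the diff still says "trapezoid integration", but the update shown is a plain forward Euler step. A self-contained sketch of that update using plain arrays instead of the branch's Vector class (names are illustrative):

    using System;

    static class EulerSketch {
        // One forward Euler step that advances both the state x and the
        // sensitivities dx/dθ. `rhs` is a stand-in for InterpretRec on tree i:
        // it returns dx_i/dt and its gradient w.r.t. the parameters θ.
        public static void Step(
            double[] x, double[][] dxdTheta, double h,
            Func<double[], int, (double f, double[] g)> rhs) {
            int n = x.Length;
            var deltaF = new double[n];
            var deltaG = new double[n][];
            // evaluate all right-hand sides first, so no equation sees a
            // partially updated state within the same step
            for (int i = 0; i < n; i++) (deltaF[i], deltaG[i]) = rhs(x, i);
            for (int i = 0; i < n; i++) {
                x[i] += h * deltaF[i];
                for (int j = 0; j < dxdTheta[i].Length; j++)
                    dxdTheta[i][j] += h * deltaG[i][j];
            }
        }
    }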
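Note on InterpretRec: the new out-parameter version is a forward-mode automatic differentiation interpreter. Every subtree evaluation yields a value f and a gradient vector g, combined via the sum, product, quotient, and chain rules visible in the switch cases. The same idea in a compact dual-number form (a sketch of the technique, not the branch's Vector-based implementation):

    using System;

    // A value together with its gradient w.r.t. the parameter vector θ.
    readonly struct Dual {
        public readonly double F;
        public readonly double[] G;
        public Dual(double f, double[] g) { F = f; G = g; }

        public static Dual operator +(Dual a, Dual b)
            => new Dual(a.F + b.F, Zip(a.G, b.G, (x, y) => x + y));
        public static Dual operator -(Dual a, Dual b)
            => new Dual(a.F - b.F, Zip(a.G, b.G, (x, y) => x - y));
        // product rule: (u*v)' = u'*v + u*v'
        public static Dual operator *(Dual a, Dual b)
            => new Dual(a.F * b.F, Zip(a.G, b.G, (x, y) => x * b.F + a.F * y));
        // chain rule, e.g. (sin u)' = cos(u) * u'
        public static Dual Sin(Dual a) => new Dual(Math.Sin(a.F), Scale(a.G, Math.Cos(a.F)));
        public static Dual Sqr(Dual a) => new Dual(a.F * a.F, Scale(a.G, 2.0 * a.F));

        static double[] Zip(double[] a, double[] b, Func<double, double, double> op) {
            var r = new double[a.Length];
            for (int i = 0; i < a.Length; i++) r[i] = op(a[i], b[i]);
            return r;
        }
        static double[] Scale(double[] a, double s) {
            var r = new double[a.Length];
            for (int i = 0; i < a.Length; i++) r[i] = a[i] * s;
            return r;
        }
    }

A parameter θ_k enters the evaluation as a Dual with the k-th unit vector as gradient; constants carry an all-zero gradient. This matches what nodeValues provides for leaf nodes in the interpreter.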
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/SolutionView.Designer.cs
(r16400 → r16597)

      this.forecastTextbox.Size = new System.Drawing.Size(100, 20);
      this.forecastTextbox.TabIndex = 2;
-     this.forecastTextbox.Text = "180";
+     this.forecastTextbox.Text = "1";
      //
      // forecastLabel
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Vector.cs
(r16398 → r16597)

      if (a == Zero) return b;
      if (b == Zero) return a;
-     Debug.Assert(a.arr.Length == b.arr.Length);
+     Trace.Assert(a.arr.Length == b.arr.Length);
      var res = new double[a.arr.Length];
      for (int i = 0; i < res.Length; i++)
…
    }

+   public static Vector AddTo(Vector a, Vector b) {
+     if (b == Zero) return a;
+     if (a == Zero) {
+       var vArr = new double[b.Length];
+       b.CopyTo(vArr);
+       return new Vector(vArr);
+     } else {
+       Trace.Assert(a.arr.Length == b.arr.Length);
+       for (int i = 0; i < a.arr.Length; i++)
+         a.arr[i] += b.arr[i];
+       return a;
+     }
+   }
+
+
+   public static Vector Subtract(Vector a, Vector b) {
+     if (b == Zero) return a;
+     if (a == Zero) {
+       var vArr = new double[b.Length];
+       a = new Vector(vArr);
+     }
+     Trace.Assert(a.arr.Length == b.arr.Length);
+     for (int i = 0; i < a.arr.Length; i++)
+       a.arr[i] -= b.arr[i];
+     return a;
+   }
+
    public static Vector operator -(Vector a, Vector b) {
      if (b == Zero) return a;
      if (a == Zero) return -b;
-     Debug.Assert(a.arr.Length == b.arr.Length);
+     Trace.Assert(a.arr.Length == b.arr.Length);
      var res = new double[a.arr.Length];
      for (int i = 0; i < res.Length; i++)
…
        res[i] = s * v.arr[i];
      return new Vector(res);
+   }
+
+   public Vector Scale(double s) {
+     if (this != Zero) {
+       for (int i = 0; i < arr.Length; i++) {
+         arr[i] *= s;
+       }
+     }
+
+     return this;
    }
…
    private readonly double[] arr; // backing array

+   /// <summary>
+   ///
+   /// </summary>
+   /// <param name="v">Is not cloned!</param>
    public Vector(double[] v) {
      this.arr = v;
…

    public void CopyTo(double[] target) {
-     Debug.Assert(arr.Length <= target.Length);
+     Trace.Assert(arr.Length <= target.Length);
      Array.Copy(arr, target, arr.Length);
    }
+
+
+   /// <summary>
+   /// Creates a new vector
+   /// </summary>
+   /// <param name="a"> is cloned</param>
+   /// <returns></returns>
+   public static Vector CreateNew(double[] a) {
+     var arr = new double[a.Length];
+     Array.Copy(a, arr, arr.Length);
+     return new Vector(arr);
+   }
+
+   /// <summary>
+   /// Creates a new vector
+   /// </summary>
+   /// <param name="v"> is cloned</param>
+   /// <returns></returns>
+   public static Vector CreateNew(Vector v) {
+     if (v == Zero) return Zero;
+     var arr = new double[v.arr.Length];
+     Array.Copy(v.arr, arr, arr.Length);
+     return new Vector(arr);
+   }
  }
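Note on the new Vector members: unlike the allocating operators, AddTo and Scale mutate their receiver in place (AddTo even returns its mutated first argument). That aliasing is why the interpreter's default case now returns Vector.CreateNew(t.Item2) instead of handing out the cached gradient itself. A short hypothetical usage illustrating the hazard, assuming the Vector class above:

    // Scale mutates in place; cached and scaled are the SAME object afterwards.
    var cached = new Vector(new double[] { 1.0, 2.0 }); // e.g. a gradient stored in nodeValues
    var scaled = cached.Scale(2.0);                     // cached now holds { 2.0, 4.0 } too

    // safe pattern: copy first, then mutate the copy
    var safe = Vector.CreateNew(cached);
    safe.Scale(0.5);                                    // cached is unaffected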