Changeset 16604
- Timestamp: 02/13/19 17:29:49 (6 years ago)
- Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files: 2 edited
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r16603 → r16604:

@@ -428 +428 @@
         var y = optimizationData.targetValues[i][trainIdx];
         fi[termIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale of NMSE
         if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[termIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];

         termIdx++;
@@ -457 +457 @@
       nodeValueLookup.UpdateParamValues(x);

-      var predictionEnumerator = Integrate(optimizationData).GetEnumerator();
+      Integrate(optimizationData, fi, jac);
       var trees = optimizationData.trees;

-      predictionEnumerator.MoveNext();
-      var currentPrediction = predictionEnumerator.Current;
+      // update result with error
       for (int trainIdx = 0; trainIdx < rows.Length; trainIdx++) {
-        var pred_i = currentPrediction;
-
         for (int i = 0; i < trees.Length; i++) {
           var tree = trees[i];
           var y = optimizationData.targetValues[i][trainIdx];
-          var f = pred_i[i].Item1;
-          var g = pred_i[i].Item2;
-          fi[outputIdx] = (y - f) * optimizationData.inverseStandardDeviation[i]; // scale for normalized squared error
-          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[outputIdx, j] = -g[j] * optimizationData.inverseStandardDeviation[i];
-
+          fi[outputIdx] = (y - fi[outputIdx]) * optimizationData.inverseStandardDeviation[i]; // scale for normalized squared error
+          if (jac != null) for (int j = 0; j < x.Length; j++) jac[outputIdx, j] = -jac[outputIdx, j] * optimizationData.inverseStandardDeviation[i];
           outputIdx++;
         }
-        if (predictionEnumerator.MoveNext()) currentPrediction = predictionEnumerator.Current; // if we stopped early => just use the last prediction until the end
       }
     }
@@ -606 +599 @@
           var ressq = res * res;
           f_i += ressq * invN;
-          g_i = g_i - 2.0 * res * y_pred[colIdx].Item2 * invN;
+          g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res * invN));
         seRow.Values.Add(f_i);
@@ -652 +645 @@
       var clonedTrees = new List<ISymbolicExpressionTree>();
       for (int idx = 0; idx < trees.Length; idx++) {
-        clonedTrees.Add((ISymbolicExpressionTree)trees[idx]);
+        clonedTrees.Add((ISymbolicExpressionTree)trees[idx].Clone());
       }
       var ds = problemData.Dataset;
@@ -689 +682 @@
     }

-
-    public static double[] ExtractParametersFromTrees(ISymbolicExpressionTree[] trees) {
-      return trees
-        .SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>().Select(n => n.Value))
-        .ToArray();
-    }
-
-
-
     #region interpretation
@@ -711 +695 @@

     public static IEnumerable<Tuple<double, Vector>[]> Integrate(OptimizationData optimizationData) {
       var nTargets = optimizationData.targetVariables.Length;
+      var n = optimizationData.rows.Length * optimizationData.targetVariables.Length;
+      var d = optimizationData.nodeValueLookup.ParameterCount;
+      double[] fi = new double[n];
+      double[,] jac = new double[n, d];
+      Integrate(optimizationData, fi, jac);
+      for (int i = 0; i < optimizationData.rows.Length; i++) {
+        var res = new Tuple<double, Vector>[nTargets];
+        for (int j = 0; j < nTargets; j++) {
+          res[j] = Tuple.Create(fi[i * nTargets + j], Vector.CreateFromMatrixRow(jac, i * nTargets + j));
+        }
+        yield return res;
+      }
+    }
+
+    public static void Integrate(OptimizationData optimizationData, double[] fi, double[,] jac) {
       var trees = optimizationData.trees;
       var dataset = optimizationData.problemData.Dataset;
@@ -730 +729 @@

       var t0 = rows.First();
+      var outputRowIdx = 0;

       // initialize values for inputs and targets from dataset
-      foreach (var varName in inputVariables.Concat(targetVariables)) {
-        nodeValues.SetVariableValue(varName, dataset.GetDoubleValue(varName, t0), Vector.Zero);
+      foreach (var varName in inputVariables) {
+        var y0 = dataset.GetDoubleValue(varName, t0);
+        nodeValues.SetVariableValue(varName, y0, Vector.Zero);
+      }
+      foreach (var varName in targetVariables) {
+        var y0 = dataset.GetDoubleValue(varName, t0);
+        nodeValues.SetVariableValue(varName, y0, Vector.Zero);
+
+        // output starting value
+        fi[outputRowIdx] = y0;
+        Vector.Zero.CopyTo(jac, outputRowIdx);
+
+        outputRowIdx++;
       }

@@ -752 +763 @@
       }

-      // return first value as stored in the dataset
-      yield return calculatedVariables
-        .Select(calcVarName => nodeValues.GetVariableValue(calcVarName))
-        .ToArray();
-
       var prevT = t0; // TODO: here we should use a variable for t if it is available. Right now we assume equidistant measurements.
       foreach (var t in rows.Skip(1)) {
@@ -767 +773 @@
         prevT = t;

-
-        var res = calculatedVariables
-          .Select(targetVar => nodeValues.GetVariableValue(targetVar))
-          .ToArray();
-
-        // stop early if there are undefined values
-        if (res.Any(ri => double.IsNaN(ri.Item1) || double.IsInfinity(ri.Item1))) yield break;
-        yield return res;
+        for (int i = 0; i < calculatedVariables.Length; i++) {
+          var targetVar = calculatedVariables[i];
+          var yt = nodeValues.GetVariableValue(targetVar);
+
+          // fill up remaining rows with last valid value if there are invalid values
+          if (double.IsNaN(yt.Item1) || double.IsInfinity(yt.Item1)) {
+            for (; outputRowIdx < fi.Length; outputRowIdx++) {
+              var prevIdx = outputRowIdx - calculatedVariables.Length;
+              fi[outputRowIdx] = fi[prevIdx]; // current <- prev
+              if (jac != null) for (int j = 0; j < jac.GetLength(1); j++) jac[outputRowIdx, j] = jac[prevIdx, j];
+            }
+            return;
+          };
+
+          fi[outputRowIdx] = yt.Item1;
+          var g = yt.Item2;
+          g.CopyTo(jac, outputRowIdx);
+          outputRowIdx++;
+        }

         // update for next time step (only the inputs)
@@ -1087 +1104 @@
       ISymbolicExpressionTree[] trees,
       string[] calculatedVariables, // names of integrated variables
-      // Dictionary<string, Tuple<double, Vector>> variableValues, //y (intput and output) input: y(t0), output: y(t0+1)
       NodeValueLookup nodeValues,
-      // double[] parameterValues,
       int numericIntegrationSteps) {
@@ -1114 +1129 @@
         var varName = calculatedVariables[i];
         var oldVal = nodeValues.GetVariableValue(varName);
-        nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2 + deltaG[i].Scale(h));
+        nodeValues.SetVariableValue(varName, oldVal.Item1 + h * deltaF[i], oldVal.Item2.Add(deltaG[i].Scale(h)));
       }
     }
@@ -1123 +1138 @@
       if (node is ConstantTreeNode) {
         return ((ConstantTreeNode)node).Value;
       } else if (node is VariableTreeNode) {
         return nodeValues.NodeValue(node);
       } else if (node.Symbol is Addition) {
@@ -1175 +1190 @@
       if (node.Symbol is Constant || node.Symbol is Variable) {
         z = nodeValues.NodeValue(node);
-        dz = Vector.CreateNew(nodeValues.NodeGradient(node));
+        dz = Vector.CreateNew(nodeValues.NodeGradient(node)); // original gradient vectors are never changed by evaluation
       } else if (node.Symbol is Addition) {
         InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
         InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
         z = f + g;
-        dz = df + dg; // Vector.AddTo(gl, gr);
-
+        dz = df.Add(dg);
       } else if (node.Symbol is Multiplication) {
         InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
         InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
         z = f * g;
-        dz = df * g + f * dg; // Vector.AddTo(gl.Scale(fr), gr.Scale(fl)); // f'*g + f*g'
+        dz = df.Scale(g).Add(dg.Scale(f)); // f'*g + f*g'

       } else if (node.Symbol is Subtraction) {
@@ -1192 +1206 @@
         InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
         z = -f;
-        dz = -df;
+        dz = df.Scale(-1.0);
       } else {
         InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
         InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
-
         z = f - g;
-        dz = df - dg;
+        dz = df.Subtract(dg);
       }

@@ -1210 +1223 @@
         dz = Vector.Zero;
       } else {
-        z = f / g;
-        dz = -f / (g * g) * dg + df / g; // Vector.AddTo(dg.Scale(f * -1.0 / (g * g)), df.Scale(1.0 / g)); // (f/g)' = f * (1/g)' + 1/g * f' = f * -1/g² * g' + 1/g * f'
+        var inv_g = 1.0 / g;
+        z = f * inv_g;
+
+        dz = dg.Scale(-f * inv_g * inv_g).Add(df.Scale(inv_g));
       }

@@ -1217 +1232 @@
       InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
       z = Math.Sin(f);
-      dz = Math.Cos(f) * df;
+      dz = df.Scale(Math.Cos(f));

     } else if (node.Symbol is Cosine) {
       InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
       z = Math.Cos(f);
-      dz = -Math.Sin(f) * df;
+      dz = df.Scale(-Math.Sin(f));
     } else if (node.Symbol is Square) {
       InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
       z = f * f;
-      dz = 2.0 * f * df;
+      dz = df.Scale(2.0 * f);
     } else {
       throw new NotSupportedException("unsupported symbol");
@@ -1522 +1537 @@
       this.odeSolver = odeSolver;
       this.nodeValueLookup = new NodeValueLookup(trees);
-      this.rows =
+      this.rows = episodes.SelectMany(ep => Enumerable.Range(ep.Start, ep.Size)).ToArray();
     }
   }
@@ -1541 +1556 @@
     this.constantNodes = trees.SelectMany(t => t.IterateNodesPrefix().OfType<ConstantTreeNode>()).ToArray();
     constantGradientVectors = new Vector[constantNodes.Length];
     for (int paramIdx = 0; paramIdx < constantNodes.Length; paramIdx++) {
       constantGradientVectors[paramIdx] = Vector.CreateIndicator(length: constantNodes.Length, idx: paramIdx);
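In Problem.cs the enumerator-based evaluation is replaced by a void `Integrate(optimizationData, fi, jac)` overload that writes predictions and gradients directly into a preallocated residual vector `fi` and Jacobian matrix `jac`; the old `IEnumerable` variant becomes a thin wrapper that reads those buffers back. Along the way the missing `Clone()` when copying trees is fixed, the unused `ExtractParametersFromTrees` helper is removed, and gradient arithmetic switches to the new in-place `Vector` methods. The snippet below is only an illustration of the flat buffer layout that the wrapper assumes when it indexes `fi[i * nTargets + j]`: one entry per (time step, target variable) pair, targets varying fastest. The names `FiJacLayoutDemo` and `OutputIndex` and all values are invented for this sketch and are not part of the changeset.

```csharp
using System;

// Hypothetical demo (not part of the changeset): illustrates the row-major
// layout used by the new Integrate(optimizationData, fi, jac) overload.
public static class FiJacLayoutDemo {
  // Index of target 'targetIdx' at time step 'rowIdx' in the flat buffers.
  public static int OutputIndex(int rowIdx, int targetIdx, int nTargets) {
    return rowIdx * nTargets + targetIdx;
  }

  public static void Main() {
    int nRows = 3, nTargets = 2, nParams = 4;
    var fi = new double[nRows * nTargets];           // one prediction per (row, target)
    var jac = new double[nRows * nTargets, nParams]; // matching gradient rows

    // Fill in the same order the integration loop writes its outputs:
    // all targets of the first row, then all targets of the next row, ...
    int outputIdx = 0;
    for (int row = 0; row < nRows; row++) {
      for (int target = 0; target < nTargets; target++) {
        fi[outputIdx] = row + 0.1 * target;                      // dummy prediction
        for (int j = 0; j < nParams; j++) jac[outputIdx, j] = j; // dummy gradient entries
        outputIdx++;
      }
    }

    // Read back a single entry, as the IEnumerable wrapper does with fi[i * nTargets + j].
    int idx = OutputIndex(rowIdx: 2, targetIdx: 1, nTargets: nTargets);
    Console.WriteLine($"prediction = {fi[idx]}, d/dp0 = {jac[idx, 0]}");
  }
}
```

Because the early-exit path now fills the remaining rows with the last valid prediction instead of yielding fewer rows, the buffers always contain exactly `rows.Length * nTargets` entries, which is what the least-squares evaluation above relies on.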
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Vector.cs
r16601 → r16604:

@@ -17 +17 @@
     }

-    // public static Vector AddTo(Vector a, Vector b) {
-    //   if (b == Zero) return a;
-    //   if (a == Zero) {
-    //     var vArr = new double[b.Length];
-    //     b.CopyTo(vArr);
-    //     return new Vector(vArr);
-    //   } else {
-    //     Trace.Assert(a.arr.Length == b.arr.Length);
-    //     for (int i = 0; i < a.arr.Length; i++)
-    //       a.arr[i] += b.arr[i];
-    //     return a;
-    //   }
-    // }
+    public Vector Add(Vector b) {
+      var a = this;
+      if (b == Zero) return a;
+      if (a == Zero) {
+        var vArr = new double[b.Length];
+        b.CopyTo(vArr);
+        return new Vector(vArr);
+      } else {
+        Trace.Assert(a.arr.Length == b.arr.Length);
+        for (int i = 0; i < a.arr.Length; i++)
+          a.arr[i] += b.arr[i];
+        return a;
+      }
+    }


-    public static Vector Subtract(Vector a, Vector b) {
+    public Vector Subtract(Vector b) {
+      var a = this;
       if (b == Zero) return a;
       if (a == Zero) {
@@ -39 +41 @@
       }
       Trace.Assert(a.arr.Length == b.arr.Length);
-      var res = new double[a.arr.Length];
       for (int i = 0; i < a.arr.Length; i++)
-        res[i] = a.arr[i] - b.arr[i];
-      return new Vector(res);
+        a.arr[i] -= b.arr[i];
+      return a;
     }

-    public static Vector operator -(Vector a, Vector b) {
-      return Subtract(a, b);
-    }
-    public static Vector operator -(Vector v) {
-      return v.Scale(-1.0);
-    }
+    // public static Vector operator -(Vector a, Vector b) {
+    //   return Subtract(a, b);
+    // }
+    // public static Vector operator -(Vector v) {
+    //   return v.Scale(-1.0);
+    // }

-    public static Vector operator *(double s, Vector v) {
-      return v.Scale(s);
-    }
+    //public static Vector operator *(double s, Vector v) {
+    //  return v.Scale(s);
+    //}

     public Vector Scale(double s) {
       if (this == Zero) return Zero;

-      var res = new double[arr.Length];
       for (int i = 0; i < arr.Length; i++) {
-        res[i] = arr[i] * s;
+        arr[i] *= s;
       }
-      return new Vector(res);
-
+      return this;
     }

-    public static Vector operator *(Vector v, double s) {
-      return s * v;
-    }
+    // public static Vector operator *(Vector v, double s) {
+    //   return s * v;
+    // }

     public static Vector operator *(Vector u, Vector v) {
@@ -89 +88 @@
     }

-    public static Vector operator /(Vector v, double s) {
-      return v.Scale(1.0 / s);
+    // public static Vector operator /(Vector v, double s) {
+    //   return v.Scale(1.0 / s);
+    // }
+
+    public Vector Sin() {
+      for (int i = 0; i < arr.Length; i++) arr[i] = Math.Sin(arr[i]);
+      return this;
     }
-
-    public static Vector Sin(Vector s) {
-      var res = new double[s.arr.Length];
-      for (int i = 0; i < res.Length; i++) res[i] = Math.Sin(s.arr[i]);
-      return new Vector(res);
-    }
-    public static Vector Cos(Vector s) {
-      var res = new double[s.arr.Length];
-      for (int i = 0; i < res.Length; i++) res[i] = Math.Cos(s.arr[i]);
-      return new Vector(res);
+    public Vector Cos() {
+      for (int i = 0; i < arr.Length; i++) arr[i] = Math.Cos(arr[i]);
+      return this;
     }

@@ -137 +134 @@
     }

+    public void CopyTo(double[,] target, int rowIdx) {
+      if (target == null) return;
+      if (this == Zero) {
+        for (int j = 0; j < target.GetLength(1); j++) target[rowIdx, j] = 0.0;
+      } else {
+        for (int j = 0; j < target.GetLength(1); j++) target[rowIdx, j] = arr[j];
+      }
+    }
     /// <summary>
     /// Creates a new vector
@@ -165 +170 @@
       return new Vector(gArr);
     }
+
+    internal static Vector CreateFromMatrixRow(double[,] jac, int rowIdx) {
+      var arr = new double[jac.GetLength(1)];
+      for (int i = 0; i < arr.Length; i++) arr[i] = jac[rowIdx, i];
+      return new Vector(arr);
+    }
   }
 }
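In Vector.cs the allocating operators (`-`, `*`, `/` with scalars) are commented out in favour of in-place instance methods: `Add`, `Subtract`, `Scale`, `Sin`, and `Cos` now modify the receiver's array and return `this` (except when the receiver is `Vector.Zero`), and `CopyTo(double[,], int)` / `CreateFromMatrixRow` move data between vectors and Jacobian rows. The sketch below is a usage illustration only, not part of the changeset; it assumes the `Vector` class above is in scope, and the element values in the comments assume that `CreateIndicator` produces a unit vector, as suggested by its use for constant-node gradients in Problem.cs.

```csharp
// Usage sketch (not part of the changeset), assuming the Vector class above is in scope.
var g1 = Vector.CreateIndicator(length: 3, idx: 0);   // presumably (1, 0, 0)
var g2 = Vector.CreateIndicator(length: 3, idx: 1);   // presumably (0, 1, 0)

// In-place semantics: Scale and Add modify the receiver and return it for chaining.
var sum = g1.Add(g2.Scale(2.0));   // g1 now holds (1, 2, 0); sum is the same object as g1
                                   // g2 was also scaled in place and now holds (0, 2, 0)

// To combine a stored gradient without destroying it, copy it first,
// which is what InterpretRec does via Vector.CreateNew(...):
var g3 = Vector.CreateIndicator(length: 3, idx: 2);
var dz = Vector.CreateNew(g3).Add(g1);                // g3 still holds (0, 0, 1)
```

This is also why the new comment on the Constant/Variable case in InterpretRec notes that the original gradient vectors are never changed by evaluation: the leaves hand out copies, so the mutating `Add`/`Scale` calls operate on intermediates rather than on the indicator vectors stored for the constants.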