Changeset 16616
- Timestamp: 02/20/19 07:43:25
- Location: branches/2925_AutoDiffForDynamicalModels
- Files: 2 edited
Legend:
- unchanged context lines have no prefix
- added lines are prefixed with +
- removed lines are prefixed with -
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Encodings.SymbolicExpressionTreeEncoding/3.4/Crossovers/SubtreeCrossover.cs
r15583 → r16616

      private const string MaximumSymbolicExpressionTreeLengthParameterName = "MaximumSymbolicExpressionTreeLength";
      private const string MaximumSymbolicExpressionTreeDepthParameterName = "MaximumSymbolicExpressionTreeDepth";
+     private const string CrossoverProbabilityParameterName = "CrossoverProbability";

      #region Parameter Properties
  ...
      public IValueLookupParameter<IntValue> MaximumSymbolicExpressionTreeDepthParameter {
        get { return (IValueLookupParameter<IntValue>)Parameters[MaximumSymbolicExpressionTreeDepthParameterName]; }
      }
+     public IValueParameter<PercentValue> CrossoverProbabilityParameter {
+       get { return (IValueParameter<PercentValue>)Parameters[CrossoverProbabilityParameterName]; }
+     }
      #endregion
  ...
      public IntValue MaximumSymbolicExpressionTreeDepth {
        get { return MaximumSymbolicExpressionTreeDepthParameter.ActualValue; }
      }
+     public PercentValue CrossoverProbability {
+       get { return CrossoverProbabilityParameter.Value; }
+     }
      #endregion
  ...
      Parameters.Add(new ValueLookupParameter<IntValue>(MaximumSymbolicExpressionTreeDepthParameterName, "The maximal depth of the symbolic expression tree (a tree with one node has depth = 0)."));
      Parameters.Add(new ValueLookupParameter<PercentValue>(InternalCrossoverPointProbabilityParameterName, "The probability to select an internal crossover point (instead of a leaf node).", new PercentValue(0.9)));
+     Parameters.Add(new ValueParameter<PercentValue>(CrossoverProbabilityParameterName, "The probability to apply crossover (default 100%).", new PercentValue(1)));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SubtreeCrossover(this, cloner);
    }
+
+   [StorableHook(HookType.AfterDeserialization)]
+   private void AfterDeserialization() {
+     if (!Parameters.ContainsKey(CrossoverProbabilityParameterName)) {
+       Parameters.Add(new ValueParameter<PercentValue>(CrossoverProbabilityParameterName, "The probability to apply crossover (default 100%).", new PercentValue(1)));
+     }
+   }
  ...
        ISymbolicExpressionTree parent0, ISymbolicExpressionTree parent1) {
      return Cross(random, parent0, parent1, InternalCrossoverPointProbability.Value,
-       MaximumSymbolicExpressionTreeLength.Value, MaximumSymbolicExpressionTreeDepth.Value);
+       MaximumSymbolicExpressionTreeLength.Value, MaximumSymbolicExpressionTreeDepth.Value, CrossoverProbability.Value);
    }

    public static ISymbolicExpressionTree Cross(IRandom random,
      ISymbolicExpressionTree parent0, ISymbolicExpressionTree parent1,
-     double internalCrossoverPointProbability, int maxTreeLength, int maxTreeDepth) {
+     double internalCrossoverPointProbability, int maxTreeLength, int maxTreeDepth, double crossoverProbability = 1.0) {
+     if (random.NextDouble() >= crossoverProbability) {
+       return random.NextDouble() < 0.5 ? parent0 : parent1;
+     }
+
      // select a random crossover point in the first parent
      CutPoint crossoverPoint0;
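The new CrossoverProbability gate is easiest to see in isolation. Below is a minimal, self-contained sketch (with a generic placeholder type instead of ISymbolicExpressionTree, so it is not the HeuristicLab API) of the check added at the top of Cross: with probability 1 - crossoverProbability no crossover is performed and one of the two parents is returned unchanged, chosen uniformly at random.

    using System;

    static class CrossoverGateSketch {
      // Sketch of the probability gate added to SubtreeCrossover.Cross in this changeset.
      // T stands in for ISymbolicExpressionTree; 'cross' stands in for the existing subtree crossover.
      public static T CrossWithProbability<T>(Random random, T parent0, T parent1,
                                              double crossoverProbability, Func<T, T, T> cross) {
        // with probability (1 - crossoverProbability) skip crossover and return a random parent
        if (random.NextDouble() >= crossoverProbability)
          return random.NextDouble() < 0.5 ? parent0 : parent1;
        // otherwise perform the usual subtree crossover
        return cross(parent0, parent1);
      }
    }

With the default of PercentValue(1) the operator behaves exactly as before; the AfterDeserialization hook only adds the missing parameter when older persisted items are loaded.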
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
r16610 → r16616

      nmse = OptimizeParameters(trees, problemData, targetVars, latentVariables, episodes, maxParameterOptIterations, pretunedParameters, numericIntegrationSteps, odeSolver,
        out double[] optTheta);
+     // var optTheta = pretunedParameters;

      if (double.IsNaN(nmse) ||
  ...
      alglib.minlmreport report;
      var p = new double[initialTheta[treeIdx].Length];
-     var lowerBounds = Enumerable.Repeat(-100.0, p.Length).ToArray();
-     var upperBounds = Enumerable.Repeat(100.0, p.Length).ToArray();
+     var lowerBounds = Enumerable.Repeat(-1000.0, p.Length).ToArray();
+     var upperBounds = Enumerable.Repeat(1000.0, p.Length).ToArray();
      Array.Copy(initialTheta[treeIdx], p, p.Length);
      alglib.minlmcreatevj(targetValuesDiff.Count, p, out state);
      alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
      alglib.minlmsetbc(state, lowerBounds, upperBounds);
-     // alglib.minlmsetgradientcheck(state, 1.0e-3);
+ #if DEBUG
+     //alglib.minlmsetgradientcheck(state, 1.0e-7);
+ #endif
      alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);

      alglib.minlmresults(state, out optTheta, out report);
-     if (report.terminationtype < 0) { optTheta = initialTheta[treeIdx]; }
+     if (report.terminationtype < 0) {
+ #if DEBUG
+       if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
+ #endif
+       optTheta = initialTheta[treeIdx];
+     }
    } catch (alglib.alglibexception) {
      optTheta = initialTheta[treeIdx];
  ...
    if (initialTheta.Length > 0) {
-     var lowerBounds = Enumerable.Repeat(-100.0, initialTheta.Length).ToArray();
-     var upperBounds = Enumerable.Repeat(100.0, initialTheta.Length).ToArray();
+     var lowerBounds = Enumerable.Repeat(-1000.0, initialTheta.Length).ToArray();
+     var upperBounds = Enumerable.Repeat(1000.0, initialTheta.Length).ToArray();
      try {
        alglib.minlmstate state;
  ...
        alglib.minlmsetbc(state, lowerBounds, upperBounds);
        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
-       // alglib.minlmsetgradientcheck(state, 1.0e-3);
+ #if DEBUG
+       //alglib.minlmsetgradientcheck(state, 1.0e-7);
+ #endif
        alglib.minlmoptimize(state, IntegrateAndEvaluateObjectiveVector, IntegrateAndEvaluateObjectiveVectorAndJacobian, null, myState);
  ...
        if (report.terminationtype < 0) {
-         // there was a problem: reset theta and evaluate for inital values
+ #if DEBUG
+         if (report.terminationtype == -7) throw new InvalidProgramException("gradient calculation fail!");
+ #endif // there was a problem: reset theta and evaluate for inital values
          optTheta = initialTheta;
        }
  ...
      var pred = InterpretRec(tree.Root.GetSubtree(0).GetSubtree(0), nodeValueLookup);
      var y = optimizationData.targetValues[treeIdx][trainIdx];
-     fi[outputIdx++] = y - pred;
+     fi[outputIdx++] = (y - pred) * optimizationData.inverseStandardDeviation[treeIdx];
    }
  }
  ...
        var y = targetValues[colIdx][r];

-       var res = (y - y_pred_f);
+       var res = (y - y_pred_f) * optimizationData.inverseStandardDeviation[colIdx];
        var ressq = res * res;
-       f_i += ressq * optimizationData.inverseStandardDeviation[colIdx];
-       g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res * optimizationData.inverseStandardDeviation[colIdx]));
+       f_i += ressq;
+       g_i.Add(y_pred[colIdx].Item2.Scale(-2.0 * res));
      }
      seRow.Values.Add(f_i);
  ...
      var flag = CVODES.CVodeInit(cvode_mem, f, 0.0, y);
-     Debug.Assert(CVODES.CV_SUCCESS == flag);
+     Assert(CVODES.CV_SUCCESS == flag);

      double relTol = 1.0e-2;
      double absTol = 1.0;
      flag = CVODES.CVodeSStolerances(cvode_mem, relTol, absTol); // TODO: probably need to adjust absTol per variable
-     Debug.Assert(CVODES.CV_SUCCESS == flag);
+     Assert(CVODES.CV_SUCCESS == flag);
  ...
  (the same Debug.Assert(...) -> Assert(...) replacement is applied to the remaining checks in the
  CVODES setup and sensitivity code -- A != IntPtr.Zero, linearSolver != IntPtr.Zero, the return
  flags of CVDlsSetLinearSolver, CVDlsSetJacFn, CVodeSensInit, CVodeSensEEtolerances and
  CVodeGetSens, and t == tout -- as well as to the subtree-count assertions in the scalar
  InterpretRec overload: Addition/Multiplication == 2, Subtraction/Division <= 2,
  Sine/Cosine/Square/Exponential/Logarithm == 1)
  ...
      var f = InterpretRec(node.GetSubtree(0), nodeValues);
      return Math.Log(f);
    } else throw new NotSupportedException("unsupported symbol");
+ }
+
+ private static void Assert(bool cond) {
+ #if DEBUG
+   if (!cond) throw new InvalidOperationException("Assertion failed");
+ #endif
  }
  ...
      dz = Vector.CreateNew(nodeValues.NodeGradient(node)); // original gradient vectors are never changed by evaluation
    } else if (node.Symbol is Addition) {
+
+     Assert(node.SubtreeCount == 2);
      InterpretRec(node.GetSubtree(0), nodeValues, out f, out df);
      InterpretRec(node.GetSubtree(1), nodeValues, out g, out dg);
      z = f + g;
      dz = df.Add(dg);
  ...
  (matching Assert(node.SubtreeCount == 2), Assert(node.SubtreeCount <= 2) and
  Assert(node.SubtreeCount == 1) checks are added to the Multiplication, Subtraction, Division,
  Sine, Cosine, Square, Exponential and Logarithm branches of the gradient InterpretRec overload)
  ...
      // make sure our multi-manipulator is the only manipulator
      e.Operators = new IOperator[] { multiManipulator }.Concat(filteredOperators);
+
+     // set the crossover probability to reduce likelihood that multiple trees are crossed at the same time
+     var subtreeCrossovers = e.Operators.OfType<SubtreeCrossover>();
+     foreach (var xover in subtreeCrossovers) {
+       xover.CrossoverProbability.Value = 0.3;
+     }
+
      encoding = encoding.Add(e); // only limit by length
  ...
  (the identical block is also inserted before the second encoding = encoding.Add(e); call)
  ...
      this.targetValues = targetValues;
      this.variables = inputVariables;
-     //if (targetValues != null) {
-     //this.inverseStandardDeviation = new double[targetValues.Length];
-     //for (int i = 0; i < targetValues.Length; i++) {
-     //// calculate variance for each episode separately and calc the average
-     //var epStartIdx = 0;
-     //var stdevs = new List<double>();
-     //foreach (var ep in episodes) {
-     //var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
-     //stdevs.Add(epValues.StandardDeviation());
-     //epStartIdx += ep.Size;
-     //}
-     //inverseStandardDeviation[i] = 1.0 / stdevs.Average();
-     //}
-     //} else
-     this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
+     if (targetValues != null) {
+       this.inverseStandardDeviation = new double[targetValues.Length];
+       for (int i = 0; i < targetValues.Length; i++) {
+         // calculate variance for each episode separately and calc the average
+         var epStartIdx = 0;
+         var stdevs = new List<double>();
+         foreach (var ep in episodes) {
+           var epValues = targetValues[i].Skip(epStartIdx).Take(ep.Size);
+           stdevs.Add(epValues.StandardDeviation());
+           epStartIdx += ep.Size;
+         }
+         inverseStandardDeviation[i] = 1.0 / stdevs.Average();
+       }
+     } else
+       this.inverseStandardDeviation = Enumerable.Repeat(1.0, trees.Length).ToArray();
      this.episodes = episodes;
      this.numericIntegrationSteps = numericIntegrationSteps;
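The changes to the residual computations and to the OptimizationData constructor belong together: each target variable's residuals are now weighted by the inverse of its average per-episode standard deviation, so variables on very different scales contribute comparably to the least-squares objective (because the scaled residual is squared, the summed objective is effectively weighted by the inverse variance). The following sketch mirrors that computation; the helper names, the episodeSizes argument and the use of the sample standard deviation are illustrative assumptions, not the HeuristicLab API.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class InverseStdevSketch {
      // Average the per-episode standard deviations of one target series and invert the result,
      // mirroring the block that this changeset re-activates in the OptimizationData constructor.
      // 'episodeSizes' is a hypothetical stand-in for episodes.Select(ep => ep.Size).
      public static double InverseStandardDeviation(double[] targetValues, IEnumerable<int> episodeSizes) {
        var stdevs = new List<double>();
        int start = 0;
        foreach (var size in episodeSizes) {
          var ep = targetValues.Skip(start).Take(size).ToArray();
          var mean = ep.Average();
          var variance = ep.Sum(v => (v - mean) * (v - mean)) / (ep.Length - 1); // sample variance, for illustration
          stdevs.Add(Math.Sqrt(variance));
          start += size;
        }
        return 1.0 / stdevs.Average();
      }

      // The scaled residual that now enters the objective vector; its square is what is summed.
      public static double ScaledResidual(double y, double pred, double invStdev) {
        return (y - pred) * invStdev;
      }
    }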
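The hard-coded value 0.3 implements the comment above it: assuming the multi-encoding applies each tree's SubtreeCrossover independently (an assumption here, not stated in the diff), lowering the per-tree probability keeps most crossover events confined to a single tree of an individual. A quick check of that intuition:

    using System;

    static class CrossoverRateSketch {
      // Probability that at least two of n independently crossed trees change in one crossover
      // event, when each tree is crossed with probability p (binomial tail; independence assumed).
      public static double ProbAtLeastTwoCrossed(int n, double p) {
        double none = Math.Pow(1 - p, n);
        double exactlyOne = n * p * Math.Pow(1 - p, n - 1);
        return 1.0 - none - exactlyOne;
      }
      // For n = 3 trees: p = 1.0 always changes all of them, while p = 0.3 changes two or more
      // trees in only about 22% of crossover events.
    }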