Changeset 16599 for branches/2925_AutoDiffForDynamicalModels
- Timestamp: 02/11/19 18:12:03 (6 years ago)
- Location: branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3
- Files: 2 edited
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Problem.cs
Changes from r16597:

```diff
@@ -201 +201 @@
       // retrieve optimized parameters from nodes?
 
-      var problemData = ProblemData;
+      var problemData = Standardize(ProblemData);
       var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
       var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
@@ -227 +227 @@
     }
 
+    private IRegressionProblemData Standardize(IRegressionProblemData problemData) {
+      // var standardizedDataset = new Dataset(
+      //   problemData.Dataset.DoubleVariables,
+      //   problemData.Dataset.DoubleVariables.Select(v => Standardize(problemData.Dataset.GetReadOnlyDoubleValues(v)).ToList()));
+      // return new RegressionProblemData(standardizedDataset, problemData.AllowedInputVariables, problemData.TargetVariable);
+      return new RegressionProblemData(problemData.Dataset, problemData.AllowedInputVariables, problemData.TargetVariable);
+    }
+
     public static void OptimizeForEpisodes(
       ISymbolicExpressionTree[] trees,
@@ -242 +250 @@
       var targetValues = new double[rows.Length, targetVars.Length];
 
-      // collect values of all target variables
+
+      // collect values of all target variables
       var colIdx = 0;
       foreach (var targetVar in targetVars) {
@@ -266 +275 @@
       var theta = new double[paramNodes.Count + latentVariables.Length * episodes.Count()];
       for (int i = 0; i < theta.Length; i++)
-        theta[i] = random.NextDouble() * 2.0 - 1.0;
+        theta[i] = random.NextDouble() * 2.0e-2 - 1.0e-2;
 
       optTheta = new double[0];
       if (theta.Length > 0) {
```
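Two things happen in the hunks above: `ProblemData` is now routed through a new `Standardize` method, which is currently an identity pass-through with the intended z-scoring left commented out, and the random parameter initialization is narrowed from [-1, 1) to [-0.01, 0.01), presumably because near-zero starting values are a sensible default once variables are standardized. For reference, here is a minimal self-contained sketch of the per-variable transformation the commented-out code describes, using plain LINQ in place of HeuristicLab's `StandardDeviationPop` extension (the class name is hypothetical; a non-constant variable, s > 0, is assumed):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

static class StandardizeSketch {
  // z-score: subtract the mean, divide by the population standard deviation.
  // Assumes the variable is not constant (s > 0).
  public static IEnumerable<double> Standardize(IEnumerable<double> xs) {
    var arr = xs.ToArray();
    var m = arr.Average();
    var s = Math.Sqrt(arr.Average(xi => (xi - m) * (xi - m)));
    return arr.Select(xi => (xi - m) / s);
  }
}
```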
```diff
@@ -271 +280 @@
       if (theta.Length > 0) {
-        alglib.minlbfgsstate state;
-        alglib.minlbfgsreport report;
-        alglib.minlbfgscreate(Math.Min(theta.Length, 5), theta, out state);
-        alglib.minlbfgssetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
-        // alglib.minlbfgssetgradientcheck(state, 1e-4);
-        alglib.minlbfgsoptimize(state, EvaluateObjectiveAndGradient, null,
-          new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver }); //TODO: create a type
-
-        alglib.minlbfgsresults(state, out optTheta, out report);
-
-        /*
-         * L-BFGS algorithm results
-
-         INPUT PARAMETERS:
-           State - algorithm state
-
-         OUTPUT PARAMETERS:
-           X   - array[0..N-1], solution
-           Rep - optimization report:
-                 * Rep.TerminationType completion code:
-                   * -7 gradient verification failed.
-                        See MinLBFGSSetGradientCheck() for more information.
-                   * -2 rounding errors prevent further improvement.
-                        X contains best point found.
-                   * -1 incorrect parameters were specified
-                   *  1 relative function improvement is no more than EpsF.
-                   *  2 relative step is no more than EpsX.
-                   *  4 gradient norm is no more than EpsG
-                   *  5 MaxIts steps was taken
-                   *  7 stopping conditions are too stringent,
-                        further improvement is impossible
-                 * Rep.IterationsCount contains iterations count
-                 * NFEV contains number of function calculations
-        */
+        alglib.minlmstate state;
+        alglib.minlmreport report;
+        alglib.minlmcreatevj(targetValues.Length, theta, out state);
+        alglib.minlmsetcond(state, 0.0, 0.0, 0.0, maxParameterOptIterations);
+        alglib.minlmsetgradientcheck(state, 1.0e-3);
+        //TODO: create a type
+        var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver };
+        alglib.minlmoptimize(state, EvaluateObjectiveVector, EvaluateObjectiveVectorAndJacobian, null, myState);
+
+        alglib.minlmresults(state, out optTheta, out report);
+
+        /*************************************************************************
+        Levenberg-Marquardt algorithm results
+
+        INPUT PARAMETERS:
+          State - algorithm state
+
+        OUTPUT PARAMETERS:
+          X   - array[0..N-1], solution
+          Rep - optimization report; includes termination codes and
+                additional information. Termination codes are listed below,
+                see comments for this structure for more info.
+                Termination code is stored in rep.terminationtype field:
+                * -8 optimizer detected NAN/INF values either in the
+                     function itself, or in its Jacobian
+                * -7 derivative correctness check failed;
+                     see rep.funcidx, rep.varidx for more information.
+                * -3 constraints are inconsistent
+                *  2 relative step is no more than EpsX.
+                *  5 MaxIts steps was taken
+                *  7 stopping conditions are too stringent,
+                     further improvement is impossible
+                *  8 terminated by user who called minlmrequesttermination().
+                     X contains point which was "current accepted" when
+                     termination request was submitted.
+
+        -- ALGLIB --
+           Copyright 10.03.2009 by Bochkanov Sergey
+        *************************************************************************/
```
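The optimizer swap changes the callback contract: `minlbfgs` wanted a scalar objective plus gradient, while `minlm` in VJ mode wants a vector of residuals `fi` (sized with `targetValues.Length`, i.e. one entry per row and target) and its Jacobian. The added `minlmsetgradientcheck(state, 1.0e-3)` call verifies the analytic Jacobian against finite differences, which is useful while the autodiff code is in flux; a failed check surfaces as termination type -7. A minimal sketch of this pattern against the same ALGLIB .NET entry points used in the hunk above, fitting y = a·x + b to made-up data (class name, data, and parameters are illustrative only):

```csharp
using System;

public static class MinlmSketch {
  // toy data for y = 2x + 1 (illustrative only)
  static readonly double[] xs = { 0, 1, 2, 3 };
  static readonly double[] ys = { 1, 3, 5, 7 };

  // residual vector: r_i = y_i - (a*x_i + b), with p = (a, b)
  static void Fvec(double[] p, double[] fi, object obj) {
    for (int i = 0; i < xs.Length; i++)
      fi[i] = ys[i] - (p[0] * xs[i] + p[1]);
  }

  // Jacobian of the residuals: dr_i/da = -x_i, dr_i/db = -1
  static void Jac(double[] p, double[] fi, double[,] jac, object obj) {
    Fvec(p, fi, obj);
    for (int i = 0; i < xs.Length; i++) {
      jac[i, 0] = -xs[i];
      jac[i, 1] = -1.0;
    }
  }

  public static void Main() {
    double[] p = { 0.0, 0.0 };
    alglib.minlmstate state;
    alglib.minlmreport report;
    alglib.minlmcreatevj(xs.Length, p, out state);  // one residual per data point
    alglib.minlmsetcond(state, 0.0, 0.0, 0.0, 100); // at most 100 iterations
    alglib.minlmoptimize(state, Fvec, Jac, null, null);
    alglib.minlmresults(state, out p, out report);
    Console.WriteLine("a=" + p[0] + " b=" + p[1] + " termination=" + report.terminationtype);
  }
}
```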
```diff
@@ -308 +321 @@
         if (report.terminationtype < 0) { nmse = 10.0; return; }
-      }
-
-      // perform evaluation for optimal theta to get quality value
-      double[] grad = new double[optTheta.Length];
-      nmse = double.NaN;
-      EvaluateObjectiveAndGradient(optTheta, ref nmse, grad,
-        new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver });
-      if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10.0; return; } // return a large value (TODO: be consistent by using NMSE)
-    }
-
-    private static void EvaluateObjectiveAndGradient(double[] x, ref double f, double[] grad, object obj) {
+
+        nmse = state.f; //TODO check
+
+        // var myState = new object[] { trees, targetVars, problemData, targetValues, episodes.ToArray(), numericIntegrationSteps, latentVariables, odeSolver };
+        // EvaluateObjectiveVector(optTheta, ref nmse, grad, myState);
+        if (double.IsNaN(nmse) || double.IsInfinity(nmse)) { nmse = 10.0; return; } // return a large value (TODO: be consistent by using NMSE)
+      } else {
+        // no parameters
+        nmse = targetValues.Length;
+      }
+    }
+
+    // private static IEnumerable<double> Standardize(IEnumerable<double> xs) {
+    //   var m = xs.Average();
+    //   var s = xs.StandardDeviationPop();
+    //   return xs.Select(xi => (xi - m) / s);
+    // }
+
+    alglib.ndimensional_fvec fvec;
+    alglib.ndimensional_jac jac;
+
+    private static void EvaluateObjectiveVector(double[] x, double[] fi, object obj) {
+      EvaluateObjectiveVectorAndJacobian(x, fi, null, obj);
+    }
+
+    private static void EvaluateObjectiveVectorAndJacobian(double[] x, double[] fi, double[,] jac, object obj) {
       var trees = (ISymbolicExpressionTree[])((object[])obj)[0];
       var targetVariables = (string[])((object[])obj)[1];
@@ -341 +370 @@
         numericIntegrationSteps).ToArray();
 
+      // clear all result data structures
+      for (int j = 0; j < fi.Length; j++) {
+        fi[j] = 10.0;
+        if (jac != null) Array.Clear(jac, 0, jac.Length);
+      }
+
       if (predicted.Length != targetValues.GetLength(0)) {
-        f = 10.0; // TODO
-        Array.Clear(grad, 0, grad.Length);
         return;
       }
@@ -355 +388 @@
       // .ToArray();
 
-      double[] invVar = Enumerable.Repeat(1.0, targetVariables.Length).ToArray();
+      // double[] invVar = Enumerable.Repeat(1.0, targetVariables.Length).ToArray();
 
       // objective function is NMSE
-      f = 0.0;
       int n = predicted.Length;
       double invN = 1.0 / n;
-      var g = Vector.Zero;
+      int i = 0;
       int r = 0;
       foreach (var y_pred in predicted) {
@@ -371 +403 @@
           var y = targetValues[r, c];
 
-          var res = (y - y_pred_f);
-          var ressq = res * res;
-          f += ressq * invN * invVar[c] /* * Math.Exp(-0.2 * r) */;
-          g += -2.0 * res * y_pred[c].Item2 * invN * invVar[c] /* * Math.Exp(-0.2 * r) */;
+          fi[i] = (y - y_pred_f);
+          var g = y_pred[c].Item2;
+          if (jac != null && g != Vector.Zero) for (int j = 0; j < g.Length; j++) jac[i, j] = -g[j];
+          i++; // we put the errors for each target variable after each other
         }
         r++;
       }
-
-      g.CopyTo(grad);
     }
@@ -409 +439 @@
       results["SNMSE"].Value = new DoubleValue(bestIndividualAndQuality.Item2);
 
-      var problemData = ProblemData;
+      var problemData = Standardize(ProblemData);
       var targetVars = TargetVariables.CheckedItems.OrderBy(i => i.Index).Select(i => i.Value.Value).ToArray();
       var latentVariables = Enumerable.Range(1, NumberOfLatentVariables).Select(i => "λ" + i).ToArray(); // TODO: must coincide with the variables which are actually defined in the grammar and also for which we actually have trees
```
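A sign convention in the new callback is worth spelling out. Residuals are packed target-after-target within each row (the running index `i`), and each residual is the data value minus the prediction, so the Jacobian entries are the negated prediction gradients:

$$ r_i(\theta) = y_i - \hat{y}_i(\theta) \quad\Rightarrow\quad \frac{\partial r_i}{\partial \theta_j} = -\frac{\partial \hat{y}_i}{\partial \theta_j} $$

which is exactly the `jac[i, j] = -g[j]` assignment, with `g` the gradient vector produced by the autodiff interpreter. Note also that the `invN` and `invVar` normalization factors of the old scalar objective no longer enter the residuals, so `state.f` is a plain sum of squared errors rather than an NMSE; presumably that is what the "TODO check" next to `nmse = state.f` flags.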
```diff
@@ -1121 +1151 @@
         case "-": {
-          InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
-          InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
-          f = fl - fr;
-          g = Vector.Subtract(gl, gr);
+          if (node.SubtreeCount == 1) {
+            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+            f = -fl;
+            g = gl.Scale(-1.0);
+          } else {
+            InterpretRec(node.GetSubtree(0), nodeValues, out fl, out gl);
+            InterpretRec(node.GetSubtree(1), nodeValues, out fr, out gr);
+
+            f = fl - fr;
+            g = Vector.Subtract(gl, gr);
+          }
           break;
         }
```
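The new single-subtree branch handles unary minus in the interpreter and is just the chain rule for negation:

$$ f(\theta) = -u(\theta) \quad\Rightarrow\quad \nabla f(\theta) = -\nabla u(\theta) $$

so the value is negated and the gradient vector is scaled by -1 via `gl.Scale(-1.0)`, while the two-subtree branch keeps the original subtraction rule for both the value and the gradient.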
branches/2925_AutoDiffForDynamicalModels/HeuristicLab.Problems.DynamicalSystemsModelling/3.3/Vector.cs
Changes from r16597 (a stray blank line removed before a doc comment):

```diff
@@ -145 +145 @@
     }
 
-
     /// <summary>
     /// Creates a new vector
```