Changeset 18179
- Timestamp:
- 01/04/22 12:01:17 (3 years ago)
- File:
-
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
branches/3136_Structural_GP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/NMSESingleObjectiveConstraintsEvaluator.cs
r18178 r18179 300 300 var rowEvaluationsCounter = new EvaluationsCounter(); 301 301 302 alglib.lsfitstate state; 303 alglib.lsfitreport rep; 304 int retVal; 302 alglib.minlmreport rep; 305 303 306 304 IDataset ds = problemData.Dataset; 307 double[,] x = new double[rows.Count(), parameters.Count]; 305 int n = rows.Count(); 306 int k = parameters.Count; 307 308 double[,] x = new double[n, k]; 308 309 int row = 0; 309 310 foreach (var r in rows) { … … 320 321 } 321 322 double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray(); 322 int n = x.GetLength(0); 323 int m = x.GetLength(1); 324 int k = c.Length; 325 326 alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(func); 327 alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(func_grad); 323 324 328 325 alglib.ndimensional_rep xrep = (p, f, obj) => iterationCallback(p, f, obj); 329 326 330 327 try { 331 alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state); 332 alglib.lsfitsetcond(state, 0.0, maxIterations); 333 alglib.lsfitsetxrep(state, iterationCallback != null); 334 alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter); 335 alglib.lsfitresults(state, out retVal, out c, out rep); 328 alglib.minlmcreatevj(y.Length, c, out var lmstate); 329 alglib.minlmsetcond(lmstate, 0.0, maxIterations); 330 alglib.minlmsetxrep(lmstate, iterationCallback != null); 331 // alglib.minlmoptguardgradient(lmstate, 1e-5); // for debugging gradient calculation 332 alglib.minlmoptimize(lmstate, CreateFunc(func, x, y), CreateJac(func_grad, x, y), xrep, rowEvaluationsCounter); 333 alglib.minlmresults(lmstate, out c, out rep); 334 // alglib.minlmoptguardresults(lmstate, out var optGuardReport); 336 335 } catch (ArithmeticException) { 337 336 return originalQuality; … … 343 342 counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n; 344 343 345 //retVal == -7 => parameter optimization failed due to wrong gradient 346 // -8 => optimizer detected NAN / 
INF in the target 347 // function and/or gradient 348 if (retVal != -7 && retVal != -8) { 344 // * TerminationType, completion code: 345 // * -8 optimizer detected NAN/INF values either in the function itself, 346 // or in its Jacobian 347 // * -5 inappropriate solver was used: 348 // * solver created with minlmcreatefgh() used on problem with 349 // general linear constraints (set with minlmsetlc() call). 350 // * -3 constraints are inconsistent 351 // * 2 relative step is no more than EpsX. 352 // * 5 MaxIts steps was taken 353 // * 7 stopping conditions are too stringent, 354 // further improvement is impossible 355 // * 8 terminated by user who called MinLMRequestTermination(). 356 // X contains point which was "current accepted" when termination 357 // request was submitted. 358 if (rep.terminationtype < 0) { 349 359 UpdateParameters(tree, c, updateVariableWeights); 350 360 } … … 382 392 } 383 393 384 private static alglib.ndimensional_pfunc CreatePFunc(TreeToAutoDiffTermConverter.ParametricFunction func) { 385 return (double[] c, double[] x, ref double fx, object o) => { 386 fx = func(c, x); 394 private static alglib.ndimensional_fvec CreateFunc(TreeToAutoDiffTermConverter.ParametricFunction func, double[,] x, double[] y) { 395 int d = x.GetLength(1); 396 // row buffer 397 var xi = new double[d]; 398 // function must return residuals, alglib optimizes resid² 399 return (double[] c, double[] resid, object o) => { 400 for (int i = 0; i < y.Length; i++) { 401 Buffer.BlockCopy(x, i * d * sizeof(double), xi, 0, d*sizeof(double)); // copy row. 
We are using BlockCopy instead of Array.Copy because x has rank 2 402 resid[i] = func(c, xi) - y[i]; 403 } 387 404 var counter = (EvaluationsCounter)o; 388 counter.FunctionEvaluations ++;405 counter.FunctionEvaluations += y.Length; 389 406 }; 390 407 } 391 408 392 private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) { 393 return (double[] c, double[] x, ref double fx, double[] grad, object o) => { 394 var tuple = func_grad(c, x); 395 fx = tuple.Item2; 396 Array.Copy(tuple.Item1, grad, grad.Length); 409 private static alglib.ndimensional_jac CreateJac(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad, double[,] x, double[] y) { 410 int numParams = x.GetLength(1); 411 // row buffer 412 var xi = new double[numParams]; 413 return (double[] c, double[] resid, double[,] jac, object o) => { 414 int numVars = c.Length; 415 for (int i = 0; i < y.Length; i++) { 416 Buffer.BlockCopy(x, i * numParams * sizeof(double), xi, 0, numParams * sizeof(double)); // copy row 417 var tuple = func_grad(c, xi); 418 resid[i] = tuple.Item2 - y[i]; 419 Buffer.BlockCopy(tuple.Item1, 0, jac, i * numVars * sizeof(double), numVars * sizeof(double)); // copy the gradient to jac. BlockCopy because jac has rank 2. 420 } 397 421 var counter = (EvaluationsCounter)o; 398 counter.GradientEvaluations++; 422 counter.FunctionEvaluations += y.Length; 423 counter.GradientEvaluations += y.Length; 399 424 }; 400 425 }
Note: See TracChangeset
for help on using the changeset viewer.