Changeset 16727 for branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis.ConstrainedNonlinearRegression
- Timestamp: 03/29/19 15:01:47
- File: 1 edited
Legend: lines prefixed with '-' were removed in r16727, lines prefixed with '+' were added; unprefixed lines are unmodified context.
branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis.ConstrainedNonlinearRegression/3.4/ConstrainedNonlinearRegression.cs
r16696 → r16727:

         }
       }

-      // var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
-      //
-      // SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, tree, problemData, problemData.TrainingIndices,
-      //   applyLinearScaling: applyLinearScaling, maxIterations: maxIterations,
-      //   updateVariableWeights: false, updateConstantsInTree: true);
-
-
       var intervals = problemData.IntervalConstraints;
       var constraintsParser = new IntervalConstraintsParser();
…
       }

+      // local function
       void UpdateThetaValues(double[] theta) {
         for (int i = 0; i < theta.Length; ++i) {
…
       }

+      // buffers for calculate_jacobian
+      var target = problemData.TargetVariableTrainingValues.ToArray();
+      var fi_eval = new double[target.Length];
+      var jac_eval = new double[target.Length, thetaValues.Count];
+
       // define the callback used by the alglib optimizer
       // the x argument for this callback represents our theta
+      // local function
       void calculate_jacobian(double[] x, double[] fi, double[,] jac, object obj) {
         UpdateThetaValues(x);
…
         var autoDiffEval = new VectorAutoDiffEvaluator();
         autoDiffEval.Evaluate(preparedTree, problemData.Dataset, problemData.TrainingIndices.ToArray(),
-          GetParameterNodes(preparedTree, allThetaNodes), out double[] fi_eval, out double[,] jac_eval);
-        var target = problemData.TargetVariableTrainingValues.ToArray();
+          GetParameterNodes(preparedTree, allThetaNodes), fi_eval, jac_eval);

         // calc sum of squared errors and gradient
…
         for (int i = 0; i < target.Length; i++) {
           var res = target[i] - fi_eval[i];
-          sse += res * res;
+          sse += 0.5 * res * res;
           for (int j = 0; j < g.Length; j++) {
-            g[j] += -2.0 * res * jac_eval[i, j];
+            g[j] -= res * jac_eval[i, j];
           }
         }

-        fi[0] = sse;
-        for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j]; }
+        fi[0] = sse / target.Length;
+        for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j] / target.Length; }

         var intervalEvaluator = new IntervalEvaluator();
…
       }

       // prepare alglib
       alglib.minnlcstate state;
       alglib.minnlcreport rep;
+      alglib.optguardreport optGuardRep;
       var x0 = thetaValues.ToArray();

       alglib.minnlccreate(x0.Length, x0, out state);
-      double epsx = 1e-6;
-      int maxits = 0;
-      alglib.minnlcsetalgoslp(state);
-      alglib.minnlcsetcond(state, 0, maxits);
+      alglib.minnlcsetalgoslp(state); // SLP is more robust but slower
+      alglib.minnlcsetcond(state, 0, maxIterations);
       var s = Enumerable.Repeat(1d, x0.Length).ToArray(); // scale is set to unit scale
       alglib.minnlcsetscale(state, s);

-      // set boundary constraints
-      // var boundaryLower = Enumerable.Repeat(-10d, n).ToArray();
-      // var boundaryUpper = Enumerable.Repeat(10d, n).ToArray();
-      // alglib.minnlcsetbc(state, boundaryLower, boundaryUpper);
       // set non-linear constraints: 0 equality constraints, 1 inequality constraint
       alglib.minnlcsetnlc(state, 0, constraintTrees.Count);

+      alglib.minnlcoptguardsmoothness(state);
+      alglib.minnlcoptguardgradient(state, 0.001);
+
       alglib.minnlcoptimize(state, calculate_jacobian, null, null);
       alglib.minnlcresults(state, out double[] xOpt, out rep);
+      alglib.minnlcoptguardresults(state, out optGuardRep);

       var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
…
       var parent = n.Parent;
       if (thetaNodes[thetaIdx].Any()) {
-        // HACKY: REUSE CONSTANT TREE NODE IN SEVERAL TREES
+        // HACK: REUSE CONSTANT TREE NODE IN SEVERAL TREES
         // we use this trick to allow autodiff over thetas when thetas occur multiple times in the tree (e.g. in derived trees)
         var constNode = thetaNodes[thetaIdx].First();
…
       for (int i = 0; i < nodes.Count; ++i) {
         var node = nodes[i];
-        /*if (node is VariableTreeNode variableTreeNode) {
-          var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
-          thetaVar.Weight = 1;
-          thetaVar.VariableName = $"θ{n++}";
-
-          thetaNames.Add(thetaVar.VariableName);
-          thetaValues.Add(variableTreeNode.Weight);
-          variableTreeNode.Weight = 1; // set to unit weight
-
-          var parent = variableTreeNode.Parent;
-          var prod = MakeNode<Multiplication>(thetaVar, variableTreeNode);
-          if (parent != null) {
-            var index = parent.IndexOfSubtree(variableTreeNode);
-            parent.RemoveSubtree(index);
-            parent.InsertSubtree(index, prod);
-          }
-        } else*/ if (node is ConstantTreeNode constantTreeNode) {
+        if (node is ConstantTreeNode constantTreeNode) {
           var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
           thetaVar.Weight = 1;
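A note on the rescaled objective in calculate_jacobian: the changeset replaces the raw sum of squared errors (whose gradient carried the -2.0 factor) with half the mean squared error. Writing n = target.Length and r_i for the i-th residual, the new constants follow directly from differentiating the rescaled objective (the notation below is ours, not from the source):

    f(\theta) = \frac{1}{2n} \sum_{i=1}^{n} r_i^2, \qquad
    r_i = y_i - \hat{y}_i(\theta), \qquad
    \frac{\partial f}{\partial \theta_j} = -\frac{1}{n} \sum_{i=1}^{n} r_i \, J_{ij}, \qquad
    J_{ij} = \frac{\partial \hat{y}_i}{\partial \theta_j}

This matches sse += 0.5 * res * res, g[j] -= res * jac_eval[i, j], and the final division by target.Length. The minimizer is unchanged; the rescaling only keeps the magnitudes of fi[0] and its gradient independent of the number of training rows.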
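For readers unfamiliar with the alglib calls added above, here is a minimal, self-contained sketch of the same minnlc + OptGuard call pattern on a toy problem (minimize a quadratic inside a disc). It is illustrative only: the toy objective, the Callback name, and the OptGuard report fields printed at the end are our assumptions; only the alglib calls themselves appear in the changeset.

using System;

// Sketch only: assumes the alglib .NET assembly is referenced.
public static class MinnlcOptGuardSketch {
    // Same callback shape as calculate_jacobian in the changeset:
    // fi[0] / jac row 0 hold the objective and its gradient,
    // fi[1] / jac row 1 hold the inequality constraint g(x) <= 0.
    static void Callback(double[] x, double[] fi, double[,] jac, object obj) {
        fi[0] = (x[0] - 1) * (x[0] - 1) + (x[1] - 2) * (x[1] - 2); // objective
        jac[0, 0] = 2 * (x[0] - 1);
        jac[0, 1] = 2 * (x[1] - 2);

        fi[1] = x[0] * x[0] + x[1] * x[1] - 4.0;                   // stay inside disc of radius 2
        jac[1, 0] = 2 * x[0];
        jac[1, 1] = 2 * x[1];
    }

    public static void Main() {
        var x0 = new double[] { 0.0, 0.0 };
        alglib.minnlccreate(x0.Length, x0, out alglib.minnlcstate state);

        alglib.minnlcsetalgoslp(state);                          // SLP, as in the changeset
        alglib.minnlcsetcond(state, 0, 100);                     // epsx = 0 (automatic), max 100 iterations
        alglib.minnlcsetscale(state, new double[] { 1.0, 1.0 }); // unit scale
        alglib.minnlcsetnlc(state, 0, 1);                        // 0 equality, 1 inequality constraint

        // OptGuard: check smoothness and verify the analytic gradient
        // against numerical differentiation (test step 0.001).
        alglib.minnlcoptguardsmoothness(state);
        alglib.minnlcoptguardgradient(state, 0.001);

        alglib.minnlcoptimize(state, Callback, null, null);
        alglib.minnlcresults(state, out double[] xOpt, out alglib.minnlcreport rep);
        alglib.minnlcoptguardresults(state, out alglib.optguardreport ogRep);

        Console.WriteLine($"x* = ({xOpt[0]:F3}, {xOpt[1]:F3})");
        // Field names per alglib's optguardreport; verify against your alglib version.
        Console.WriteLine($"bad gradient suspected: {ogRep.badgradsuspected}");
        Console.WriteLine($"non-smoothness (C1) suspected: {ogRep.nonc1suspected}");
    }
}

The two null arguments to minnlcoptimize correspond to the optional progress-report callback and user object, mirroring the call in the changeset.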