
Changeset 17006

Timestamp: 06/11/19 23:41:18
Author: gkronber
Message: #2994: added option to use NLC (SLP) or Non-smooth solver

File: 1 edited

Legend: unmodified lines carry no prefix, added lines are prefixed with +, removed lines with -
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedConstantOptimizationEvaluator.cs

r17004 → r17006

       get { return (IFixedValueParameter<BoolValue>)Parameters[CountEvaluationsParameterName]; }
     }
+    public IConstrainedValueParameter<StringValue> SolverParameter {
+      get { return (IConstrainedValueParameter<StringValue>)Parameters["Solver"]; }
+    }


…
     }

+    public string Solver {
+      get { return SolverParameter.Value.Value; }
+    }
     public override bool Maximization {
       get { return false; }
…

       Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
+      var validSolvers = new ItemSet<StringValue>(new[] { "non-smooth (minns)", "sequential linear programming (minnlc)" }.Select(s => new StringValue(s).AsReadOnly()));
+      Parameters.Add(new ConstrainedValueParameter<StringValue>("Solver", "The solver algorithm", validSolvers, validSolvers.First()));
       Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
       Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
…
         var counter = new EvaluationsCounter();
         quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter);
+          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, Solver, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter);

         if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
…
     public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
       ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling,
+      string solver,
       int maxIterations, bool updateVariableWeights = true,
       double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
…
       }

-
-
-      alglib.minnsstate state;
-      alglib.minnsreport rep;
-      try {
-        alglib.minnscreate(thetaValues.Count, thetaValues.ToArray(), out state);
-        // alglib.minnssetalgoslp(state);        // SLP is more robust but slower
-        alglib.minnssetbc(state, thetaValues.Select(_ => -10000.0).ToArray(), thetaValues.Select(_ => +10000.0).ToArray());
-        alglib.minnssetcond(state, 1E-7, maxIterations);
-        var s = Enumerable.Repeat(1d, thetaValues.Count).ToArray();  // scale is set to unit scale
-        alglib.minnssetscale(state, s);
-
-        // set non-linear constraints: 0 equality constraints, constraintTrees inequality constraints
-        alglib.minnssetnlc(state, 0, constraintTrees.Count);
-
-        alglib.minnsoptimize(state, calculate_jacobian, null, null);
-        alglib.minnsresults(state, out double[] xOpt, out rep);
-
-
-        // counter.FunctionEvaluations += rep.nfev; TODO
-        counter.GradientEvaluations += rep.nfev;
-
-        if (rep.terminationtype != -8) {
-          // update parameters in tree
-          var pIdx = 0;
-          foreach (var node in tree.IterateNodesPostfix()) {
-            if(node is ConstantTreeNode constTreeNode) {
-              constTreeNode.Value = xOpt[pIdx++];
-            } else if(node is VariableTreeNode varTreeNode) {
-              varTreeNode.Weight = xOpt[pIdx++];
+      if (solver.Contains("minns")) {
+        alglib.minnsstate state;
+        alglib.minnsreport rep;
+        try {
+          alglib.minnscreate(thetaValues.Count, thetaValues.ToArray(), out state);
+          // alglib.minnssetalgoslp(state);        // SLP is more robust but slower
+          alglib.minnssetbc(state, thetaValues.Select(_ => -10000.0).ToArray(), thetaValues.Select(_ => +10000.0).ToArray());
+          alglib.minnssetcond(state, 1E-7, maxIterations);
+          var s = Enumerable.Repeat(1d, thetaValues.Count).ToArray();  // scale is set to unit scale
+          alglib.minnssetscale(state, s);
+
+          // set non-linear constraints: 0 equality constraints, constraintTrees inequality constraints
+          alglib.minnssetnlc(state, 0, constraintTrees.Count);
+
+          alglib.minnsoptimize(state, calculate_jacobian, null, null);
+          alglib.minnsresults(state, out double[] xOpt, out rep);
+
+
+          // counter.FunctionEvaluations += rep.nfev; TODO
+          counter.GradientEvaluations += rep.nfev;
+
+          if (rep.terminationtype != -8) {
+            // update parameters in tree
+            var pIdx = 0;
+            foreach (var node in tree.IterateNodesPostfix()) {
+              if (node is ConstantTreeNode constTreeNode) {
+                constTreeNode.Value = xOpt[pIdx++];
+              } else if (node is VariableTreeNode varTreeNode) {
+                varTreeNode.Weight = xOpt[pIdx++];
+              }
             }
-          }
-
-          // note: we keep the optimized constants even when the tree is worse.
-        }
-
-      } catch (ArithmeticException) {
-        // eval MSE of original tree
-        return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
-
-      } catch (alglib.alglibexception) {
-        // eval MSE of original tree
-        return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
-      }
-
+
+            // note: we keep the optimized constants even when the tree is worse.
+          }
+
+        } catch (ArithmeticException) {
+          // eval MSE of original tree
+          return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
+
+        } catch (alglib.alglibexception) {
+          // eval MSE of original tree
+          return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
+        }
+      } else if (solver.Contains("minnlc")) {
+        alglib.minnlcstate state;
+        alglib.minnlcreport rep;
+        alglib.optguardreport optGuardRep;
+        try {
+          alglib.minnlccreate(thetaValues.Count, thetaValues.ToArray(), out state);
+          alglib.minnlcsetalgoslp(state);        // SLP is more robust but slower
+          alglib.minnlcsetbc(state, thetaValues.Select(_ => -10000.0).ToArray(), thetaValues.Select(_ => +10000.0).ToArray());
+          alglib.minnlcsetcond(state, 1E-7, maxIterations);
+          var s = Enumerable.Repeat(1d, thetaValues.Count).ToArray();  // scale is set to unit scale
+          alglib.minnlcsetscale(state, s);
+
+          // set non-linear constraints: 0 equality constraints, constraintTrees inequality constraints
+          alglib.minnlcsetnlc(state, 0, constraintTrees.Count);
+          alglib.minnlcoptguardsmoothness(state, 1);
+
+          alglib.minnlcoptimize(state, calculate_jacobian, null, null);
+          alglib.minnlcresults(state, out double[] xOpt, out rep);
+          alglib.minnlcoptguardresults(state, out optGuardRep);
+          if (optGuardRep.nonc0suspected) throw new InvalidProgramException("optGuardRep.nonc0suspected");
+          if (optGuardRep.nonc1suspected) throw new InvalidProgramException("optGuardRep.nonc1suspected");
+
+          // counter.FunctionEvaluations += rep.nfev; TODO
+          counter.GradientEvaluations += rep.nfev;
+
+          if (rep.terminationtype != -8) {
+            // update parameters in tree
+            var pIdx = 0;
+            foreach (var node in tree.IterateNodesPostfix()) {
+              if (node is ConstantTreeNode constTreeNode) {
+                constTreeNode.Value = xOpt[pIdx++];
+              } else if (node is VariableTreeNode varTreeNode) {
+                varTreeNode.Weight = xOpt[pIdx++];
+              }
+            }
+
+            // note: we keep the optimized constants even when the tree is worse.
+          }
+
+        } catch (ArithmeticException) {
+          // eval MSE of original tree
+          return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
+
+        } catch (alglib.alglibexception) {
+          // eval MSE of original tree
+          return SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling: false);
+        }
+      } else {
+        throw new ArgumentException($"Unknown solver {solver}");
+      }
+

       // evaluate tree with updated constants
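
For orientation, a minimal usage sketch of the new option, assuming an instance of ConstrainedConstantOptimizationEvaluator named evaluator (the variable name and the surrounding setup are hypothetical; only the "Solver" parameter, its two valid values, and the SolverParameter property come from this changeset). It selects the SLP-based minnlc solver instead of the default non-smooth minns solver; as the comment in the diff notes, SLP is more robust but slower.

  // Hypothetical sketch, not part of the changeset.
  // Requires: using System.Linq; plus the HeuristicLab assemblies that define the evaluator.
  var evaluator = new ConstrainedConstantOptimizationEvaluator();

  // The "Solver" parameter is a ConstrainedValueParameter<StringValue> with the values
  // "non-smooth (minns)" (default) and "sequential linear programming (minnlc)".
  var slpSolver = evaluator.SolverParameter.ValidValues
    .Single(v => v.Value.Contains("minnlc"));
  evaluator.SolverParameter.Value = slpSolver;  // OptimizeConstants then takes the minnlc branch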