Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
03/29/19 15:01:47 (5 years ago)
Author:
gkronber
Message:

#2994: added a unit test and made some minor improvements to interpreters

File:
1 edited

Legend:

Unmodified
Added
Removed
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis.ConstrainedNonlinearRegression/3.4/ConstrainedNonlinearRegression.cs

    r16696 r16727  
    248248        }
    249249      }
    250 
    251       // var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
    252       //
    253       // SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, tree, problemData, problemData.TrainingIndices,
    254       //   applyLinearScaling: applyLinearScaling, maxIterations: maxIterations,
    255       //   updateVariableWeights: false, updateConstantsInTree: true);
    256 
    257 
    258250      var intervals = problemData.IntervalConstraints;
    259251      var constraintsParser = new IntervalConstraintsParser();
     
    315307      }
    316308
     309      // local function
    317310      void UpdateThetaValues(double[] theta) {
    318311        for (int i = 0; i < theta.Length; ++i) {
     
    321314      }
    322315
     316      // buffers for calculate_jacobian
     317      var target = problemData.TargetVariableTrainingValues.ToArray();
     318      var fi_eval = new double[target.Length];
     319      var jac_eval = new double[target.Length, thetaValues.Count];
     320
    323321      // define the callback used by the alglib optimizer
    324322      // the x argument for this callback represents our theta
     323      // local function
    325324      void calculate_jacobian(double[] x, double[] fi, double[,] jac, object obj) {
    326325        UpdateThetaValues(x);
     
    328327        var autoDiffEval = new VectorAutoDiffEvaluator();
    329328        autoDiffEval.Evaluate(preparedTree, problemData.Dataset, problemData.TrainingIndices.ToArray(),
    330           GetParameterNodes(preparedTree, allThetaNodes), out double[] fi_eval, out double[,] jac_eval);
    331         var target = problemData.TargetVariableTrainingValues.ToArray();
     329          GetParameterNodes(preparedTree, allThetaNodes), fi_eval, jac_eval);
    332330
    333331        // calc sum of squared errors and gradient
     
    336334        for (int i = 0; i < target.Length; i++) {
    337335          var res = target[i] - fi_eval[i];
    338           sse += res * res;
     336          sse += 0.5 * res * res;
    339337          for (int j = 0; j < g.Length; j++) {
    340             g[j] += -2.0 * res * jac_eval[i, j];
    341           }
    342         }
    343 
    344         fi[0] = sse;
    345         for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j]; }
     338            g[j] -= res * jac_eval[i, j];
     339          }
     340        }
     341
     342        fi[0] = sse / target.Length;
     343        for (int j = 0; j < x.Length; j++) { jac[0, j] = g[j] / target.Length; }
    346344
    347345        var intervalEvaluator = new IntervalEvaluator();
     
    358356      }
    359357
     358
     359
    360360      // prepare alglib
    361361      alglib.minnlcstate state;
    362362      alglib.minnlcreport rep;
     363      alglib.optguardreport optGuardRep;
    363364      var x0 = thetaValues.ToArray();
    364365
    365366      alglib.minnlccreate(x0.Length, x0, out state);
    366       double epsx = 1e-6;
    367       int maxits = 0;
    368       alglib.minnlcsetalgoslp(state);
    369       alglib.minnlcsetcond(state, 0, maxits);
     367      alglib.minnlcsetalgoslp(state);        // SLP is more robust but slower
     368      alglib.minnlcsetcond(state, 0, maxIterations);
    370369      var s = Enumerable.Repeat(1d, x0.Length).ToArray();  // scale is set to unit scale
    371370      alglib.minnlcsetscale(state, s);
    372371
    373       // set boundary constraints
    374       // var boundaryLower = Enumerable.Repeat(-10d, n).ToArray();
    375       // var boundaryUpper = Enumerable.Repeat(10d, n).ToArray();
    376       // alglib.minnlcsetbc(state, boundaryLower, boundaryUpper);
    377372      // set non-linear constraints: 0 equality constraints, 1 inequality constraint
    378373      alglib.minnlcsetnlc(state, 0, constraintTrees.Count);
    379374
     375      alglib.minnlcoptguardsmoothness(state);
     376      alglib.minnlcoptguardgradient(state, 0.001);
     377
    380378      alglib.minnlcoptimize(state, calculate_jacobian, null, null);
    381379      alglib.minnlcresults(state, out double[] xOpt, out rep);
     380      alglib.minnlcoptguardresults(state, out optGuardRep);
    382381
    383382      var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
     
    416415            var parent = n.Parent;
    417416            if(thetaNodes[thetaIdx].Any()) {
    418               // HACKY: REUSE CONSTANT TREE NODE IN SEVERAL TREES
     417              // HACK: REUSE CONSTANT TREE NODE IN SEVERAL TREES
    419418              // we use this trick to allow autodiff over thetas when thetas occur multiple times in the tree (e.g. in derived trees)
    420419              var constNode = thetaNodes[thetaIdx].First();
     
    444443      for (int i = 0; i < nodes.Count; ++i) {
    445444        var node = nodes[i];
    446         /*if (node is VariableTreeNode variableTreeNode) {
    447           var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
    448           thetaVar.Weight = 1;
    449           thetaVar.VariableName = $"θ{n++}";
    450 
    451           thetaNames.Add(thetaVar.VariableName);
    452           thetaValues.Add(variableTreeNode.Weight);
    453           variableTreeNode.Weight = 1; // set to unit weight
    454 
    455           var parent = variableTreeNode.Parent;
    456           var prod = MakeNode<Multiplication>(thetaVar, variableTreeNode);
    457           if (parent != null) {
    458             var index = parent.IndexOfSubtree(variableTreeNode);
    459             parent.RemoveSubtree(index);
    460             parent.InsertSubtree(index, prod);
    461           }
    462         } else*/ if (node is ConstantTreeNode constantTreeNode) {
     445        if (node is ConstantTreeNode constantTreeNode) {
    463446          var thetaVar = (VariableTreeNode)new Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
    464447          thetaVar.Weight = 1;
Note: See TracChangeset for help on using the changeset viewer.