
Changeset 17204 for branches


Timestamp: 08/13/19 09:29:11 (5 years ago)
Author: gkronber
Message: #2994: added parameter for gradient checks and experimented with preconditioning
Location: branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions
Files: 3 edited

Legend: unchanged context lines have a leading space, added lines a leading "+", removed lines a leading "-"; "…" marks omitted lines.
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedNLS.cs

    r17197 → r17204

    …
          get { return (IFixedValueParameter<DoubleValue>)Parameters["MaxTime"]; }
        }
    +    public IFixedValueParameter<BoolValue> CheckGradientParameter {
    +      get { return (IFixedValueParameter<BoolValue>)Parameters["CheckGradient"]; }
    +    }
        public int Iterations { get { return IterationsParameter.Value.Value; } set { IterationsParameter.Value.Value = value; } }

    …
          get { return ModelStructureParameter.Value.Value; }
          set { ModelStructureParameter.Value.Value = value; }
    +    }
    +    public bool CheckGradient {
    +      get { return CheckGradientParameter.Value.Value; }
    +      set { CheckGradientParameter.Value.Value = value; }
        }

    …
          Parameters.Add(new FixedValueParameter<DoubleValue>("FuncToleranceAbs", new DoubleValue(0)));
          Parameters.Add(new FixedValueParameter<DoubleValue>("MaxTime", new DoubleValue(10)));
    +      Parameters.Add(new FixedValueParameter<BoolValue>("CheckGradient", "Flag to indicate whether the gradient should be checked using numeric approximation", new BoolValue(false)));
    +
    +      CheckGradientParameter.Hidden = true;
        }

        public ConstrainedNLS(ConstrainedNLS original, Cloner cloner) : base(original, cloner) {
    +    }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    public void AfterDeserializationHook() {
    +      if (!Parameters.ContainsKey("CheckGradient")) {
    +        Parameters.Add(new FixedValueParameter<BoolValue>("CheckGradient", "Flag to indicate whether the gradient should be checked using numeric approximation", new BoolValue(false)));
    +
    +        CheckGradientParameter.Hidden = true;
    +      }
        }
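    The AfterDeserialization hook is the backwards-compatibility companion to the constructor change above: when a run persisted before this revision is loaded, the missing CheckGradient parameter is added on the fly with the same description and default, and hidden again. The Parameters.ContainsKey guard prevents a double registration for instances that already carry the parameter.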
     
    …
          Results.AddOrUpdateResult("Evaluations", functionEvaluations);
          var bestError = new DoubleValue(double.MaxValue);
    +      var curError = new DoubleValue(double.MaxValue);
          Results.AddOrUpdateResult("Best error", bestError);
    +      Results.AddOrUpdateResult("Current error", curError);
          Results.AddOrUpdateResult("Tree", tree);
          var qualitiesTable = new DataTable("Qualities");
    …

          var state = new ConstrainedNLSInternal(Solver.Value, tree, Iterations, problem.ProblemData, FuncToleranceRel, FuncToleranceAbs, MaxTime);
    +      if (CheckGradient) state.CheckGradient = true;
    +      int idx = 0;
    +      foreach (var constraintTree in state.constraintTrees) {
    +        Results.AddOrUpdateResult($"Constraint {idx++}", constraintTree);
    +      }

          // we use a listener model here to get state from the solver
    …
          bestQualityRow.Values.Add(bestError.Value);

    -
    -      Results.AddOrUpdateResult("Best solution", CreateSolution(state.BestTree, problem.ProblemData));
    +      Results.AddOrUpdateResult("Best solution", CreateSolution((ISymbolicExpressionTree)state.BestTree.Clone(), problem.ProblemData));
    +      Results.AddOrUpdateResult("Best solution constraint values", new DoubleArray(state.BestConstraintValues));


    …
            functionEvaluations.Value++;
            bestError.Value = state.BestError;
    +        curError.Value = state.CurError;
            curQualityRow.Values.Add(state.CurError);
            bestQualityRow.Values.Add(bestError.Value);
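    Together these changes expose the gradient check as a hidden algorithm parameter and report a running "Current error" alongside the best error. A hypothetical usage sketch (only the type and property names are taken from this diff; problem setup and execution are omitted):

        var alg = new ConstrainedNLS();
        alg.CheckGradient = true; // verify AutoDiff gradients against numeric approximations while solving

    Note also that the best tree is now cloned before CreateSolution, so the published solution is a snapshot rather than a live reference into the solver's state.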
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedNLSInternal.cs

    r17200 → r17204

    …
        public ISymbolicExpressionTree BestTree => bestTree;

    +    private double[] bestConstraintValues;
    +    public double[] BestConstraintValues => bestConstraintValues;
    +
    +    public bool CheckGradient { get; internal set; }
    +
        // begin internal state
        private IntPtr nlopt;
        private SymbolicDataAnalysisExpressionTreeLinearInterpreter interpreter;
        private readonly NLOpt.nlopt_func calculateObjectiveDelegate; // must hold the delegate to prevent GC
    +    private readonly NLOpt.nlopt_precond preconditionDelegate;
        private readonly IntPtr[] constraintDataPtr; // must hold the objects to prevent GC
        private readonly NLOpt.nlopt_func[] calculateConstraintDelegates; // must hold the delegates to prevent GC
    …
        private readonly ISymbolicExpressionTreeNode[] preparedTreeParameterNodes;
        private readonly List<ConstantTreeNode>[] allThetaNodes;
    +    public List<ISymbolicExpressionTree> constraintTrees;    // TODO make local in ctor (public for debugging)
    +
        private readonly double[] fi_eval;
        private readonly double[,] jac_eval;
        private readonly ISymbolicExpressionTree scaledTree;
    +    private readonly VectorAutoDiffEvaluator autoDiffEval;
    +    private readonly VectorEvaluator eval;

        // end internal state
    …
          this.problemData = problemData;
          this.interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
    -
    +      this.autoDiffEval = new VectorAutoDiffEvaluator();
    +      this.eval = new VectorEvaluator();
    +
    +      CheckGradient = false;

          var intervalConstraints = problemData.IntervalConstraints;
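    Hoisting autoDiffEval and eval into readonly fields avoids re-creating the evaluators on every objective evaluation (they were previously allocated inside CalculateObjective, see the removals further down) and also makes them available to the commented-out preconditioner sketch, which calls autoDiffEval.Evaluate.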
     
          Dictionary<string, ISymbolicExpressionTree> derivatives = new Dictionary<string, ISymbolicExpressionTree>();
          allThetaNodes = thetaNames.Select(_ => new List<ConstantTreeNode>()).ToArray();
    -      var constraintTrees = new List<ISymbolicExpressionTree>();
    +      constraintTrees = new List<ISymbolicExpressionTree>();
          foreach (var constraint in intervalConstraints.Constraints) {
            if (constraint.IsDerivation) {
    …
                // convert variables named theta back to constants
                var df_prepared = ReplaceVarWithConst(df_smaller_upper, thetaNames, thetaValues, allThetaNodes);
    -            constraintTrees.Add(df_prepared);
    +            constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
              }
              if (constraint.Interval.LowerBound > double.NegativeInfinity) {
                var df_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)df.Clone());
                // convert variables named theta back to constants
                var df_prepared = ReplaceVarWithConst(df_larger_lower, thetaNames, thetaValues, allThetaNodes);
    -            constraintTrees.Add(df_prepared);
    +            constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
              }
            } else {
    …
                // convert variables named theta back to constants
                var df_prepared = ReplaceVarWithConst(f_smaller_upper, thetaNames, thetaValues, allThetaNodes);
    -            constraintTrees.Add(df_prepared);
    +            constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
              }
              if (constraint.Interval.LowerBound > double.NegativeInfinity) {
    …
                // convert variables named theta back to constants
                var df_prepared = ReplaceVarWithConst(f_larger_lower, thetaNames, thetaValues, allThetaNodes);
    -            constraintTrees.Add(df_prepared);
    +            constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
              }
            }
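    The trees added to constraintTrees are now clones, which apparently decouples the list that ConstrainedNLS publishes as "Constraint i" results from subsequent in-place updates to the prepared trees (whose theta ConstantTreeNodes are registered in allThetaNodes and mutated by UpdateThetaValues during optimization).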
     
    …

    -      var minVal = Math.Min(-1000.0, thetaValues.Min());
    -      var maxVal = Math.Max(1000.0, thetaValues.Max());
    +      var minVal = Math.Min(-100.0, thetaValues.Min());
    +      var maxVal = Math.Max(100.0, thetaValues.Max());
          var lb = Enumerable.Repeat(minVal, thetaValues.Count).ToArray();
          var up = Enumerable.Repeat(maxVal, thetaValues.Count).ToArray();
    …
          NLOpt.nlopt_set_upper_bounds(nlopt, up);
          calculateObjectiveDelegate = new NLOpt.nlopt_func(CalculateObjective); // keep a reference to the delegate (see below)
    -      NLOpt.nlopt_set_min_objective(nlopt, calculateObjectiveDelegate, IntPtr.Zero);
    +      NLOpt.nlopt_set_min_objective(nlopt, calculateObjectiveDelegate, IntPtr.Zero); // --> without preconditioning
    +
    +      // preconditionDelegate = new NLOpt.nlopt_precond(PreconditionObjective);
    +      // NLOpt.nlopt_set_precond_min_objective(nlopt, calculateObjectiveDelegate, preconditionDelegate, IntPtr.Zero);


    …
            calculateConstraintDelegates[i] = new NLOpt.nlopt_func(CalculateConstraint);
            NLOpt.nlopt_add_inequality_constraint(nlopt, calculateConstraintDelegates[i], constraintDataPtr[i], 1e-8);
    +        //NLOpt.nlopt_add_precond_inequality_constraint(nlopt, calculateConstraintDelegates[i], preconditionDelegate, constraintDataPtr[i], 1e-8);
          }

    …
          NLOpt.nlopt_set_maxeval(nlopt, maxIterations);
        }
    +

        ~ConstrainedNLSInternal() {
    …
          bestError = minf;

    -      if (res < 0) {
    +      if (res < 0 && res != NLOpt.nlopt_result.NLOPT_FORCED_STOP) {
            throw new InvalidOperationException($"NLOpt failed {res} {NLOpt.nlopt_get_errmsg(nlopt)}");
          } else {
    +        // calculate constraints of final solution
    +        double[] _ = new double[x.Length];
    +        bestConstraintValues = new double[calculateConstraintDelegates.Length];
    +        for (int i = 0; i < calculateConstraintDelegates.Length; i++) {
    +          bestConstraintValues[i] = calculateConstraintDelegates[i].Invoke((uint)x.Length, x, _, constraintDataPtr[i]);
    +        }
    +
            // update parameters in tree
            var pIdx = 0;
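    Two behavioral changes here: a negative result code no longer throws if it is NLOPT_FORCED_STOP, so a deliberately stopped run is treated as a normal termination; and the constraint delegates are invoked once more at the final point x to record bestConstraintValues (the scratch array _ only receives the gradient output, which is discarded).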
     

          if (grad != null) {
    -        var autoDiffEval = new VectorAutoDiffEvaluator();
            autoDiffEval.Evaluate(preparedTree, problemData.Dataset, trainingRows,
              preparedTreeParameterNodes, fi_eval, jac_eval);
    …

            #region check gradient
    -        var eval = new VectorEvaluator();
    -        for (int i = 0; i < dim; i++) {
    -          // make two additional evaluations
    -          var xForNumDiff = (double[])curX.Clone();
    -          double delta = Math.Abs(xForNumDiff[i] * 1e-5);
    -          xForNumDiff[i] += delta;
    -          UpdateThetaValues(xForNumDiff);
    -          var evalHigh = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
    -          var mseHigh = MSE(target, evalHigh);
    -          xForNumDiff[i] = curX[i] - delta;
    -          UpdateThetaValues(xForNumDiff);
    -          var evalLow = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
    -          var mseLow = MSE(target, evalLow);
    -
    -          var numericDiff = (mseHigh - mseLow) / (2 * delta);
    -          var autoDiff = grad[i];
    -          if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
    -            || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
    -            throw new InvalidProgramException();
    +        if (grad != null && CheckGradient) {
    +          for (int i = 0; i < dim; i++) {
    +            // make two additional evaluations
    +            var xForNumDiff = (double[])curX.Clone();
    +            double delta = Math.Abs(xForNumDiff[i] * 1e-5);
    +            xForNumDiff[i] += delta;
    +            UpdateThetaValues(xForNumDiff);
    +            var evalHigh = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
    +            var mseHigh = MSE(target, evalHigh);
    +            xForNumDiff[i] = curX[i] - delta;
    +            UpdateThetaValues(xForNumDiff);
    +            var evalLow = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
    +            var mseLow = MSE(target, evalLow);
    +
    +            var numericDiff = (mseHigh - mseLow) / (2 * delta);
    +            var autoDiff = grad[i];
    +            if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
    +              || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
    +              throw new InvalidProgramException();
    +          }
            }
            #endregion
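    For reference, the check compares each AutoDiff component g_i against a central finite difference of the MSE objective, in LaTeX notation:

        g_i^{num} = \frac{f(x + \delta e_i) - f(x - \delta e_i)}{2\delta}, \qquad \delta = 10^{-5}\,\lvert x_i \rvert

    and treats the gradient as wrong when

        \bigl(\lvert g_i \rvert < 10^{-10} \wedge \lvert g_i^{num} \rvert > 10^{-2}\bigr) \;\vee\; \bigl(\lvert g_i \rvert \ge 10^{-10} \wedge \lvert (g_i^{num} - g_i) / g_i^{num} \rvert > 10^{-2}\bigr)

    One caveat this revision does not address: \delta vanishes when x_i = 0, so the central difference divides by zero there; a common remedy (not applied here) is \delta = 10^{-5} \max(\lvert x_i \rvert, 1).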
     
        }

    +    // // NOT WORKING YET? WHY is det(H) always zero?!
    +    // // TODO
    +    // private void PreconditionObjective(uint n, double[] x, double[] v, double[] vpre, IntPtr data) {
    +    //   UpdateThetaValues(x); // calc H(x)
    +    //
    +    //   autoDiffEval.Evaluate(preparedTree, problemData.Dataset, trainingRows,
    +    //     preparedTreeParameterNodes, fi_eval, jac_eval);
    +    //   var k = jac_eval.GetLength(0);
    +    //   var h = new double[n, n];
    +    //
    +    //   // calc residuals and scale jac_eval
    +    //   var f = -2.0 / k;
    +    //   for (int i = 0; i < k; i++) {
    +    //     var r = target[i] - fi_eval[i];
    +    //     for (int j = 0; j < n; j++) {
    +    //       jac_eval[i, j] *= f * r;
    +    //     }
    +    //   }
    +    //
    +    //   // approximate hessian H(x) = J(x)^T * J(x)
    +    //   alglib.rmatrixgemm((int)n, (int)n, k,
    +    //     1.0, jac_eval, 0, 0, 1,  // transposed
    +    //     jac_eval, 0, 0, 0,
    +    //     0.0, ref h, 0, 0,
    +    //     null
    +    //     );
    +    //
    +    //   // scale v
    +    //   alglib.rmatrixmv((int)n, (int)n, h, 0, 0, 0, v, 0, ref vpre, 0, alglib.serial);
    +    //   var det = alglib.matdet.rmatrixdet(h, (int)n, alglib.serial);
    +    // }
    +
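    The commented-out preconditioner aims for a Gauss-Newton approximation of the Hessian of the MSE objective f(\theta) = \frac{1}{k} \sum_{i=1}^{k} (y_i - \varphi(x_i, \theta))^2, i.e.

        H(\theta) \approx \frac{2}{k} J^{\top} J, \qquad J_{ij} = \frac{\partial \varphi(x_i, \theta)}{\partial \theta_j}

    Two leads for the det(H) = 0 question in the TODO, offered as possibilities rather than a confirmed diagnosis. First, \operatorname{rank}(J^{\top}J) = \operatorname{rank}(J) \le \min(k, n), so H is singular whenever J is rank-deficient (a parameter with no numeric influence on any output, or collinear Jacobian columns). Second, the sketch scales every Jacobian row by (-2/k)\,r_i before forming the product, which computes J^{\top} \operatorname{diag}\bigl((2 r_i / k)^2\bigr) J instead of \frac{2}{k} J^{\top} J; rows with near-zero residuals are then effectively zeroed out, pushing the product further toward singularity as the fit improves.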
        private double MSE(double[] a, double[] b) {
          Trace.Assert(a.Length == b.Length);
    …
            bestSolution = (double[])curX.Clone();
          }
    +      curError = curF;
        }

    …

          #region check gradient
    -      for (int i = 0; i < dim; i++) {
    -        // make two additional evaluations
    -        var xForNumDiff = (double[])curX.Clone();
    -        double delta = Math.Abs(xForNumDiff[i] * 1e-5);
    -        xForNumDiff[i] += delta;
    -        UpdateThetaValues(xForNumDiff);
    -        var evalHigh = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
    -          out double[] unusedLowerGradientHigh, out double[] unusedUpperGradientHigh);
    -
    -        xForNumDiff[i] = curX[i] - delta;
    -        UpdateThetaValues(xForNumDiff);
    -        var evalLow = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
    -          out double[] unusedLowerGradientLow, out double[] unusedUpperGradientLow);
    -
    -        var numericDiff = (evalHigh.UpperBound - evalLow.UpperBound) / (2 * delta);
    -        var autoDiff = grad[i];
    -        // XXX GRADIENT FOR UPPER BOUND FOR FUNCTION IS ZERO FOR THE FIRST SET OF VARIABLES? GRADIENT FOR ADDITION INCORRECT?!
    -        if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
    -          || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
    -          throw new InvalidProgramException();
    -      }
    +      if (grad != null && CheckGradient)
    +        for (int i = 0; i < dim; i++) {
    +          // make two additional evaluations
    +          var xForNumDiff = (double[])curX.Clone();
    +          double delta = Math.Abs(xForNumDiff[i] * 1e-5);
    +          xForNumDiff[i] += delta;
    +          UpdateThetaValues(xForNumDiff);
    +          var evalHigh = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
    +            out double[] unusedLowerGradientHigh, out double[] unusedUpperGradientHigh);
    +
    +          xForNumDiff[i] = curX[i] - delta;
    +          UpdateThetaValues(xForNumDiff);
    +          var evalLow = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
    +            out double[] unusedLowerGradientLow, out double[] unusedUpperGradientLow);
    +
    +          var numericDiff = (evalHigh.UpperBound - evalLow.UpperBound) / (2 * delta);
    +          var autoDiff = grad[i];
    +
    +          if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
    +            || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
    +            throw new InvalidProgramException();
    +        }
          #endregion

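    The objective-side and constraint-side verifications follow the same pattern and are now both gated by CheckGradient, since each check costs 2·dim extra evaluations per solver callback. A self-contained sketch of the shared logic (the delegate-based form is illustrative and not part of the HeuristicLab API):

        using System;

        static class GradientCheckUtil {
          // Compares an analytic (e.g. AutoDiff) gradient against central finite
          // differences of f, using the same tolerances as the diff above.
          public static void Verify(Func<double[], double> f, double[] x, double[] analyticGrad) {
            for (int i = 0; i < x.Length; i++) {
              var xs = (double[])x.Clone();
              double delta = Math.Abs(x[i] * 1e-5);   // caveat: zero when x[i] == 0
              xs[i] = x[i] + delta;
              double fHigh = f(xs);
              xs[i] = x[i] - delta;
              double fLow = f(xs);
              double numericDiff = (fHigh - fLow) / (2 * delta);
              double autoDiff = analyticGrad[i];
              if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
                || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
                throw new InvalidProgramException($"gradient mismatch in component {i}: numeric={numericDiff}, autodiff={autoDiff}");
            }
          }
        }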
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/NLOpt.cs

    r17197 → r17204

    …
        /* A preconditioner, which preconditions v at x to return vpre.
           (The meaning of "preconditioning" is algorithm-dependent.) */
    -    public delegate IntPtr nlopt_precond(uint n,
    +    public delegate void nlopt_precond(uint n,
          [In] [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 0)] double[] x,
          [In] [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 0)] double[] v,
    …
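    The return-type fix matters for interop correctness: the native NLopt header declares the preconditioner callback as typedef void (*nlopt_precond)(unsigned n, const double *x, const double *v, double *vpre, void *f_data); so the managed delegate must also return void. A delegate returning IntPtr disagrees with the native signature, and such a mismatch in a reverse P/Invoke callback is undefined behavior at the ABI level, even if it happens to work on some platforms.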