Timestamp: 08/13/19 09:29:11 (5 years ago)
Location: branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions
Files: 3 edited
Legend:
  + added
  - removed
  (all other lines unmodified)
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedNLS.cs
r17197 → r17204

       get { return (IFixedValueParameter<DoubleValue>)Parameters["MaxTime"]; }
     }
+    public IFixedValueParameter<BoolValue> CheckGradientParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters["CheckGradient"]; }
+    }
     public int Iterations { get { return IterationsParameter.Value.Value; } set { IterationsParameter.Value.Value = value; } }
…
       get { return ModelStructureParameter.Value.Value; }
       set { ModelStructureParameter.Value.Value = value; }
+    }
+    public bool CheckGradient {
+      get { return CheckGradientParameter.Value.Value; }
+      set { CheckGradientParameter.Value.Value = value; }
     }
…
       Parameters.Add(new FixedValueParameter<DoubleValue>("FuncToleranceAbs", new DoubleValue(0)));
       Parameters.Add(new FixedValueParameter<DoubleValue>("MaxTime", new DoubleValue(10)));
+      Parameters.Add(new FixedValueParameter<BoolValue>("CheckGradient", "Flag to indicate whether the gradient should be checked using numeric approximation", new BoolValue(false)));
+
+      CheckGradientParameter.Hidden = true;
     }

     public ConstrainedNLS(ConstrainedNLS original, Cloner cloner) : base(original, cloner) {
+    }
+
+    [StorableHook(HookType.AfterDeserialization)]
+    public void AfterDeserializationHook() {
+      if (!Parameters.ContainsKey("CheckGradient")) {
+        Parameters.Add(new FixedValueParameter<BoolValue>("CheckGradient", "Flag to indicate whether the gradient should be checked using numeric approximation", new BoolValue(false)));
+
+        CheckGradientParameter.Hidden = true;
+      }
     }
…
       Results.AddOrUpdateResult("Evaluations", functionEvaluations);
       var bestError = new DoubleValue(double.MaxValue);
+      var curError = new DoubleValue(double.MaxValue);
       Results.AddOrUpdateResult("Best error", bestError);
+      Results.AddOrUpdateResult("Current error", curError);
       Results.AddOrUpdateResult("Tree", tree);
       var qualitiesTable = new DataTable("Qualities");
…
       var state = new ConstrainedNLSInternal(Solver.Value, tree, Iterations, problem.ProblemData, FuncToleranceRel, FuncToleranceAbs, MaxTime);
+      if (CheckGradient) state.CheckGradient = true;
+      int idx = 0;
+      foreach (var constraintTree in state.constraintTrees) {
+        Results.AddOrUpdateResult($"Constraint {idx++}", constraintTree);
+      }

       // we use a listener model here to get state from the solver
…
       bestQualityRow.Values.Add(bestError.Value);

-      Results.AddOrUpdateResult("Best solution ", CreateSolution(state.BestTree, problem.ProblemData));
+      Results.AddOrUpdateResult("Best solution", CreateSolution((ISymbolicExpressionTree)state.BestTree.Clone(), problem.ProblemData));
+      Results.AddOrUpdateResult("Best solution constraint values", new DoubleArray(state.BestConstraintValues));
…
         functionEvaluations.Value++;
         bestError.Value = state.BestError;
+        curError.Value = state.CurError;
         curQualityRow.Values.Add(state.CurError);
         bestQualityRow.Values.Add(bestError.Value);
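In short, this file gains a hidden Boolean "CheckGradient" parameter that toggles numeric verification of the autodiff gradients, an after-deserialization hook that injects the parameter into runs stored before this revision (otherwise Parameters["CheckGradient"] would throw for old files), a "Current error" result next to "Best error", per-constraint trees in the results, and cloning of the best tree before it is published as a solution. A minimal usage sketch for the new flag (hypothetical driver code; it assumes the customary public parameterless constructor of HeuristicLab algorithms, which this diff does not show):

  // hypothetical usage; property names as introduced in the diff above
  var alg = new ConstrainedNLS {
    Iterations = 100,
    CheckGradient = true  // verify autodiff gradients against central differences while solving
  };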
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedNLSInternal.cs
r17200 → r17204

     public ISymbolicExpressionTree BestTree => bestTree;

+    private double[] bestConstraintValues;
+    public double[] BestConstraintValues => bestConstraintValues;
+
+    public bool CheckGradient { get; internal set; }
+
     // begin internal state
     private IntPtr nlopt;
     private SymbolicDataAnalysisExpressionTreeLinearInterpreter interpreter;
     private readonly NLOpt.nlopt_func calculateObjectiveDelegate; // must hold the delegate to prevent GC
+    private readonly NLOpt.nlopt_precond preconditionDelegate;
     private readonly IntPtr[] constraintDataPtr; // must hold the objects to prevent GC
     private readonly NLOpt.nlopt_func[] calculateConstraintDelegates; // must hold the delegates to prevent GC
…
     private readonly ISymbolicExpressionTreeNode[] preparedTreeParameterNodes;
     private readonly List<ConstantTreeNode>[] allThetaNodes;
+    public List<ISymbolicExpressionTree> constraintTrees; // TODO make local in ctor (public for debugging)
+
     private readonly double[] fi_eval;
     private readonly double[,] jac_eval;
     private readonly ISymbolicExpressionTree scaledTree;
+    private readonly VectorAutoDiffEvaluator autoDiffEval;
+    private readonly VectorEvaluator eval;

     // end internal state
…
       this.problemData = problemData;
       this.interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();
+      this.autoDiffEval = new VectorAutoDiffEvaluator();
+      this.eval = new VectorEvaluator();
+
+      CheckGradient = false;

       var intervalConstraints = problemData.IntervalConstraints;
…
       Dictionary<string, ISymbolicExpressionTree> derivatives = new Dictionary<string, ISymbolicExpressionTree>();
       allThetaNodes = thetaNames.Select(_ => new List<ConstantTreeNode>()).ToArray();
-      var constraintTrees = new List<ISymbolicExpressionTree>();
+      constraintTrees = new List<ISymbolicExpressionTree>();
       foreach (var constraint in intervalConstraints.Constraints) {
         if (constraint.IsDerivation) {
…
           // convert variables named theta back to constants
           var df_prepared = ReplaceVarWithConst(df_smaller_upper, thetaNames, thetaValues, allThetaNodes);
-          constraintTrees.Add( df_prepared);
+          constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
           if (constraint.Interval.LowerBound > double.NegativeInfinity) {
             var df_larger_lower = Subtract(CreateConstant(constraint.Interval.LowerBound), (ISymbolicExpressionTree)df.Clone());
             // convert variables named theta back to constants
             var df_prepared = ReplaceVarWithConst(df_larger_lower, thetaNames, thetaValues, allThetaNodes);
-            constraintTrees.Add( df_prepared);
+            constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
           }
         } else {
…
           // convert variables named theta back to constants
           var df_prepared = ReplaceVarWithConst(f_smaller_upper, thetaNames, thetaValues, allThetaNodes);
-          constraintTrees.Add( df_prepared);
+          constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
           if (constraint.Interval.LowerBound > double.NegativeInfinity) {
…
             // convert variables named theta back to constants
             var df_prepared = ReplaceVarWithConst(f_larger_lower, thetaNames, thetaValues, allThetaNodes);
-            constraintTrees.Add( df_prepared);
+            constraintTrees.Add((ISymbolicExpressionTree)df_prepared.Clone());
           }
         }
…
-      var minVal = Math.Min(-1000.0, thetaValues.Min());
-      var maxVal = Math.Max(1000.0, thetaValues.Max());
+      var minVal = Math.Min(-100.0, thetaValues.Min());
+      var maxVal = Math.Max(100.0, thetaValues.Max());
       var lb = Enumerable.Repeat(minVal, thetaValues.Count).ToArray();
       var up = Enumerable.Repeat(maxVal, thetaValues.Count).ToArray();
…
       NLOpt.nlopt_set_upper_bounds(nlopt, up);
       calculateObjectiveDelegate = new NLOpt.nlopt_func(CalculateObjective); // keep a reference to the delegate (see below)
-      NLOpt.nlopt_set_min_objective(nlopt, calculateObjectiveDelegate, IntPtr.Zero);
+      NLOpt.nlopt_set_min_objective(nlopt, calculateObjectiveDelegate, IntPtr.Zero); // --> without preconditioning
+
+      // preconditionDelegate = new NLOpt.nlopt_precond(PreconditionObjective);
+      // NLOpt.nlopt_set_precond_min_objective(nlopt, calculateObjectiveDelegate, preconditionDelegate, IntPtr.Zero);
…
         calculateConstraintDelegates[i] = new NLOpt.nlopt_func(CalculateConstraint);
         NLOpt.nlopt_add_inequality_constraint(nlopt, calculateConstraintDelegates[i], constraintDataPtr[i], 1e-8);
+        //NLOpt.nlopt_add_precond_inequality_constraint(nlopt, calculateConstraintDelegates[i], preconditionDelegate, constraintDataPtr[i], 1e-8);
       }
…
       NLOpt.nlopt_set_maxeval(nlopt, maxIterations);
     }
+

     ~ConstrainedNLSInternal() {
…
       bestError = minf;

-      if (res < 0) {
+      if (res < 0 && res != NLOpt.nlopt_result.NLOPT_FORCED_STOP) {
         throw new InvalidOperationException($"NLOpt failed {res} {NLOpt.nlopt_get_errmsg(nlopt)}");
       } else {
+        // calculate constraints of final solution
+        double[] _ = new double[x.Length];
+        bestConstraintValues = new double[calculateConstraintDelegates.Length];
+        for (int i = 0; i < calculateConstraintDelegates.Length; i++) {
+          bestConstraintValues[i] = calculateConstraintDelegates[i].Invoke((uint)x.Length, x, _, constraintDataPtr[i]);
+        }
+
         // update parameters in tree
         var pIdx = 0;
…

       if (grad != null) {
-        var autoDiffEval = new VectorAutoDiffEvaluator();
         autoDiffEval.Evaluate(preparedTree, problemData.Dataset, trainingRows,
           preparedTreeParameterNodes, fi_eval, jac_eval);
…

       #region check gradient
-      var eval = new VectorEvaluator();
-      for (int i = 0; i < dim; i++) {
-        // make two additional evaluations
-        var xForNumDiff = (double[])curX.Clone();
-        double delta = Math.Abs(xForNumDiff[i] * 1e-5);
-        xForNumDiff[i] += delta;
-        UpdateThetaValues(xForNumDiff);
-        var evalHigh = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
-        var mseHigh = MSE(target, evalHigh);
-        xForNumDiff[i] = curX[i] - delta;
-        UpdateThetaValues(xForNumDiff);
-        var evalLow = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
-        var mseLow = MSE(target, evalLow);
-
-        var numericDiff = (mseHigh - mseLow) / (2 * delta);
-        var autoDiff = grad[i];
-        if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
-          || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
-          throw new InvalidProgramException();
+      if (grad != null && CheckGradient) {
+        for (int i = 0; i < dim; i++) {
+          // make two additional evaluations
+          var xForNumDiff = (double[])curX.Clone();
+          double delta = Math.Abs(xForNumDiff[i] * 1e-5);
+          xForNumDiff[i] += delta;
+          UpdateThetaValues(xForNumDiff);
+          var evalHigh = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
+          var mseHigh = MSE(target, evalHigh);
+          xForNumDiff[i] = curX[i] - delta;
+          UpdateThetaValues(xForNumDiff);
+          var evalLow = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
+          var mseLow = MSE(target, evalLow);
+
+          var numericDiff = (mseHigh - mseLow) / (2 * delta);
+          var autoDiff = grad[i];
+          if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
+            || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
+            throw new InvalidProgramException();
+        }
       }
       #endregion
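The block above is a central-difference gradient check, now guarded by the new CheckGradient flag and reusing the cached VectorEvaluator instead of allocating one per call: each autodiff component grad[i] is compared against (f(x + δ·e_i) − f(x − δ·e_i)) / (2δ), with an absolute tolerance for near-zero components and a relative tolerance otherwise. A self-contained sketch of the same idea (a hypothetical standalone helper, not part of this changeset):

  using System;

  static class GradientCheck {
    // Verifies an analytic/autodiff gradient against central finite differences.
    public static void Verify(Func<double[], double> f, double[] x, double[] analyticGrad) {
      for (int i = 0; i < x.Length; i++) {
        var xTmp = (double[])x.Clone();
        double delta = Math.Abs(x[i] * 1e-5);              // relative step size
        xTmp[i] = x[i] + delta;
        double fHigh = f(xTmp);
        xTmp[i] = x[i] - delta;
        double fLow = f(xTmp);

        double numericDiff = (fHigh - fLow) / (2 * delta); // O(delta^2) accurate
        double autoDiff = analyticGrad[i];
        // absolute tolerance near zero, relative tolerance otherwise (mirrors the diff)
        if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
          || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
          throw new InvalidOperationException($"gradient mismatch in component {i}: {autoDiff} vs. {numericDiff}");
      }
    }
  }

One caveat that the changeset's code shares: the relative step δ = |x_i| · 1e-5 degenerates to zero when x_i = 0, making numericDiff NaN; a floor such as Math.Max(delta, 1e-8) would avoid that.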
…
       }

+    // // NOT WORKING YET? WHY is det(H) always zero?!
+    // // TODO
+    // private void PreconditionObjective(uint n, double[] x, double[] v, double[] vpre, IntPtr data) {
+    //   UpdateThetaValues(x); // calc H(x)
+    //
+    //   autoDiffEval.Evaluate(preparedTree, problemData.Dataset, trainingRows,
+    //     preparedTreeParameterNodes, fi_eval, jac_eval);
+    //   var k = jac_eval.GetLength(0);
+    //   var h = new double[n, n];
+    //
+    //   // calc residuals and scale jac_eval
+    //   var f = -2.0 / k;
+    //   for (int i = 0; i < k; i++) {
+    //     var r = target[i] - fi_eval[i];
+    //     for (int j = 0; j < n; j++) {
+    //       jac_eval[i, j] *= f * r;
+    //     }
+    //   }
+    //
+    //   // approximate hessian H(x) = J(x)^T * J(x)
+    //   alglib.rmatrixgemm((int)n, (int)n, k,
+    //     1.0, jac_eval, 0, 0, 1, // transposed
+    //     jac_eval, 0, 0, 0,
+    //     0.0, ref h, 0, 0,
+    //     null
+    //   );
+    //
+    //   // scale v
+    //   alglib.rmatrixmv((int)n, (int)n, h, 0, 0, 0, v, 0, ref vpre, 0, alglib.serial);
+    //   var det = alglib.matdet.rmatrixdet(h, (int)n, alglib.serial);
+    // }
+

     private double MSE(double[] a, double[] b) {
       Trace.Assert(a.Length == b.Length);
…
         bestSolution = (double[])curX.Clone();
       }
+      curError = curF;
     }
…

       #region check gradient
-      for (int i = 0; i < dim; i++) {
-        // make two additional evaluations
-        var xForNumDiff = (double[])curX.Clone();
-        double delta = Math.Abs(xForNumDiff[i] * 1e-5);
-        xForNumDiff[i] += delta;
-        UpdateThetaValues(xForNumDiff);
-        var evalHigh = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
-          out double[] unusedLowerGradientHigh, out double[] unusedUpperGradientHigh);
-
-        xForNumDiff[i] = curX[i] - delta;
-        UpdateThetaValues(xForNumDiff);
-        var evalLow = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
-          out double[] unusedLowerGradientLow, out double[] unusedUpperGradientLow);
-
-        var numericDiff = (evalHigh.UpperBound - evalLow.UpperBound) / (2 * delta);
-        var autoDiff = grad[i];
-        // XXX GRADIENT FOR UPPER BOUND FOR FUNCTION IS ZERO FOR THE FIRST SET OF VARIABLES? GRADIENT FOR ADDITION INCORRECT?!
-        if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
-          || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
-          throw new InvalidProgramException();
-      }
+      if (grad != null && CheckGradient)
+        for (int i = 0; i < dim; i++) {
+          // make two additional evaluations
+          var xForNumDiff = (double[])curX.Clone();
+          double delta = Math.Abs(xForNumDiff[i] * 1e-5);
+          xForNumDiff[i] += delta;
+          UpdateThetaValues(xForNumDiff);
+          var evalHigh = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
+            out double[] unusedLowerGradientHigh, out double[] unusedUpperGradientHigh);
+
+          xForNumDiff[i] = curX[i] - delta;
+          UpdateThetaValues(xForNumDiff);
+          var evalLow = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
+            out double[] unusedLowerGradientLow, out double[] unusedUpperGradientLow);
+
+          var numericDiff = (evalHigh.UpperBound - evalLow.UpperBound) / (2 * delta);
+          var autoDiff = grad[i];
+
+          if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
+            || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
+            throw new InvalidProgramException();
+        }
       #endregion
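On the TODO in the commented-out PreconditionObjective: for the scaled squared-error objective the Gauss-Newton approximation drops the second-order term of the Hessian. A sketch of the standard derivation (not part of the changeset):

  f(\theta) = \frac{1}{k} \sum_{i=1}^{k} r_i^2, \quad r_i = y_i - \phi(x_i, \theta), \quad J_{ij} = \partial \phi(x_i, \theta) / \partial \theta_j
  \nabla f = -\frac{2}{k} J^\top r
  H = \frac{2}{k} \Big( J^\top J - \sum_{i=1}^{k} r_i \nabla^2 \phi_i \Big) \approx \frac{2}{k} J^\top J

J^T J is only positive semidefinite, so det(H) = 0 whenever rank(J) < n, e.g. with fewer training rows than theta parameters or with linearly dependent Jacobian columns (duplicated or inactive parameters). Note also that the sketch scales jac_eval by f·r_i before the GEMM, so the residual weight enters both factors and the product is Σ_i (f·r_i)² ∇φ_i ∇φ_iᵀ rather than the Gauss-Newton (2/k)·JᵀJ; rows with near-zero residuals then vanish from the sum, pushing the matrix toward singularity near a good fit. This is a plausible, but unconfirmed, answer to the "why is det(H) always zero?" question.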
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/NLOpt.cs
r17197 → r17204

     /* A preconditioner, which preconditions v at x to return vpre.
        (The meaning of "preconditioning" is algorithm-dependent.) */
-    public delegate IntPtr nlopt_precond(uint n,
+    public delegate void nlopt_precond(uint n,
       [In] [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 0)] double[] x,
       [In] [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 0)] double[] v,
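The corrected return type matches the native callback: in NLopt's C API the preconditioner callback returns void and writes its result into the vpre output array (as the comment states, it "preconditions v at x to return vpre"), so a managed delegate returning IntPtr did not match the native signature expected by nlopt_set_precond_min_objective. (Statement based on the NLopt C headers, which are not part of this changeset.)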