Changeset 17200 for branches/2994-AutoDiffForIntervals
- Timestamp: 08/12/19 10:50:02
- Location: branches/2994-AutoDiffForIntervals
- Files: 2 edited
Legend:
- Unmodified: unprefixed context lines (… marks elided code)
- Added: lines prefixed with +
- Removed: lines prefixed with -
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Regression.Symbolic.Extensions/ConstrainedNLSInternal.cs
diff r17197 → r17200

  using System;
  using System.Collections.Generic;
+ using System.Diagnostics;
  using System.Linq;
  using System.Runtime.InteropServices;
…
        bestSolution = x;
        bestError = minf;

        if (res < 0) {
          throw new InvalidOperationException($"NLOpt failed {res} {NLOpt.nlopt_get_errmsg(nlopt)}");
…
        // average
        for (int j = 0; j < grad.Length; j++) { grad[j] /= target.Length; }
+
+       #region check gradient
+       var eval = new VectorEvaluator();
+       for (int i = 0; i < dim; i++) {
+         // make two additional evaluations
+         var xForNumDiff = (double[])curX.Clone();
+         double delta = Math.Abs(xForNumDiff[i] * 1e-5);
+         xForNumDiff[i] += delta;
+         UpdateThetaValues(xForNumDiff);
+         var evalHigh = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
+         var mseHigh = MSE(target, evalHigh);
+         xForNumDiff[i] = curX[i] - delta;
+         UpdateThetaValues(xForNumDiff);
+         var evalLow = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);
+         var mseLow = MSE(target, evalLow);
+
+         var numericDiff = (mseHigh - mseLow) / (2 * delta);
+         var autoDiff = grad[i];
+         if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
+           || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
+           throw new InvalidProgramException();
+       }
+       #endregion
      } else {
        var eval = new VectorEvaluator();
        var prediction = eval.Evaluate(preparedTree, problemData.Dataset, trainingRows);

-       // calc sum of squared errors and gradient
+       // calc sum of squared errors
        sse = 0.0;
        for (int i = 0; i < target.Length; i++) {
…
        if (double.IsNaN(sse)) return double.MaxValue;
        return sse / target.Length;
+     }
+
+     private double MSE(double[] a, double[] b) {
+       Trace.Assert(a.Length == b.Length);
+       var sse = 0.0;
+       for (int i = 0; i < a.Length; i++) sse += (a[i] - b[i]) * (a[i] - b[i]);
+       return sse / a.Length;
      }

…
      // we transformed this to a constraint c(x) <= 0, so only the upper bound is relevant for us
      if (grad != null) for (int j = 0; j < grad.Length; j++) { grad[j] = upperGradient[j]; }
+
+     #region check gradient
+     for (int i = 0; i < dim; i++) {
+       // make two additional evaluations
+       var xForNumDiff = (double[])curX.Clone();
+       double delta = Math.Abs(xForNumDiff[i] * 1e-5);
+       xForNumDiff[i] += delta;
+       UpdateThetaValues(xForNumDiff);
+       var evalHigh = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
+         out double[] unusedLowerGradientHigh, out double[] unusedUpperGradientHigh);
+
+       xForNumDiff[i] = curX[i] - delta;
+       UpdateThetaValues(xForNumDiff);
+       var evalLow = intervalEvaluator.Evaluate(constraintData.Tree, dataIntervals, constraintData.ParameterNodes,
+         out double[] unusedLowerGradientLow, out double[] unusedUpperGradientLow);
+
+       var numericDiff = (evalHigh.UpperBound - evalLow.UpperBound) / (2 * delta);
+       var autoDiff = grad[i];
+       // XXX GRADIENT FOR UPPER BOUND FOR FUNCTION IS ZERO FOR THE FIRST SET OF VARIABLES? GRADIENT FOR ADDITION INCORRECT?!
+       if ((Math.Abs(autoDiff) < 1e-10 && Math.Abs(numericDiff) > 1e-2)
+         || (Math.Abs(autoDiff) >= 1e-10 && Math.Abs((numericDiff - autoDiff) / numericDiff) > 1e-2))
+         throw new InvalidProgramException();
+     }
+     #endregion
+
+
      UpdateConstraintViolations(constraintData.Idx, interval.UpperBound);
      if (double.IsNaN(interval.UpperBound)) return double.MaxValue;
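Both new "check gradient" regions follow the same pattern: perturb one parameter at a time, re-evaluate the objective (or the constraint's upper bound), and compare a central finite difference against the autodiff gradient, aborting on a large absolute or relative mismatch. The following is a minimal, self-contained sketch of that pattern with plain delegates; the GradientCheck/Check names, the exception type, and the zero-step fallback are assumptions made for the example, not code from the changeset.

using System;

static class GradientCheck {
  // Compares an analytic gradient against central finite differences of a scalar objective f.
  // Uses the same tolerance scheme as the changeset: an absolute test when the analytic
  // derivative is essentially zero, a relative test otherwise.
  public static void Check(Func<double[], double> f, double[] x, double[] analyticGrad,
                           double relStep = 1e-5, double tol = 1e-2) {
    for (int i = 0; i < x.Length; i++) {
      double delta = Math.Abs(x[i] * relStep);
      if (delta == 0) delta = relStep;              // fallback for x[i] == 0 (assumption, not in the changeset)

      var xHigh = (double[])x.Clone(); xHigh[i] += delta;
      var xLow  = (double[])x.Clone(); xLow[i]  -= delta;
      double numericDiff = (f(xHigh) - f(xLow)) / (2 * delta);
      double autoDiff = analyticGrad[i];

      bool mismatch = Math.Abs(autoDiff) < 1e-10
        ? Math.Abs(numericDiff) > tol
        : Math.Abs((numericDiff - autoDiff) / numericDiff) > tol;
      if (mismatch)
        throw new InvalidOperationException(
          $"gradient mismatch in component {i}: numeric {numericDiff}, analytic {autoDiff}");
    }
  }
}

For example, GradientCheck.Check(x => x[0] * x[0] + 3 * x[1], new[] { 2.0, 1.0 }, new[] { 4.0, 3.0 }) passes, while a wrong gradient such as new[] { 4.0, 0.0 } throws.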
branches/2994-AutoDiffForIntervals/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/Interpreter.cs
diff r17131 → r17200

      [DebuggerBrowsable(DebuggerBrowsableState.Never)]
      public AlgebraicInterval Zero => new AlgebraicInterval(0.0, 0.0);
+     [DebuggerBrowsable(DebuggerBrowsableState.Never)]
      public AlgebraicInterval One => new AlgebraicInterval(1.0, 1.0);
+
      public AlgebraicInterval Add(AlgebraicInterval a) {
        low.Add(a.low);
…
      public AlgebraicInterval Div(AlgebraicInterval a) {
        if (a.Contains(0.0)) {
-         if (a.low.Value.Value.IsAlmost(0.0) && a.high.Value.Value.IsAlmost(0.0)) {
+         if (a.low.Value.Value == 0 && a.high.Value.Value == 0) {
            low = new MultivariateDual<AlgebraicDouble>(double.NegativeInfinity);
            high = new MultivariateDual<AlgebraicDouble>(double.PositiveInfinity);
-         } else if (a.low.Value.Value.IsAlmost(0.0))
+         } else if (a.low.Value.Value == 0)
            Mul(new AlgebraicInterval(a.Clone().high.Inv(), new MultivariateDual<AlgebraicDouble>(double.PositiveInfinity)));
          else
…
        //divide the interval by PI/2 so that the optima lie at x element of N (0,1,2,3,4,...)
        double Pihalf = Math.PI / 2;
-       var scaled = this.Clone().Scale(1.0 / Pihalf);
+       var scaled = a.Clone().Scale(1.0 / Pihalf);
        //move to positive scale
        if (scaled.LowerBound.Value.Value < 0) {
…
        }

-       low = new MultivariateDual<AlgebraicDouble>(sinValues.Min());
+       low = new MultivariateDual<AlgebraicDouble>(sinValues.Min()); // TODO, XXX FIX CALCULATION TO SUPPORT GRADIENTS!
        high = new MultivariateDual<AlgebraicDouble>(sinValues.Max());
        return this;
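The Div change switches from IsAlmost to exact == 0 checks when classifying a divisor interval that contains zero. The case analysis behind those branches can be illustrated with plain doubles; the sketch below is a simplified, value-only stand-in (a hypothetical Interval struct with no gradient tracking, unlike the MultivariateDual<AlgebraicDouble>-based AlgebraicInterval in the changeset), using the common convention that a divisor strictly containing zero yields an unbounded result.

using System;

// Simplified value-only interval, just to show the division cases.
readonly struct Interval {
  public readonly double Low, High;
  public Interval(double low, double high) { Low = low; High = high; }
  public bool Contains(double v) => Low <= v && v <= High;

  // Treat 0 * ±inf as 0 when collecting endpoint candidates.
  static double Times(double a, double b) {
    double p = a * b;
    return double.IsNaN(p) ? 0.0 : p;
  }

  public static Interval Mul(Interval x, Interval y) {
    double[] p = {
      Times(x.Low, y.Low), Times(x.Low, y.High),
      Times(x.High, y.Low), Times(x.High, y.High)
    };
    double min = p[0], max = p[0];
    foreach (double v in p) { if (v < min) min = v; if (v > max) max = v; }
    return new Interval(min, max);
  }

  public static Interval Div(Interval x, Interval y) {
    if (!y.Contains(0.0))
      return Mul(x, new Interval(1.0 / y.High, 1.0 / y.Low));                  // regular case: multiply by the reciprocal interval
    if (y.Low == 0 && y.High == 0)
      return new Interval(double.NegativeInfinity, double.PositiveInfinity);   // x / [0,0]
    if (y.Low == 0)
      return Mul(x, new Interval(1.0 / y.High, double.PositiveInfinity));      // divisor touches zero from above
    if (y.High == 0)
      return Mul(x, new Interval(double.NegativeInfinity, 1.0 / y.Low));       // divisor touches zero from below
    return new Interval(double.NegativeInfinity, double.PositiveInfinity);     // zero strictly inside the divisor
  }
}

One practical effect of the exact comparison: a divisor such as [-1e-12, 2] no longer matches the touches-zero branch and instead falls through to the final else branch (not shown in the excerpt above).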
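The sine hunks fix this.Clone() to a.Clone() and keep the range-reduction idea of scaling the argument by PI/2 so that the extrema of sin land at integer points of the scaled interval. A value-only version of that bound computation (no gradients, which is exactly the TODO left in the changeset) can be sketched as follows; SinInterval is a hypothetical helper that locates the extrema at pi/2 + 2k*pi directly instead of rescaling, not the changeset's code.

using System;

static class IntervalSine {
  // Value-only bounds for sin over [a, b]: the bound is ±1 whenever the interval
  // contains a maximum (pi/2 + 2k*pi) or a minimum (-pi/2 + 2k*pi); otherwise the
  // extrema lie at the interval endpoints.
  public static (double Low, double High) SinInterval(double a, double b) {
    if (b - a >= 2 * Math.PI) return (-1.0, 1.0);   // a full period is covered

    double twoPi = 2 * Math.PI;
    bool containsMax = Math.Floor((b - Math.PI / 2) / twoPi) >= Math.Ceiling((a - Math.PI / 2) / twoPi);
    bool containsMin = Math.Floor((b + Math.PI / 2) / twoPi) >= Math.Ceiling((a + Math.PI / 2) / twoPi);

    double low  = containsMin ? -1.0 : Math.Min(Math.Sin(a), Math.Sin(b));
    double high = containsMax ?  1.0 : Math.Max(Math.Sin(a), Math.Sin(b));
    return (low, high);
  }
}

The changeset's rescaling by PI/2 and sampling at integer grid points yields the same enclosure; the open issue noted in its TODO comment is that picking the bounds via Min()/Max() of sampled values discards the gradient information carried by the dual numbers.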