- Timestamp: 07/05/16 21:34:18
- Location: stable
- Files: 10 edited
stable
- Property svn:mergeinfo changed
  /trunk/sources merged: 13670,13869,13900,13916
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression
- Property svn:mergeinfo changed
  /trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression merged: 13670,13869,13900,13916
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4
- Property svn:mergeinfo changed
  /trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 merged: 13670,13869,13900,13916
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/PearsonRSquaredNestedTreeSizeEvaluator.cs
r13310 → r14004

      if (UseConstantOptimization) {
-       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
      }
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/PearsonRSquaredNumberOfVariablesEvaluator.cs
r13310 → r14004

      if (UseConstantOptimization) {
-       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
      }
      double[] qualities = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value, DecimalPlaces);
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/PearsonRSquaredTreeComplexityEvaluator.cs
r13310 → r14004

      if (UseConstantOptimization) {
-       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
      }
      double[] qualities = Calculate(interpreter, solution, estimationLimits.Lower, estimationLimits.Upper, problemData, rows, applyLinearScaling, DecimalPlaces);
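The call-site changes above (and the two analogous ones in the tree-size evaluators further down) all follow the same pattern: the positional arguments estimationLimits.Upper, estimationLimits.Lower are replaced by named arguments, which keeps the calls correct now that OptimizeConstants takes the lower limit before the upper one, and lets each caller pass the new updateVariableWeights option without spelling out every optional parameter in between. A minimal standalone sketch of why named arguments matter here; the Clamp method and its parameters are hypothetical stand-ins, not part of HeuristicLab:

using System;

class NamedArgumentSketch {
  // Hypothetical stand-in for a method whose two double parameters trade
  // places between versions (as OptimizeConstants' estimation limits did
  // in this changeset).
  static double Clamp(double value,
                      double lowerLimit = double.MinValue,
                      double upperLimit = double.MaxValue) {
    return Math.Min(Math.Max(value, lowerLimit), upperLimit);
  }

  static void Main() {
    // Positional call: both limits have the same type, so a swapped parameter
    // order compiles silently and clamps to the wrong range (prints 0).
    Console.WriteLine(Clamp(5.0, 10.0, 0.0));

    // Named call: stays correct regardless of declaration order and allows
    // skipping optional parameters, which is the style r14004 adopts (prints 5).
    Console.WriteLine(Clamp(5.0, upperLimit: 10.0, lowerLimit: 0.0));
  }
}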
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectiveEvaluator.cs
r13310 → r14004

  private const string ConstantOptimizationIterationsParameterName = "Constant optimization iterations";

+ private const string ConstantOptimizationUpdateVariableWeightsParameterName =
+   "Constant optimization update variable weights";
+
  public IFixedValueParameter<IntValue> DecimalPlacesParameter {
    get { return (IFixedValueParameter<IntValue>)Parameters[DecimalPlacesParameterName]; }
…
  }

+ public IFixedValueParameter<BoolValue> ConstantOptimizationUpdateVariableWeightsParameter {
+   get { return (IFixedValueParameter<BoolValue>)Parameters[ConstantOptimizationUpdateVariableWeightsParameterName]; }
+ }

  public int DecimalPlaces {
…
    set { ConstantOptimizationIterationsParameter.Value.Value = value; }
  }
+ public bool ConstantOptimizationUpdateVariableWeights {
+   get { return ConstantOptimizationUpdateVariableWeightsParameter.Value.Value; }
+   set { ConstantOptimizationUpdateVariableWeightsParameter.Value.Value = value; }
+ }

  [StorableConstructor]
…
    Parameters.Add(new FixedValueParameter<BoolValue>(UseConstantOptimizationParameterName, "", new BoolValue(false)));
    Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "The number of iterations constant optimization should be applied.", new IntValue(5)));
+   Parameters.Add(new FixedValueParameter<BoolValue>(ConstantOptimizationUpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized during constant optimization.", new BoolValue(true)) { Hidden = true });
  }
…
    Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "The number of iterations constant optimization should be applied.", new IntValue(5)));
  }
+ if (!Parameters.ContainsKey(ConstantOptimizationUpdateVariableWeightsParameterName)) {
+   Parameters.Add(new FixedValueParameter<BoolValue>(ConstantOptimizationUpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized during constant optimization.", new BoolValue(true)));
+ }
  }
  }
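The new option is registered hidden with a default of true, so existing experiments keep the previous behaviour, and the AfterDeserialization hook retrofits the parameter onto items stored before this revision. For runs that should tune only numeric constants, the option can be switched off through the new property. A minimal usage sketch, assuming the HeuristicLab assemblies are referenced; the concrete evaluator type, its parameterless constructor and the namespace import are assumptions for illustration, while the three properties are the ones defined in this changeset and its evaluators:

using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;

// Sketch: configure constant optimization on a multi-objective evaluator
// so that only constants are tuned and variable weights stay fixed.
var evaluator = new SymbolicRegressionMultiObjectivePearsonRSquaredTreeSizeEvaluator();
evaluator.UseConstantOptimization = true;                     // enable constant optimization
evaluator.ConstantOptimizationIterations = 10;                // iterations per evaluation
evaluator.ConstantOptimizationUpdateVariableWeights = false;  // keep variable weights unchanged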
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectiveMeanSquaredErrorTreeSizeEvaluator.cs
r13310 → r14004

      if (UseConstantOptimization) {
-       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
      }
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectivePearsonRSquaredTreeSizeEvaluator.cs
r13310 → r14004

      if (UseConstantOptimization) {
-       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+       SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
      }
      double[] qualities = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value, DecimalPlaces);
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs
r13310 → r14004

  private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
  private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
+ private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

  public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
…
    get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
  }
+ public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
+   get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
+ }

  public IntValue ConstantOptimizationIterations {
…
    get { return UpdateConstantsInTreeParameter.Value.Value; }
    set { UpdateConstantsInTreeParameter.Value.Value = value; }
+ }
+
+ public bool UpdateVariableWeights {
+   get { return UpdateVariableWeightsParameter.Value.Value; }
+   set { UpdateVariableWeightsParameter.Value.Value = value; }
  }
…
  : base() {
    Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10), true));
-   Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true));
+   Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true) { Hidden = true });
    Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1), true));
    Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
-   Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+   Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
+   Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });
  }
…
  if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
    Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+ if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
+   Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
  }
…
  IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
  quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-   constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
-   EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree);
+   constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);

  if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
…
- // TODO: swap positions of lowerEstimationLimit and upperEstimationLimit parameters
- public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
-   IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true) {
+ public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true) {

  List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
…
  AutoDiff.Term func;
- if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, out func))
+ if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out func))
    throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
  if (variableNames.Count == 0) return 0.0;

- AutoDiff.IParametricCompiledTerm compiledFunc = AutoDiff.TermUtils.Compile(func, variables.ToArray(), parameters.ToArray());
-
- List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
+ AutoDiff.IParametricCompiledTerm compiledFunc = func.Compile(variables.ToArray(), parameters.ToArray());
+
+ List<SymbolicExpressionTreeTerminalNode> terminalNodes = null;
+ if (updateVariableWeights)
+   terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
+ else
+   terminalNodes = new List<SymbolicExpressionTreeTerminalNode>(tree.Root.IterateNodesPrefix().OfType<ConstantTreeNode>());
+
+ //extract inital constants
  double[] c = new double[variables.Count];
-
  {
    c[0] = 0.0;
    c[1] = 1.0;
-   //extract inital constants
    int i = 2;
    foreach (var node in terminalNodes) {
…
      if (constantTreeNode != null)
        c[i++] = constantTreeNode.Value;
-     else if (variableTreeNode != null)
+     else if (updateVariableWeights && variableTreeNode != null)
        c[i++] = variableTreeNode.Weight;
    }
…
  //info == -7 => constant optimization failed due to wrong gradient
- if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray());
+ if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
  var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

- if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray());
+ if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
  if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
-   UpdateConstants(tree, originalConstants.Skip(2).ToArray());
+   UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
    return originalQuality;
  }
…
- private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants) {
+ private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
    int i = 0;
    foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
…
      if (constantTreeNode != null)
        constantTreeNode.Value = constants[i++];
-     else if (variableTreeNode != null)
+     else if (updateVariableWeights && variableTreeNode != null)
        variableTreeNode.Weight = constants[i++];
    }
…
- private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, out AutoDiff.Term term) {
+ private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, bool updateVariableWeights, out AutoDiff.Term term) {
    if (node.Symbol is Constant) {
      var var = new AutoDiff.Variable();
…
      parameters.Add(par);
      variableNames.Add(varNode.VariableName);
-     var w = new AutoDiff.Variable();
-     variables.Add(w);
-     term = AutoDiff.TermBuilder.Product(w, par);
+
+     if (updateVariableWeights) {
+       var w = new AutoDiff.Variable();
+       variables.Add(w);
+       term = AutoDiff.TermBuilder.Product(w, par);
+     } else {
+       term = par;
+     }
      return true;
    }
…
      foreach (var subTree in node.Subtrees) {
        AutoDiff.Term t;
-       if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out t)) {
+       if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
          term = null;
          return false;
…
      for (int i = 0; i < node.SubtreeCount; i++) {
        AutoDiff.Term t;
-       if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
+       if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, updateVariableWeights, out t)) {
          term = null;
          return false;
…
    }
    if (node.Symbol is Multiplication) {
-     AutoDiff.Term a, b;
-     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
-       !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
-       term = null;
-       return false;
-     } else {
-       List<AutoDiff.Term> factors = new List<Term>();
-       foreach (var subTree in node.Subtrees.Skip(2)) {
-         AutoDiff.Term f;
-         if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
-           term = null;
-           return false;
-         }
-         factors.Add(f);
-       }
-       term = AutoDiff.TermBuilder.Product(a, b, factors.ToArray());
-       return true;
-     }
+     List<AutoDiff.Term> terms = new List<Term>();
+     foreach (var subTree in node.Subtrees) {
+       AutoDiff.Term t;
+       if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
+         term = null;
+         return false;
+       }
+       terms.Add(t);
+     }
+     if (terms.Count == 1) term = terms[0];
+     else term = terms.Aggregate((a, b) => new AutoDiff.Product(a, b));
+     return true;
    }
    if (node.Symbol is Division) {
-     // only works for at least two subtrees
-     AutoDiff.Term a, b;
-     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
-       !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
-       term = null;
-       return false;
-     } else {
-       List<AutoDiff.Term> factors = new List<Term>();
-       foreach (var subTree in node.Subtrees.Skip(2)) {
-         AutoDiff.Term f;
-         if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
-           term = null;
-           return false;
-         }
-         factors.Add(1.0 / f);
-       }
-       term = AutoDiff.TermBuilder.Product(a, 1.0 / b, factors.ToArray());
-       return true;
-     }
+     List<AutoDiff.Term> terms = new List<Term>();
+     foreach (var subTree in node.Subtrees) {
+       AutoDiff.Term t;
+       if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
+         term = null;
+         return false;
+       }
+       terms.Add(t);
+     }
+     if (terms.Count == 1) term = 1.0 / terms[0];
+     else term = terms.Aggregate((a, b) => new AutoDiff.Product(a, 1.0 / b));
+     return true;
    }
    if (node.Symbol is Logarithm) {
      AutoDiff.Term t;
-     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
        term = null;
        return false;
…
    if (node.Symbol is Exponential) {
      AutoDiff.Term t;
-     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
        term = null;
        return false;
…
    if (node.Symbol is Square) {
      AutoDiff.Term t;
-     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+     if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
        term = null;
        return false;
…
      return true;
    }
- } if (node.Symbol is SquareRoot) {
-   AutoDiff.Term t;
-   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+ }
+ if (node.Symbol is SquareRoot) {
+   AutoDiff.Term t;
+   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
      term = null;
      return false;
…
      return true;
    }
- } if (node.Symbol is Sine) {
-   AutoDiff.Term t;
-   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+ }
+ if (node.Symbol is Sine) {
+   AutoDiff.Term t;
+   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
      term = null;
      return false;
…
      return true;
    }
- } if (node.Symbol is Cosine) {
-   AutoDiff.Term t;
-   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+ }
+ if (node.Symbol is Cosine) {
+   AutoDiff.Term t;
+   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
      term = null;
      return false;
…
      return true;
    }
- } if (node.Symbol is Tangent) {
-   AutoDiff.Term t;
-   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+ }
+ if (node.Symbol is Tangent) {
+   AutoDiff.Term t;
+   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
      term = null;
      return false;
…
      return true;
    }
- } if (node.Symbol is Erf) {
-   AutoDiff.Term t;
-   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+ }
+ if (node.Symbol is Erf) {
+   AutoDiff.Term t;
+   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
      term = null;
      return false;
…
      return true;
    }
- } if (node.Symbol is Norm) {
-   AutoDiff.Term t;
-   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+ }
+ if (node.Symbol is Norm) {
+   AutoDiff.Term t;
+   if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
      term = null;
      return false;
…
    variables.Add(alpha);
    AutoDiff.Term branchTerm;
-   if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out branchTerm)) {
+   if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out branchTerm)) {
      term = branchTerm * alpha + beta;
      return true;
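Beyond threading updateVariableWeights through every recursive call, the only structural rewrite in TryTransformToAutoDiff is the handling of Multiplication and Division: instead of transforming the first two subtrees separately and collecting the remaining ones, all subtree terms are transformed uniformly and then folded with Aggregate, which also makes the single-operand case explicit. A minimal sketch of that fold, using plain doubles instead of AutoDiff terms; the helper names below are illustrative only, not part of HeuristicLab:

using System;
using System.Collections.Generic;
using System.Linq;

static class FoldSketch {
  // Mirrors the Multiplication branch: fold all operand terms into one product.
  static double Product(IReadOnlyList<double> terms) {
    if (terms.Count == 1) return terms[0];
    return terms.Aggregate((a, b) => a * b);
  }

  // Mirrors the Division branch: a single operand becomes its inverse,
  // otherwise the first operand is divided by each remaining one in turn.
  static double Quotient(IReadOnlyList<double> terms) {
    if (terms.Count == 1) return 1.0 / terms[0];
    return terms.Aggregate((a, b) => a * (1.0 / b));
  }

  static void Main() {
    Console.WriteLine(Product(new[] { 2.0, 3.0, 4.0 }));    // 24
    Console.WriteLine(Quotient(new[] { 24.0, 3.0, 4.0 }));  // 24 / 3 / 4 = 2
    Console.WriteLine(Quotient(new[] { 4.0 }));             // 1 / 4 = 0.25
  }
}

For n operands the division fold yields x1 / (x2 · … · xn), which matches the left fold over Product(a, 1.0 / b) used in the changeset.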