source: branches/3136_Structural_GP/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs @ 18103

Last change on this file: changeset 18103, checked in by dpiringe, 2 years ago

#3136

  • refactor the evaluation logic of NMSESingleObjectiveConstraintsEvaluator
  • refactor the new method Evaluate for PearsonRSquaredAverageSimilarityEvaluator
  • change the parameter order of some evaluate/calculate methods
File size: 21.9 KB
#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HEAL.Attic;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Random;

namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
  [Item("Constant Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the constants used.")]
  [StorableType("24B68851-036D-4446-BD6F-3823E9028FF4")]
  public class SymbolicRegressionConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

    private const string FunctionEvaluationsResultParameterName = "Constants Optimization Function Evaluations";
    private const string GradientEvaluationsResultParameterName = "Constants Optimization Gradient Evaluations";
    private const string CountEvaluationsParameterName = "Count Function and Gradient Evaluations";

    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
    }

    public IResultParameter<IntValue> FunctionEvaluationsResultParameter {
      get { return (IResultParameter<IntValue>)Parameters[FunctionEvaluationsResultParameterName]; }
    }
    public IResultParameter<IntValue> GradientEvaluationsResultParameter {
      get { return (IResultParameter<IntValue>)Parameters[GradientEvaluationsResultParameterName]; }
    }
    public IFixedValueParameter<BoolValue> CountEvaluationsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CountEvaluationsParameterName]; }
    }

    public IntValue ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ConstantOptimizationImprovement {
      get { return ConstantOptimizationImprovementParameter.Value; }
    }
    public PercentValue ConstantOptimizationProbability {
      get { return ConstantOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }
    public bool UpdateConstantsInTree {
      get { return UpdateConstantsInTreeParameter.Value.Value; }
      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    }

    public bool UpdateVariableWeights {
      get { return UpdateVariableWeightsParameter.Value.Value; }
      set { UpdateVariableWeightsParameter.Value.Value = value; }
    }

    public bool CountEvaluations {
      get { return CountEvaluationsParameter.Value.Value; }
      set { CountEvaluationsParameter.Value.Value = value; }
    }

    public override bool Maximization {
      get { return true; }
    }

    [StorableConstructor]
    protected SymbolicRegressionConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
    protected SymbolicRegressionConstantOptimizationEvaluator(SymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    public SymbolicRegressionConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constants of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized.", new PercentValue(1)));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization.", new PercentValue(1)));
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });

      Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluations should be counted.", new BoolValue(false)));
      Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator.", "Results", new IntValue()));
      Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator.", "Results", new IntValue()));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      // Backwards compatibility: add parameters that were introduced after older
      // versions of this evaluator were persisted.
      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));

      if (!Parameters.ContainsKey(CountEvaluationsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluations should be counted.", new BoolValue(false)));

      if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName))
        Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator.", "Results", new IntValue()));
      if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName))
        Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator.", "Results", new IntValue()));
    }

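    // locker guards accumulation of the function/gradient evaluation counters,
    // since evaluators may run concurrently (e.g., with a parallel engine).
    // InstrumentedApply: with probability ConstantOptimizationProbability the
    // constants of the tree are optimized on a (possibly reduced) row set; if that
    // set differs from the regular evaluation rows, the reported quality is
    // re-computed on the regular rows so that qualities remain comparable.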
    private static readonly object locker = new object();

    public override IOperation InstrumentedApply() {
      var tree = SymbolicExpressionTreeParameter.ActualValue;
      double quality;
      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
        var counter = new EvaluationsCounter();
        quality = OptimizeConstants(
          SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, ProblemDataParameter.ActualValue,
          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
          updateVariableWeights: UpdateVariableWeights,
          lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower,
          upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper,
          updateConstantsInTree: UpdateConstantsInTree, counter: counter);

        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
          var evaluationRows = GenerateRowsToEvaluate();
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(
            tree, ProblemDataParameter.ActualValue,
            evaluationRows, SymbolicDataAnalysisTreeInterpreterParameter.ActualValue,
            ApplyLinearScalingParameter.ActualValue.Value,
            EstimationLimitsParameter.ActualValue.Lower,
            EstimationLimitsParameter.ActualValue.Upper);
        }

        if (CountEvaluations) {
          lock (locker) {
            FunctionEvaluationsResultParameter.ActualValue.Value += counter.FunctionEvaluations;
            GradientEvaluationsResultParameter.ActualValue.Value += counter.GradientEvaluations;
          }
        }
      } else {
        var evaluationRows = GenerateRowsToEvaluate();
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(
          tree, ProblemDataParameter.ActualValue,
          evaluationRows, SymbolicDataAnalysisTreeInterpreterParameter.ActualValue,
          ApplyLinearScalingParameter.ActualValue.Value,
          EstimationLimitsParameter.ActualValue.Lower,
          EstimationLimitsParameter.ActualValue.Upper);
      }
      QualityParameter.ActualValue = new DoubleValue(quality);

      return base.InstrumentedApply();
    }

    public override double Evaluate(
      ISymbolicExpressionTree tree,
      IRegressionProblemData problemData,
      IEnumerable<int> rows,
      ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
      bool applyLinearScaling = true,
      double lowerEstimationLimit = double.MinValue,
      double upperEstimationLimit = double.MaxValue) {

      var random = RandomParameter.ActualValue;
      double quality = double.NaN;

      var probability = random.NextDouble();
      if (probability < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
        quality = OptimizeConstants(
          interpreter, tree,
          problemData, constantOptimizationRows,
          applyLinearScaling,
          ConstantOptimizationIterations.Value,
          updateVariableWeights: UpdateVariableWeights,
          lowerEstimationLimit: lowerEstimationLimit,
          upperEstimationLimit: upperEstimationLimit,
          updateConstantsInTree: UpdateConstantsInTree);
      }
      if (double.IsNaN(quality) || ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(
          tree, problemData,
          rows, interpreter,
          applyLinearScaling,
          lowerEstimationLimit,
          upperEstimationLimit);
      }
      return quality;
    }

    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;
      FunctionEvaluationsResultParameter.ExecutionContext = context;
      GradientEvaluationsResultParameter.ExecutionContext = context;

      // The Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
      // because Evaluate() is used to get the quality of evolved models on
      // different partitions of the dataset (e.g., the best validation model).
      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(
        tree, problemData, rows,
        SymbolicDataAnalysisTreeInterpreterParameter.ActualValue,
        ApplyLinearScalingParameter.ActualValue.Value,
        EstimationLimitsParameter.ActualValue.Lower,
        EstimationLimitsParameter.ActualValue.Upper);

      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;
      FunctionEvaluationsResultParameter.ExecutionContext = null;
      GradientEvaluationsResultParameter.ExecutionContext = null;

      return r2;
    }

    public class EvaluationsCounter {
      public int FunctionEvaluations = 0;
      public int GradientEvaluations = 0;
    }

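    // Hypothetical usage sketch (the interpreter and problemData instances are
    // assumptions for illustration, not part of this file):
    //   var counter = new EvaluationsCounter();
    //   double r2 = OptimizeConstants(interpreter, tree, problemData,
    //     problemData.TrainingIndices, applyLinearScaling: true, maxIterations: 10,
    //     counter: counter);
    //   // r2 is the Pearson R² of the tree after the constant optimization attempt.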
    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling,
      int maxIterations, bool updateVariableWeights = true,
      double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

      // Numeric constants in the tree become variables for parameter optimization.
      // Variables in the tree become parameters (fixed values) for parameter optimization.
      // For each parameter (variable in the original tree) we store the
      // variable name, variable value (for factor vars) and lag as a DataForVariable object.
      // A dictionary is used to find parameters.
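      // Illustrative example (hypothetical, not part of the original source):
      // for the tree 3.5 * x + 2.0 the converter extracts
      //   initialConstants = { 3.5, 2.0 }   // the weight of x and the constant node
      // and a single DataForVariable entry for x, whose column in the data matrix
      // built below supplies the (fixed) values of x.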
      double[] initialConstants;
      var parameters = new List<TreeToAutoDiffTermConverter.DataForVariable>();

      TreeToAutoDiffTermConverter.ParametricFunction func;
      TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to unsupported symbols used in the tree.");
      if (parameters.Count == 0) return 0.0; // constant expressions always have an R² of 0.0
      var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

      // extract initial constants
      double[] c;
      if (applyLinearScaling) {
        c = new double[initialConstants.Length + 2];
        c[0] = 0.0; // additive offset
        c[1] = 1.0; // multiplicative scaling factor
        Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
      } else {
        c = (double[])initialConstants.Clone();
      }

      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(
        tree, problemData, rows,
        interpreter, applyLinearScaling,
        lowerEstimationLimit,
        upperEstimationLimit);
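      // originalQuality serves as a fallback: if ALGLIB throws or the optimized
      // constants turn out to be worse, the original constants and quality are kept.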

      if (counter == null) counter = new EvaluationsCounter();
      var rowEvaluationsCounter = new EvaluationsCounter();

      alglib.lsfitstate state;
      alglib.lsfitreport rep;
      int retVal;

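      // Build the input matrix x: one row per evaluated data row, one column per
      // parameter. Factor (string) variables are binary-encoded: the column is 1
      // iff the row's value equals the factor level stored in the parameter entry.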
      IDataset ds = problemData.Dataset;
      double[,] x = new double[rows.Count(), parameters.Count];
      int row = 0;
      foreach (var r in rows) {
        int col = 0;
        foreach (var info in parameterEntries) {
          if (ds.VariableHasType<double>(info.variableName)) {
            x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
          } else if (ds.VariableHasType<string>(info.variableName)) {
            x[row, col] = ds.GetStringValue(info.variableName, r) == info.variableValue ? 1 : 0;
          } else throw new InvalidProgramException("found a variable of unknown type");
          col++;
        }
        row++;
      }
      double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
      int n = x.GetLength(0);
      int m = x.GetLength(1);
      int k = c.Length;

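      // ALGLIB nonlinear least-squares fit with analytic gradients:
      // lsfitcreatefg sets up the fit, lsfitsetcond(state, 0.0, maxIterations)
      // stops after maxIterations (an epsx of 0.0 leaves the iteration count as
      // the effective stopping criterion), and rowEvaluationsCounter is passed
      // through as the opaque callback object.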
      alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(func);
      alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(func_grad);
      alglib.ndimensional_rep xrep = (p, f, obj) => iterationCallback(p, f, obj);

      try {
        alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
        alglib.lsfitsetcond(state, 0.0, maxIterations);
        alglib.lsfitsetxrep(state, iterationCallback != null);
        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
        alglib.lsfitresults(state, out retVal, out c, out rep);
      } catch (ArithmeticException) {
        return originalQuality;
      } catch (alglib.alglibexception) {
        return originalQuality;
      }

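      // The ALGLIB callbacks are invoked once per data row, so the raw counts are
      // divided by n to obtain whole-dataset function/gradient evaluation counts.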
      counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
      counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;

      // retVal == -7 => constant optimization failed due to a wrong gradient
      //          -8 => optimizer detected NaN / INF in the target function and/or gradient
      if (retVal != -7 && retVal != -8) {
        if (applyLinearScaling) {
          var tmp = new double[c.Length - 2];
          Array.Copy(c, 2, tmp, 0, tmp.Length);
          UpdateConstants(tree, tmp, updateVariableWeights);
        } else UpdateConstants(tree, c, updateVariableWeights);
      }
      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(
        tree, problemData, rows,
        interpreter, applyLinearScaling,
        lowerEstimationLimit, upperEstimationLimit);

      if (!updateConstantsInTree) UpdateConstants(tree, initialConstants, updateVariableWeights);

      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
        UpdateConstants(tree, initialConstants, updateVariableWeights);
        return originalQuality;
      }
      return quality;
    }

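    // Writes optimized values back into the tree. The prefix traversal here must
    // visit terminal nodes in the same order in which TreeToAutoDiffTermConverter
    // collected the initial constants; otherwise values would be assigned to the
    // wrong nodes.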
    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
      int i = 0;
      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
        VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase;
        FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode;
        if (constantTreeNode != null) {
          if (constantTreeNode.Parent.Symbol is Power
              && constantTreeNode.Parent.GetSubtree(1) == constantTreeNode) continue; // exponents in powers are not optimized (see TreeToAutoDiffTermConverter)
          constantTreeNode.Value = constants[i++];
        } else if (updateVariableWeights && variableTreeNodeBase != null)
          variableTreeNodeBase.Weight = constants[i++];
        else if (factorVarTreeNode != null) {
          for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
            factorVarTreeNode.Weights[j] = constants[i++];
        }
      }
    }

    private static alglib.ndimensional_pfunc CreatePFunc(TreeToAutoDiffTermConverter.ParametricFunction func) {
      return (double[] c, double[] x, ref double fx, object o) => {
        fx = func(c, x);
        var counter = (EvaluationsCounter)o;
        counter.FunctionEvaluations++;
      };
    }

    private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
      return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
        var tuple = func_grad(c, x);
        fx = tuple.Item2;
        Array.Copy(tuple.Item1, grad, grad.Length);
        var counter = (EvaluationsCounter)o;
        counter.GradientEvaluations++;
      };
    }

    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
      return TreeToAutoDiffTermConverter.IsCompatible(tree);
    }
  }
}