Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
12/11/21 12:26:27 (14 months ago)
Author:
chaider
Message:

#3140

  • Refactored ConstantOptimization ==> ParameterOptimization
File:
1 moved

Legend:

Unmodified
Added
Removed
  • branches/3140_NumberSymbol/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionParameterOptimizationEvaluator.cs

    r18112 r18113  
    3232
    3333namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
    34   [Item("Constant Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the constant used.")]
     34  [Item("Parameter Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the parameters used.")]
    3535  [StorableType("24B68851-036D-4446-BD6F-3823E9028FF4")]
    36   public class SymbolicRegressionConstantOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
    37     private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    38     private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    39     private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    40     private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    41     private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
     36  public class SymbolicRegressionParameterOptimizationEvaluator : SymbolicRegressionSingleObjectiveEvaluator {
     37    private const string ParameterOptimizationIterationsParameterName = "ParameterOptimizationIterations";
     38    private const string ParameterOptimizationImprovementParameterName = "ParameterOptimizationImprovement";
     39    private const string ParameterOptimizationProbabilityParameterName = "ParameterOptimizationProbability";
     40    private const string ParameterOptimizationRowsPercentageParameterName = "ParameterOptimizationRowsPercentage";
     41    private const string UpdateParametersInTreeParameterName = "UpdateParametersInSymbolicExpressionTree";
    4242    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";
    4343
    44     private const string FunctionEvaluationsResultParameterName = "Constants Optimization Function Evaluations";
    45     private const string GradientEvaluationsResultParameterName = "Constants Optimization Gradient Evaluations";
     44    private const string FunctionEvaluationsResultParameterName = "Parameters Optimization Function Evaluations";
     45    private const string GradientEvaluationsResultParameterName = "Parameters Optimization Gradient Evaluations";
    4646    private const string CountEvaluationsParameterName = "Count Function and Gradient Evaluations";
    4747
    48     public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
    49       get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    50     }
    51     public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
    52       get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    53     }
    54     public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
    55       get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    56     }
    57     public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
    58       get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    59     }
    60     public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
    61       get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
     48    public IFixedValueParameter<IntValue> ParameterOptimizationIterationsParameter {
     49      get { return (IFixedValueParameter<IntValue>)Parameters[ParameterOptimizationIterationsParameterName]; }
     50    }
     51    public IFixedValueParameter<DoubleValue> ParameterOptimizationImprovementParameter {
     52      get { return (IFixedValueParameter<DoubleValue>)Parameters[ParameterOptimizationImprovementParameterName]; }
     53    }
     54    public IFixedValueParameter<PercentValue> ParameterOptimizationProbabilityParameter {
     55      get { return (IFixedValueParameter<PercentValue>)Parameters[ParameterOptimizationProbabilityParameterName]; }
     56    }
     57    public IFixedValueParameter<PercentValue> ParameterOptimizationRowsPercentageParameter {
     58      get { return (IFixedValueParameter<PercentValue>)Parameters[ParameterOptimizationRowsPercentageParameterName]; }
     59    }
     60    public IFixedValueParameter<BoolValue> UpdateParametersInTreeParameter {
     61      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateParametersInTreeParameterName]; }
    6262    }
    6363    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
     
    7676
    7777
    78     public IntValue ConstantOptimizationIterations {
    79       get { return ConstantOptimizationIterationsParameter.Value; }
    80     }
    81     public DoubleValue ConstantOptimizationImprovement {
    82       get { return ConstantOptimizationImprovementParameter.Value; }
    83     }
    84     public PercentValue ConstantOptimizationProbability {
    85       get { return ConstantOptimizationProbabilityParameter.Value; }
    86     }
    87     public PercentValue ConstantOptimizationRowsPercentage {
    88       get { return ConstantOptimizationRowsPercentageParameter.Value; }
    89     }
    90     public bool UpdateConstantsInTree {
    91       get { return UpdateConstantsInTreeParameter.Value.Value; }
    92       set { UpdateConstantsInTreeParameter.Value.Value = value; }
     78    public IntValue ParameterOptimizationIterations {
     79      get { return ParameterOptimizationIterationsParameter.Value; }
     80    }
     81    public DoubleValue ParameterOptimizationImprovement {
     82      get { return ParameterOptimizationImprovementParameter.Value; }
     83    }
     84    public PercentValue ParameterOptimizationProbability {
     85      get { return ParameterOptimizationProbabilityParameter.Value; }
     86    }
     87    public PercentValue ParameterOptimizationRowsPercentage {
     88      get { return ParameterOptimizationRowsPercentageParameter.Value; }
     89    }
     90    public bool UpdateParametersInTree {
     91      get { return UpdateParametersInTreeParameter.Value.Value; }
     92      set { UpdateParametersInTreeParameter.Value.Value = value; }
    9393    }
    9494
     
    108108
    109109    [StorableConstructor]
    110     protected SymbolicRegressionConstantOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
    111     protected SymbolicRegressionConstantOptimizationEvaluator(SymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
     110    protected SymbolicRegressionParameterOptimizationEvaluator(StorableConstructorFlag _) : base(_) { }
     111    protected SymbolicRegressionParameterOptimizationEvaluator(SymbolicRegressionParameterOptimizationEvaluator original, Cloner cloner)
    112112      : base(original, cloner) {
    113113    }
    114     public SymbolicRegressionConstantOptimizationEvaluator()
     114    public SymbolicRegressionParameterOptimizationEvaluator()
    115115      : base() {
    116       Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
    117       Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true });
    118       Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1)));
    119       Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1)));
    120       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
     116      Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the parameter of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
     117      Parameters.Add(new FixedValueParameter<DoubleValue>(ParameterOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the parameter optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true });
     118      Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationProbabilityParameterName, "Determines the probability that the parameters are optimized", new PercentValue(1)));
     119      Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for parameter optimization", new PercentValue(1)));
     120      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateParametersInTreeParameterName, "Determines if the parameters in the tree should be overwritten by the optimized parameters.", new BoolValue(true)) { Hidden = true });
    121121      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be  optimized.", new BoolValue(true)) { Hidden = true });
    122122
    123123      Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
    124       Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
    125       Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
     124      Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the parameters optimization evaluator", "Results", new IntValue()));
     125      Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the parameters optimization evaluator", "Results", new IntValue()));
    126126    }
    127127
    128128    public override IDeepCloneable Clone(Cloner cloner) {
    129       return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
     129      return new SymbolicRegressionParameterOptimizationEvaluator(this, cloner);
    130130    }
    131131
    132132    [StorableHook(HookType.AfterDeserialization)]
    133133    private void AfterDeserialization() {
    134       if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
    135         Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
     134      if (!Parameters.ContainsKey(UpdateParametersInTreeParameterName)) {
     135        if (Parameters.ContainsKey("UpdateConstantsInSymbolicExpressionTree")) {
     136          Parameters.Add(new FixedValueParameter<BoolValue>(UpdateParametersInTreeParameterName, "Determines if the parameters in the tree should be overwritten by the optimized parameters.", (BoolValue)Parameters["UpdateConstantsInSymbolicExpressionTree"].ActualValue));
     137          Parameters.Remove("UpdateConstantsInSymbolicExpressionTree");
     138        } else {
     139          Parameters.Add(new FixedValueParameter<BoolValue>(UpdateParametersInTreeParameterName, "Determines if the parameters in the tree should be overwritten by the optimized parameters.", new BoolValue(true)));
     140        }
     141      }
     142
    136143      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
    137144        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be  optimized.", new BoolValue(true)));
     
    140147        Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
    141148
    142       if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName))
    143         Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
    144       if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName))
    145         Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
     149      if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName)) {
     150        if (Parameters.ContainsKey("Constants Optimization Function Evaluations")) {
     151          Parameters.Remove("Constants Optimization Function Evaluations");
     152        }
     153        Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the parameters optimization evaluator", "Results", new IntValue()));
     154      }
     155
     156      if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName)) {
     157        if (Parameters.ContainsKey("Constants Optimization Gradient Evaluations")) {
     158          Parameters.Remove("Constants Optimization Gradient Evaluations");
     159        }
     160        Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the parameters optimization evaluator", "Results", new IntValue()));
     161      }
     162
     163      if (!Parameters.ContainsKey(ParameterOptimizationIterationsParameterName)) {
     164        if (Parameters.ContainsKey("ConstantOptimizationIterations")) {
     165          Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the parameter of a symbolic expression tree (0 indicates other or default stopping criterion).", (IntValue)Parameters["ConstantOptimizationIterations"].ActualValue));
     166          Parameters.Remove("ConstantOptimizationIterations");
     167        } else {
     168          Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the parameter of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10)));
     169        }
     170      }
     171
     172      if (!Parameters.ContainsKey(ParameterOptimizationImprovementParameterName)) {
     173        if (Parameters.ContainsKey("ConstantOptimizationImprovement")) {
     174          Parameters.Add(new FixedValueParameter<DoubleValue>(ParameterOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the parameter optimization to continue with it (0 indicates other or default stopping criterion).",
     175            (DoubleValue)Parameters["ConstantOptimizationImprovement"].ActualValue) { Hidden = true });
     176          Parameters.Remove("ConstantOptimizationImprovement");
     177        } else {
     178          Parameters.Add(new FixedValueParameter<DoubleValue>(ParameterOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the parameter optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0)) { Hidden = true });
     179        }
     180      }
     181
     182      if (!Parameters.ContainsKey(ParameterOptimizationProbabilityParameterName)) {
     183        if (Parameters.ContainsKey("ConstantOptimizationProbability")) {
     184          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationProbabilityParameterName, "Determines the probability that the parameters are optimized",
     185            (PercentValue)Parameters["ConstantOptimizationProbability"].ActualValue));
     186          Parameters.Remove("ConstantOptimizationProbability");
     187        } else {
     188          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationProbabilityParameterName, "Determines the probability that the parameters are optimized", new PercentValue(1)));
     189        }
     190      }
     191
     192      if (!Parameters.ContainsKey(ParameterOptimizationRowsPercentageParameterName)) {
     193        if (Parameters.ContainsKey("ConstantOptimizationRowsPercentage")) {
     194          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for parameter optimization", (PercentValue)Parameters["ConstantOptimizationRowsPercentage"].ActualValue));
     195          Parameters.Remove("ConstantOptimizationRowsPercentage");
     196        } else {
     197          Parameters.Add(new FixedValueParameter<PercentValue>(ParameterOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for parameter optimization", new PercentValue(1)));
     198        }
     199       
     200      }
    146201    }
    147202
     
    150205      var solution = SymbolicExpressionTreeParameter.ActualValue;
    151206      double quality;
    152       if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
    153         IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
     207      if (RandomParameter.ActualValue.NextDouble() < ParameterOptimizationProbability.Value) {
     208        IEnumerable<int> parameterOptimizationRows = GenerateRowsToEvaluate(ParameterOptimizationRowsPercentage.Value);
    154209        var counter = new EvaluationsCounter();
    155         quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
    156            constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter);
    157 
    158         if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
     210        quality = OptimizeParameters(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
     211           parameterOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ParameterOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateParametersInTree: UpdateParametersInTree, counter: counter);
     212
     213        if (ParameterOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
    159214          var evaluationRows = GenerateRowsToEvaluate();
    160215          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
     
    203258    }
    204259
    205     public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
     260    public static double OptimizeParameters(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
    206261      ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling,
    207262      int maxIterations, bool updateVariableWeights = true,
    208263      double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
    209       bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {
    210 
    211       // Numeric constants in the tree become variables for parameter optimization.
     264      bool updateParametersInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {
     265
     266      // Numeric parameters in the tree become variables for parameter optimization.
    212267      // Variables in the tree become parameters (fixed values) for parameter optimization.
    213268      // For each parameter (variable in the original tree) we store the
    214269      // variable name, variable value (for factor vars) and lag as a DataForVariable object.
    215270      // A dictionary is used to find parameters
    216       double[] initialConstants;
     271      double[] initialParameters;
    217272      var parameters = new List<TreeToAutoDiffTermConverter.DataForVariable>();
    218273
    219274      TreeToAutoDiffTermConverter.ParametricFunction func;
    220275      TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
    221       if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
    222         throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
     276      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialParameters, out func, out func_grad))
     277        throw new NotSupportedException("Could not optimize parameters of symbolic expression tree due to not supported symbols used in the tree.");
    223278      if (parameters.Count == 0) return 0.0; // constant expressions always have a R² of 0.0
    224279      var parameterEntries = parameters.ToArray(); // order of entries must be the same for x
    225280
    226       // extract inital constants
     281      // extract initial parameters
    227282      double[] c;
    228283      if (applyLinearScaling) {
    229         c = new double[initialConstants.Length + 2];
     284        c = new double[initialParameters.Length + 2];
    230285        c[0] = 0.0;
    231286        c[1] = 1.0;
    232         Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
     287        Array.Copy(initialParameters, 0, c, 2, initialParameters.Length);
    233288      } else {
    234         c = (double[])initialConstants.Clone();
     289        c = (double[])initialParameters.Clone();
    235290      }
    236291
     
    283338      counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;
    284339
    285       //retVal == -7  => constant optimization failed due to wrong gradient
     340      //retVal == -7  => parameter optimization failed due to wrong gradient
    286341      //          -8  => optimizer detected  NAN / INF  in  the target
    287342      //                 function and/ or gradient
     
    290345          var tmp = new double[c.Length - 2];
    291346          Array.Copy(c, 2, tmp, 0, tmp.Length);
    292           UpdateConstants(tree, tmp, updateVariableWeights);
    293         } else UpdateConstants(tree, c, updateVariableWeights);
     347          UpdateParameters(tree, tmp, updateVariableWeights);
     348        } else UpdateParameters(tree, c, updateVariableWeights);
    294349      }
    295350      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
    296351
    297       if (!updateConstantsInTree) UpdateConstants(tree, initialConstants, updateVariableWeights);
     352      if (!updateParametersInTree) UpdateParameters(tree, initialParameters, updateVariableWeights);
    298353
    299354      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
    300         UpdateConstants(tree, initialConstants, updateVariableWeights);
     355        UpdateParameters(tree, initialParameters, updateVariableWeights);
    301356        return originalQuality;
    302357      }
     
    304359    }
    305360
    306     private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
     361    private static void UpdateParameters(ISymbolicExpressionTree tree, double[] parameters, bool updateVariableWeights) {
    307362      int i = 0;
    308363      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
     
    313368          if (numberTreeNode.Parent.Symbol is Power
    314369              && numberTreeNode.Parent.GetSubtree(1) == numberTreeNode) continue; // exponents in powers are not optimized (see TreeToAutoDiffTermConverter)
    315           numberTreeNode.Value = constants[i++];
     370          numberTreeNode.Value = parameters[i++];
    316371        } else if (updateVariableWeights && variableTreeNodeBase != null)
    317           variableTreeNodeBase.Weight = constants[i++];
     372          variableTreeNodeBase.Weight = parameters[i++];
    318373        else if (factorVarTreeNode != null) {
    319374          for (int j = 0; j < factorVarTreeNode.Weights.Length; j++)
    320             factorVarTreeNode.Weights[j] = constants[i++];
     375            factorVarTreeNode.Weights[j] = parameters[i++];
    321376        }
    322377      }
     
    340395      };
    341396    }
    342     public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
     397    public static bool CanOptimizeParameters(ISymbolicExpressionTree tree) {
    343398      return TreeToAutoDiffTermConverter.IsCompatible(tree);
    344399    }
Note: See TracChangeset for help on using the changeset viewer.