Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
11/19/21 16:07:45 (2 years ago)
Author:
mkommend
Message:

#2521: Merged trunk changes into branch.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • branches/2521_ProblemRefactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r17226 r18086  
    209209      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {
    210210
    211       // numeric constants in the tree become variables for constant opt
    212       // variables in the tree become parameters (fixed values) for constant opt
    213       // for each parameter (variable in the original tree) we store the
     211      // Numeric constants in the tree become variables for parameter optimization.
     212      // Variables in the tree become parameters (fixed values) for parameter optimization.
     213      // For each parameter (variable in the original tree) we store the
    214214      // variable name, variable value (for factor vars) and lag as a DataForVariable object.
    215215      // A dictionary is used to find parameters
     
    221221      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
    222222        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
    223       if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have a R² of 0.0
     223      if (parameters.Count == 0) return 0.0; // constant expressions always have a R² of 0.0
    224224      var parameterEntries = parameters.ToArray(); // order of entries must be the same for x
    225225
    226       //extract inital constants
 226      // extract initial constants
    227227      double[] c;
    228228      if (applyLinearScaling) {
     
    270270      try {
    271271        alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
    272         alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
     272        alglib.lsfitsetcond(state, 0.0, maxIterations);
    273273        alglib.lsfitsetxrep(state, iterationCallback != null);
    274         //alglib.lsfitsetgradientcheck(state, 0.001);
    275274        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
    276275        alglib.lsfitresults(state, out retVal, out c, out rep);
     
    285284
    286285      //retVal == -7  => constant optimization failed due to wrong gradient
    287       if (retVal != -7) {
 286      //          -8  => optimizer detected NAN / INF in the target
 287      //                 function and/or gradient
     288      if (retVal != -7 && retVal != -8) {
    288289        if (applyLinearScaling) {
    289290          var tmp = new double[c.Length - 2];
     
    309310        VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase;
    310311        FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode;
    311         if (constantTreeNode != null)
     312        if (constantTreeNode != null) {
 313          if (constantTreeNode.Parent.Symbol is Power
 314              && constantTreeNode.Parent.GetSubtree(1) == constantTreeNode) continue; // exponents in powers are not optimized (see TreeToAutoDiffTermConverter)
    312315          constantTreeNode.Value = constants[i++];
    313         else if (updateVariableWeights && variableTreeNodeBase != null)
     316        } else if (updateVariableWeights && variableTreeNodeBase != null)
    314317          variableTreeNodeBase.Weight = constants[i++];
    315318        else if (factorVarTreeNode != null) {
Note: See TracChangeset for help on using the changeset viewer.