Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
11/03/17 15:28:23 (6 years ago)
Author:
mkommend
Message:

#2852: Adapted constants optimization and auto diff converter to not add linear scaling terms.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r15371 r15447  
    168168      TreeToAutoDiffTermConverter.ParametricFunction func;
    169169      TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
    170       if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, out parameters, out initialConstants, out func, out func_grad))
     170      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
    171171        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
    172172      if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have a R² of 0.0
     
    175175
    176176      //extract initial constants
    177       double[] c = new double[initialConstants.Length + 2];
    178       {
    179         c[0] = 0.0;
    180         c[1] = 1.0;
    181         Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
    182       }
    183       double[] originalConstants = (double[])c.Clone();
     177      double[] c;
     178      if (applyLinearScaling) {
     179        c = new double[initialConstants.Length + 2];
     180        {
     181          c[0] = 0.0;
     182          c[1] = 1.0;
     183          Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
     184        }
     185      } else {
     186        c = (double[])initialConstants.Clone();
     187      }
     188
    184189      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
    185190
     
    219224        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, null);
    220225        alglib.lsfitresults(state, out retVal, out c, out rep);
    221       }
    222       catch (ArithmeticException) {
     226      } catch (ArithmeticException) {
    223227        return originalQuality;
    224       }
    225       catch (alglib.alglibexception) {
     228      } catch (alglib.alglibexception) {
    226229        return originalQuality;
    227230      }
    228231
    229232      //retVal == -7  => constant optimization failed due to wrong gradient
    230       if (retVal != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
     233      if (retVal != -7) {
     234        if (applyLinearScaling) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
     235        else UpdateConstants(tree, c.ToArray(), updateVariableWeights);
     236      }
    231237      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
    232238
    233       if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
     239      if (!updateConstantsInTree) UpdateConstants(tree, initialConstants.ToArray(), updateVariableWeights);
     240
    234241      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
    235         UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
     242        UpdateConstants(tree, initialConstants.ToArray(), updateVariableWeights);
    236243        return originalQuality;
    237244      }
Note: See TracChangeset for help on using the changeset viewer.