Ignore:
Timestamp:
11/20/17 20:36:29 (4 years ago)
Author:
gkronber
Message:

#2852: moved the scaling parameters to the end of the parameter vector to be able to remove the c.Skip(2).ToArray() call and removed unnecessary .ToArray() calls

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r15448 r15480  
    214 214        c = new double[initialConstants.Length + 2];
    215 215        {
    216             c[0] = 0.0;
    217             c[1] = 1.0;
    218             Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
        216          Array.Copy(initialConstants, 0, c, 0, initialConstants.Length);
        217          c[c.Length - 2] = 0.0;
        218          c[c.Length - 1] = 1.0;
    219 219        }
    220 220      } else {
     
    273 273      //retVal == -7  => constant optimization failed due to wrong gradient
    274 274      if (retVal != -7) {
    275             if (applyLinearScaling) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
    276             else UpdateConstants(tree, c.ToArray(), updateVariableWeights);
        275        if (applyLinearScaling) UpdateConstants(tree, c, updateVariableWeights);
        276        else UpdateConstants(tree, c, updateVariableWeights);
    277 277      }
    278 278      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
    279 279
    280           if (!updateConstantsInTree) UpdateConstants(tree, initialConstants.ToArray(), updateVariableWeights);
        280      if (!updateConstantsInTree) UpdateConstants(tree, initialConstants, updateVariableWeights);
    281 281
    282 282      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
    283             UpdateConstants(tree, initialConstants.ToArray(), updateVariableWeights);
        283        UpdateConstants(tree, initialConstants, updateVariableWeights);
    284 284        return originalQuality;
    285 285      }
     
    314 314    private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
    315 315      return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
    316             var tupel = func_grad(c, x);
    317             fx = tupel.Item2;
    318             Array.Copy(tupel.Item1, grad, grad.Length);
        316        var tuple = func_grad(c, x);
        317        fx = tuple.Item2;
        318        Array.Copy(tuple.Item1, grad, grad.Length);
    319 319        var counter = (EvaluationsCounter)o;
    320 320        counter.GradientEvaluations++;
Note: See TracChangeset for help on using the changeset viewer.