
Changeset 15447 for trunk


Timestamp: 11/03/17 15:28:23
Author: mkommend
Message: #2852: Adapted constants optimization and auto diff converter to not add linear scaling terms.
Location: trunk/sources
Files: 2 edited

Legend: unchanged context lines are shown without a prefix; removed lines are prefixed with "-", added lines with "+".
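For context, the "linear scaling terms" mentioned in the message refer to wrapping the symbolic model f(x) as beta + alpha * f(x), so that the constant optimizer can fit an additive offset beta and a multiplicative factor alpha alongside the tree's own constants. This changeset makes that wrapping optional. A minimal sketch of the idea (the Predict helper below is illustrative only, not part of HeuristicLab):

    using System;

    static class LinearScalingSketch {
      // Illustrative only: with linear scaling the evaluated model is
      // beta + alpha * f(x); without it, the raw model output f(x) is used.
      public static double Predict(Func<double[], double> f, double[] x,
                                   bool applyLinearScaling,
                                   double alpha = 1.0, double beta = 0.0) {
        return applyLinearScaling ? beta + alpha * f(x) : f(x);
      }
    }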
  • trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r15371 → r15447
          TreeToAutoDiffTermConverter.ParametricFunction func;
          TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
    -     if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, out parameters, out initialConstants, out func, out func_grad))
    +     if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
            throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
          if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have a R² of 0.0
     

          //extract inital constants
    -     double[] c = new double[initialConstants.Length + 2];
    -     {
    -       c[0] = 0.0;
    -       c[1] = 1.0;
    -       Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
    -     }
    -     double[] originalConstants = (double[])c.Clone();
    +     double[] c;
    +     if (applyLinearScaling) {
    +       c = new double[initialConstants.Length + 2];
    +       {
    +         c[0] = 0.0;
    +         c[1] = 1.0;
    +         Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
    +       }
    +     } else {
    +       c = (double[])initialConstants.Clone();
    +     }
    +
          double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
     
            alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, null);
            alglib.lsfitresults(state, out retVal, out c, out rep);
    -     }
    -     catch (ArithmeticException) {
    +     } catch (ArithmeticException) {
            return originalQuality;
    -     }
    -     catch (alglib.alglibexception) {
    +     } catch (alglib.alglibexception) {
            return originalQuality;
          }

          //retVal == -7  => constant optimization failed due to wrong gradient
    -     if (retVal != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
    +     if (retVal != -7) {
    +       if (applyLinearScaling) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
    +       else UpdateConstants(tree, c.ToArray(), updateVariableWeights);
    +     }
          var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

    -     if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
    +     if (!updateConstantsInTree) UpdateConstants(tree, initialConstants.ToArray(), updateVariableWeights);
    +
          if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
    -       UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
    +       UpdateConstants(tree, initialConstants.ToArray(), updateVariableWeights);
            return originalQuality;
          }
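The hunks above change how the constant vector c is laid out and written back: with applyLinearScaling, slots 0 and 1 hold the additive offset (initialized to 0.0) and the multiplicative factor (initialized to 1.0) and the tree constants start at index 2, which is why the write-back skips the first two entries; without scaling, c is simply a copy of initialConstants and is written back in full. A compressed, standalone sketch of that layout logic (method names are simplified and are not the evaluator's actual helpers):

    using System;
    using System.Linq;

    static class ConstantVectorSketch {
      // Sketch: build the vector handed to the least-squares fitter.
      public static double[] Build(double[] initialConstants, bool applyLinearScaling) {
        if (!applyLinearScaling) return (double[])initialConstants.Clone();
        var c = new double[initialConstants.Length + 2];
        c[0] = 0.0;  // additive scaling term
        c[1] = 1.0;  // multiplicative scaling term
        Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
        return c;
      }

      // Sketch: extract the constants that go back into the tree after fitting.
      public static double[] TreeConstants(double[] c, bool applyLinearScaling) {
        return applyLinearScaling ? c.Skip(2).ToArray() : c.ToArray();
      }
    }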
  • trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Converters/TreeToAutoDiffTermConverter.cs

    r14950 → r15447
        #endregion

    -   public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable,
    +   public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable, bool addLinearScalingTerms,
          out List<DataForVariable> parameters, out double[] initialConstants,
          out ParametricFunction func,
     

          // use a transformator object which holds the state (variable list, parameter list, ...) for recursive transformation of the tree
    -     var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable);
    +     var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable, addLinearScalingTerms);
          AutoDiff.Term term;
          try {
     
        private readonly List<AutoDiff.Variable> variables;
        private readonly bool makeVariableWeightsVariable;
    -
    -   private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable) {
    +   private readonly bool addLinearScalingTerms;
    +
    +   private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable, bool addLinearScalingTerms) {
          this.makeVariableWeightsVariable = makeVariableWeightsVariable;
    +     this.addLinearScalingTerms = addLinearScalingTerms;
          this.initialConstants = new List<double>();
          this.parameters = new Dictionary<DataForVariable, AutoDiff.Variable>();
     
          }
          if (node.Symbol is StartSymbol) {
    -       var alpha = new AutoDiff.Variable();
    -       var beta = new AutoDiff.Variable();
    -       variables.Add(beta);
    -       variables.Add(alpha);
    -       return ConvertToAutoDiff(node.GetSubtree(0)) * alpha + beta;
    +       if (addLinearScalingTerms) {
    +         var alpha = new AutoDiff.Variable();
    +         var beta = new AutoDiff.Variable();
    +         variables.Add(beta);
    +         variables.Add(alpha);
    +         return ConvertToAutoDiff(node.GetSubtree(0)) * alpha + beta;
    +       } else return ConvertToAutoDiff(node.GetSubtree(0));
          }
          throw new ConversionException();
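With the new addLinearScalingTerms flag, the caller now decides whether the converter appends the alpha/beta scaling variables at the tree root, as shown in the StartSymbol branch above. A hedged call-site sketch (the local variable declarations and the nesting of DataForVariable are assumptions based on the signatures visible in this changeset):

    // Assumed call site; 'tree' is an ISymbolicExpressionTree built elsewhere.
    List<TreeToAutoDiffTermConverter.DataForVariable> parameters;
    double[] initialConstants;
    TreeToAutoDiffTermConverter.ParametricFunction func;
    TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;

    if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(
          tree,
          true,   // makeVariableWeightsVariable: also expose variable weights as fit parameters
          false,  // addLinearScalingTerms: do not wrap the term in alpha/beta (new in r15447)
          out parameters, out initialConstants, out func, out func_grad)) {
      // Conversion failed: the tree contains symbols the converter does not support.
    }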