
Changeset 16073


Timestamp: 08/13/18 08:58:58
Author: bburlacu
Message: #2886: Implement restarts for constant optimization in the RSquaredEvaluator

File: 1 edited

Legend: lines prefixed with '+' were added, lines prefixed with '-' were removed, unprefixed lines are unchanged, and rows containing only '…' mark unchanged lines that are not shown.
  • branches/2886_SymRegGrammarEnumeration/HeuristicLab.Algorithms.DataAnalysis.SymRegGrammarEnumeration/GrammarEnumeration/RSquaredEvaluator.cs

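The diff below follows a simple restart pattern: evaluate the tree once without constant optimization to obtain a baseline R², then repeatedly re-initialize the constants at random and re-run the optimizer, stopping as soon as an attempt beats the baseline or the restart budget is exhausted, and keeping the optimized constants only if they improve on the baseline. The following is a minimal, self-contained sketch of that control flow, with hypothetical delegate parameters standing in for the HeuristicLab types; it is not the committed code, which follows after it.

// Illustrative only: a generic sketch of the restart control flow added in r16073,
// using hypothetical delegate parameters instead of the HeuristicLab types.
using System;

public static class RestartOptimizationSketch {
  // Returns the quality reached by the last randomized re-optimization attempt if it
  // improves on the baseline; otherwise returns the baseline. The loop stops as soon
  // as an attempt beats the baseline or the restart budget is used up.
  public static double OptimizeWithRestarts(
      double baselineQuality,           // quality before any constant optimization
      Action<Random> resetConstants,    // randomly re-initializes the constants in the model
      Func<double> optimizeConstants,   // runs gradient descent and returns the achieved quality
      int restarts,
      Random random) {
    int count = 0;
    double optimized = baselineQuality;
    do {
      resetConstants(random);
      optimized = optimizeConstants();
    } while (optimized <= baselineQuality && ++count < restarts);

    // keep the optimized result only when it actually improves on the baseline
    return optimized > baselineQuality ? optimized : baselineQuality;
  }
}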
--- RSquaredEvaluator.cs (r16053)
+++ RSquaredEvaluator.cs (r16073)
 #endregion

+using System.Linq;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
…
 using HeuristicLab.Problems.DataAnalysis.Symbolic;
 using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
+using HeuristicLab.Random;

 namespace HeuristicLab.Algorithms.DataAnalysis.SymRegGrammarEnumeration {
…
     private readonly string ApplyLinearScalingParameterName = "Apply Linear Scaling";
     private readonly string ConstantOptimizationIterationsParameterName = "Constant Optimization Iterations";
+    private readonly string RestartsParameterName = "Restarts";
+    private readonly string SeedParameterName = "Seed"; // seed for the random number generator
+
+    private readonly MersenneTwister random = new MersenneTwister();

     #region parameter properties
…
     public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
       get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
+    }
+
+    private IFixedValueParameter<IntValue> RestartsParameter {
+      get { return (IFixedValueParameter<IntValue>)Parameters[RestartsParameterName]; }
+    }
+
+    private int Restarts {
+      get { return RestartsParameter.Value.Value; }
+      set { RestartsParameter.Value.Value = value; }
     }

…
       Parameters.Add(new FixedValueParameter<BoolValue>(OptimizeConstantsParameterName, "Run constant optimization in sentence evaluation.", new BoolValue(false)));
       Parameters.Add(new FixedValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Apply linear scaling on the tree model during evaluation.", new BoolValue(false)));
-      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, new IntValue(10)));
+      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Number of gradient descent iterations.", new IntValue(10)));
+      Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "Number of restarts for gradient descent.", new IntValue(10)));
+
+      var seedParameter = new FixedValueParameter<IntValue>(SeedParameterName, "Seed value for random restarts.", new IntValue(0));
+      seedParameter.Value.ValueChanged += (sender, args) => random.Seed((uint)seedParameter.Value.Value);
+      random.Seed(0u);
+
+      Parameters.Add(seedParameter);
     }

…
     public double Evaluate(IRegressionProblemData problemData, Grammar grammar, SymbolList sentence) {
       var tree = grammar.ParseSymbolicExpressionTree(sentence);
-      return Evaluate(problemData, tree, OptimizeConstants, ConstantOptimizationIterations, ApplyLinearScaling);
+      return Evaluate(problemData, tree);
     }

     public double Evaluate(IRegressionProblemData problemData, ISymbolicExpressionTree tree) {
-      return Evaluate(problemData, tree, OptimizeConstants, ConstantOptimizationIterations, ApplyLinearScaling);
+      return Evaluate(problemData, tree, random, OptimizeConstants, ConstantOptimizationIterations, ApplyLinearScaling, Restarts);
     }

-    public static double Evaluate(IRegressionProblemData problemData, ISymbolicExpressionTree tree, bool optimizeConstants = true, int maxIterations = 10, bool applyLinearScaling = false) {
-      double r2;
-
-      // TODO: Initialize constant values randomly
-      // TODO: Restarts
-      if (optimizeConstants) {
-        r2 = SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(expressionTreeLinearInterpreter,
-          tree,
-          problemData,
-          problemData.TrainingIndices,
-          applyLinearScaling: applyLinearScaling,
-          maxIterations: maxIterations,
-          updateVariableWeights: false,
-          updateConstantsInTree: true);
-
-        foreach (var symbolicExpressionTreeNode in tree.IterateNodesPostfix()) {
-          ConstantTreeNode constTreeNode = symbolicExpressionTreeNode as ConstantTreeNode;
-          if (constTreeNode != null && constTreeNode.Value.IsAlmost(0.0)) {
-            constTreeNode.Value = 0.0;
-          }
-        }
-      } else {
-        r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(expressionTreeLinearInterpreter,
+    public static double Evaluate(IRegressionProblemData problemData, ISymbolicExpressionTree tree, IRandom random, bool optimizeConstants = true, int maxIterations = 10, bool applyLinearScaling = false, int restarts = 1) {
+      // we begin with an evaluation without constant optimization (relatively small speed penalty compared to const opt)
+      // this value will be used as a baseline to decide if an improvement was achieved via const opt
+      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(expressionTreeLinearInterpreter,
           tree,
           double.MinValue,
…
           problemData.TrainingIndices,
           applyLinearScaling: applyLinearScaling);
+
+      // restart const opt and try to obtain an improved r2 value
+      if (optimizeConstants) {
+        int count = 0;
+        double optimized = r2;
+        do {
+          foreach (var constantNode in tree.IterateNodesPrefix().OfType<ConstantTreeNode>()) {
+            constantNode.ResetLocalParameters(random);
+          }
+
+          optimized = SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(
+            expressionTreeLinearInterpreter,
+            tree,
+            problemData,
+            problemData.TrainingIndices,
+            applyLinearScaling,
+            maxIterations,
+            false,
+            double.MinValue,
+            double.MaxValue,
+            true);
+        } while (optimized <= r2 && ++count < restarts);
+
+        // do not update constants if quality is not improved
+        if (optimized > r2) {
+          r2 = optimized;
+
+          // is this code really necessary ?
+          foreach (var symbolicExpressionTreeNode in tree.IterateNodesPostfix()) {
+            ConstantTreeNode constTreeNode = symbolicExpressionTreeNode as ConstantTreeNode;
+            if (constTreeNode != null && constTreeNode.Value.IsAlmost(0.0)) {
+              constTreeNode.Value = 0.0;
+            }
+          }
+        }
       }
-      return double.IsNaN(r2) ? 0.0 : r2;
+      return double.IsNaN(r2) || double.IsInfinity(r2) ? 0.0 : r2;
     }
   }
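For callers outside GrammarEnumeration, the new static overload can be invoked directly. The following is a hypothetical usage sketch, not part of the changeset: it assumes the HeuristicLab assemblies referenced in the file are available, that RSquaredEvaluator is accessible from the calling project, and that an IRegressionProblemData and an ISymbolicExpressionTree have already been constructed; the class and method names in the sketch are illustrative.

// Hypothetical usage sketch for the r16073 Evaluate overload; RSquaredEvaluatorUsageSketch
// and EvaluateWithRestarts are illustrative names, not part of the changeset.
using HeuristicLab.Algorithms.DataAnalysis.SymRegGrammarEnumeration;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Random;

public static class RSquaredEvaluatorUsageSketch {
  public static double EvaluateWithRestarts(IRegressionProblemData problemData, ISymbolicExpressionTree tree) {
    // Seed the generator explicitly so repeated evaluations reset the constants to the same random values.
    var random = new MersenneTwister();
    random.Seed(0u);

    // Up to five restarts of constant optimization with ten gradient descent iterations each;
    // the evaluator keeps the optimized constants only if they improve on the un-optimized baseline R².
    return RSquaredEvaluator.Evaluate(problemData, tree, random,
      optimizeConstants: true,
      maxIterations: 10,
      applyLinearScaling: true,
      restarts: 5);
  }
}

With restarts: 1 (the signature's default) a single randomized constant-optimization attempt is made; larger values simply allow more attempts before giving up, and seeding the MersenneTwister (mirroring the evaluator's new Seed parameter) makes those attempts reproducible.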