
Changeset 15448


Timestamp: 11/03/17 17:07:58
Author: mkommend
Message: #2852: Added counts of function and gradient evaluations performed by constants optimization as results.
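In outline, the changeset threads a small counter object through the constants optimizer and its alglib callbacks, then flushes the per-solution counts into two result parameters. A minimal standalone sketch of that pattern follows; the EvaluationsCounter class mirrors the one added in the diff below, but the surrounding harness is illustrative, not the actual HeuristicLab API.

    // Minimal sketch of the counting pattern, assuming a parallel engine may
    // run several evaluators at once. Only EvaluationsCounter is taken from
    // the changeset; the harness below is hypothetical.
    using System;

    public class EvaluationsCounter {
      public int FunctionEvaluations = 0;
      public int GradientEvaluations = 0;
    }

    public static class CountingHarness {
      private static readonly object locker = new object();
      private static int totalFunctionEvaluations, totalGradientEvaluations;

      public static void RunEvaluator(Action<EvaluationsCounter> optimizeConstants) {
        var counter = new EvaluationsCounter();   // local, so no contention while fitting
        optimizeConstants(counter);               // callbacks increment the counter

        lock (locker) {                           // shared totals need synchronization
          totalFunctionEvaluations += counter.FunctionEvaluations;
          totalGradientEvaluations += counter.GradientEvaluations;
        }
      }
    }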

File: 1 edited

Legend: lines prefixed with + were added, lines prefixed with - were removed, all other lines are unmodified.
  • trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

--- r15447
+++ r15448
@@ -27,4 +27,5 @@
 using HeuristicLab.Data;
 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
+using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     
@@ -41,4 +42,7 @@
     private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

+    private const string FunctionEvaluationsResultParameterName = "Constants Optimization Function Evaluations";
+    private const string GradientEvaluationsResultParameterName = "Constants Optimization Gradient Evaluations";
+
     public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
       get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
     
@@ -58,4 +62,11 @@
     public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
       get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
+    }
+
+    public IResultParameter<IntValue> FunctionEvaluationsResultParameter {
+      get { return (IResultParameter<IntValue>)Parameters[FunctionEvaluationsResultParameterName]; }
+    }
+    public IResultParameter<IntValue> GradientEvaluationsResultParameter {
+      get { return (IResultParameter<IntValue>)Parameters[GradientEvaluationsResultParameterName]; }
     }

     
@@ -100,4 +111,7 @@
       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be  optimized.", new BoolValue(true)) { Hidden = true });
+
+      Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+      Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
     }

     
@@ -112,6 +126,12 @@
       if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
         Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be  optimized.", new BoolValue(true)));
-    }
-
+
+      if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName))
+        Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+      if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName))
+        Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+    }
+
+    private static readonly object locker = new object();
     public override IOperation InstrumentedApply() {
       var solution = SymbolicExpressionTreeParameter.ActualValue;
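The ContainsKey guards above provide backwards compatibility: runs persisted before this revision lack the two new result parameters, so they are added when an old instance is loaded. The same guard, reduced to a plain dictionary standing in for the HeuristicLab parameter collection (hypothetical, for illustration only):

    // Backwards-compatibility guard, sketched against a plain dictionary
    // instead of the HeuristicLab parameter collection (hypothetical).
    using System.Collections.Generic;

    static class CompatSketch {
      public static void EnsureParameter(IDictionary<string, object> parameters,
                                         string name, object defaultValue) {
        if (!parameters.ContainsKey(name))      // only add what older runs are missing
          parameters.Add(name, defaultValue);
      }
    }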
     
@@ -119,6 +139,7 @@
       if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
         IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
+        var counter = new EvaluationsCounter();
         quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-           constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);
+           constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter);

         if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
     
@@ -126,4 +147,10 @@
           quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
         }
+
+        lock (locker) {
+          FunctionEvaluationsResultParameter.ActualValue.Value += counter.FunctionEvaluations;
+          GradientEvaluationsResultParameter.ActualValue.Value += counter.GradientEvaluations;
+        }
+
       } else {
         var evaluationRows = GenerateRowsToEvaluate();
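The lock is needed because the result parameters' values are shared across evaluator instances: under a parallel engine, several InstrumentedApply calls can finish at once, and += on the shared IntValue is not atomic. If the totals were plain int fields rather than shared IntValue objects, Interlocked.Add would be a lock-free alternative; the following is a hypothetical variant, not what the changeset does:

    // Hypothetical lock-free variant: works only for plain int fields,
    // not for the shared IntValue objects used by the result parameters.
    using System.Threading;

    static class TotalsSketch {
      static int functionEvaluations, gradientEvaluations;

      public static void Accumulate(int funcEvals, int gradEvals) {
        Interlocked.Add(ref functionEvaluations, funcEvals);
        Interlocked.Add(ref gradientEvaluations, gradEvals);
      }
    }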
     
@@ -139,4 +166,6 @@
       EstimationLimitsParameter.ExecutionContext = context;
       ApplyLinearScalingParameter.ExecutionContext = context;
+      FunctionEvaluationsResultParameter.ExecutionContext = context;
+      GradientEvaluationsResultParameter.ExecutionContext = context;

       // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
     
@@ -148,6 +177,13 @@
       EstimationLimitsParameter.ExecutionContext = null;
       ApplyLinearScalingParameter.ExecutionContext = null;
+      FunctionEvaluationsResultParameter.ExecutionContext = null;
+      GradientEvaluationsResultParameter.ExecutionContext = null;

       return r2;
+    }
+
+    public class EvaluationsCounter {
+      public int FunctionEvaluations = 0;
+      public int GradientEvaluations = 0;
     }

     
@@ -156,5 +192,5 @@
       int maxIterations, bool updateVariableWeights = true,
       double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
-      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null) {
+      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

       // numeric constants in the tree become variables for constant opt
     
@@ -171,5 +207,4 @@
         throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
       if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have a R² of 0.0
-
       var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

     
@@ -188,4 +223,7 @@

       double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
+
+      if (counter == null) counter = new EvaluationsCounter();
+      var rowEvaluationsCounter = new EvaluationsCounter();

       alglib.lsfitstate state;
     
@@ -222,5 +260,5 @@
         alglib.lsfitsetxrep(state, iterationCallback != null);
         //alglib.lsfitsetgradientcheck(state, 0.001);
-        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, null);
+        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
         alglib.lsfitresults(state, out retVal, out c, out rep);
       } catch (ArithmeticException) {
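The last argument of alglib.lsfitfit, previously null, is an opaque user object that alglib hands back verbatim to every callback invocation; passing rowEvaluationsCounter there is how the static callback adapters (see the closures at the end of this diff) reach the counter without global state. The mechanism in isolation, with a hand-rolled delegate standing in for alglib's own callback type (illustrative only):

    // Sketch of the user-object pass-through: the fit loop forwards 'obj'
    // unchanged to each per-point callback. Delegate and loop are
    // hypothetical stand-ins for alglib's own types.
    public class EvaluationsCounter { public int FunctionEvaluations; }

    public delegate void PointFunc(double[] c, double[] x, ref double fx, object obj);

    public static class FitLoopSketch {
      public static void Fit(PointFunc func, double[][] rows, double[] c, object obj) {
        foreach (var x in rows) {
          double fx = 0.0;
          func(c, x, ref fx, obj);   // 'obj' arrives untouched in the callback
        }
      }
    }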
     
@@ -229,4 +267,7 @@
         return originalQuality;
       }
+
+      counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
+      counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;

       //retVal == -7  => constant optimization failed due to wrong gradient
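alglib invokes the per-point callbacks once per row, so the raw counts grow with the dataset; dividing by n (the number of rows used for fitting) normalizes them to whole-dataset function and gradient evaluations. With hypothetical numbers:

    // Hypothetical numbers: 40 full passes over 500 fitted rows fire the
    // per-row callback 20,000 times, reported as 20,000 / 500 = 40 evaluations.
    int n = 500;
    int rowFunctionEvaluations = 20000;
    int functionEvaluations = rowFunctionEvaluations / n;   // 40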
     
@@ -266,4 +307,6 @@
       return (double[] c, double[] x, ref double fx, object o) => {
         fx = func(c, x);
+        var counter = (EvaluationsCounter)o;
+        counter.FunctionEvaluations++;
       };
     }
     
@@ -274,4 +317,6 @@
         fx = tupel.Item2;
         Array.Copy(tupel.Item1, grad, grad.Length);
+        var counter = (EvaluationsCounter)o;
+        counter.GradientEvaluations++;
       };
     }
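The two closures above adapt plain evaluation functions to the callback signature while counting every call through the user object. A self-contained sketch of the same adapter pattern; the delegate type and names are illustrative, since alglib defines its own callback types such as ndimensional_pfunc:

    // Adapter pattern from the closures above, self-contained and with a
    // hand-rolled delegate type (alglib defines its own callback types).
    using System;

    public class EvaluationsCounter {
      public int FunctionEvaluations = 0;
      public int GradientEvaluations = 0;
    }

    public static class AdapterSketch {
      public delegate void PointFunc(double[] c, double[] x, ref double fx, object o);

      public static PointFunc CreateCountingFunc(Func<double[], double[], double> func) {
        return (double[] c, double[] x, ref double fx, object o) => {
          fx = func(c, x);                               // evaluate the model at x
          ((EvaluationsCounter)o).FunctionEvaluations++; // count via the user object
        };
      }
    }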