
Changeset 15515


Timestamp: 12/12/17 16:16:12
Author: mkommend
Message: #2852: Merged r15447, r15448, r15480, r15481, r15483 into stable.
Location: stable
Files: 6 edited

Legend:

+ added line
- removed line
  unchanged context ("…" marks omitted lines)
  • stable
  • stable/HeuristicLab.Problems.DataAnalysis.Symbolic
  • stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression
  • stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4

  • stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

r15406 → r15515

…
 using HeuristicLab.Data;
 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
+using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     
…
     private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

+    private const string FunctionEvaluationsResultParameterName = "Constants Optimization Function Evaluations";
+    private const string GradientEvaluationsResultParameterName = "Constants Optimization Gradient Evaluations";
+    private const string CountEvaluationsParameterName = "Count Function and Gradient Evaluations";
+
     public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
       get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
     
…
     public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
       get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
+    }
+
+    public IResultParameter<IntValue> FunctionEvaluationsResultParameter {
+      get { return (IResultParameter<IntValue>)Parameters[FunctionEvaluationsResultParameterName]; }
+    }
+    public IResultParameter<IntValue> GradientEvaluationsResultParameter {
+      get { return (IResultParameter<IntValue>)Parameters[GradientEvaluationsResultParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CountEvaluationsParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CountEvaluationsParameterName]; }
     }

     
…
       get { return UpdateVariableWeightsParameter.Value.Value; }
       set { UpdateVariableWeightsParameter.Value.Value = value; }
+    }
+
+    public bool CountEvaluations {
+      get { return CountEvaluationsParameter.Value.Value; }
+      set { CountEvaluationsParameter.Value.Value = value; }
     }

     
…
       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });
+
+      Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
+      Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+      Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
     }

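Counting is opt-in: the CountEvaluations flag defaults to false, and the two ResultParameter<IntValue>s publish their totals into the run's "Results" collection under the names defined above. A minimal usage sketch (hypothetical setup code, not part of this changeset):

  // Enable counting on a fresh evaluator; totals later appear in "Results" as
  // "Constants Optimization Function Evaluations" and
  // "Constants Optimization Gradient Evaluations".
  var evaluator = new SymbolicRegressionConstantOptimizationEvaluator();
  evaluator.CountEvaluations = true;  // sets the BoolValue behind the parameter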
     
…
       if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
         Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
-    }
-
+
+      if (!Parameters.ContainsKey(CountEvaluationsParameterName))
+        Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
+      if (Parameters.ContainsKey(FunctionEvaluationsResultParameterName) && Parameters.ContainsKey(GradientEvaluationsResultParameterName))
+        CountEvaluations = true;
+
+      if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName))
+        Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+      if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName))
+        Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+    }
+
+    private static readonly object locker = new object();
     public override IOperation InstrumentedApply() {
       var solution = SymbolicExpressionTreeParameter.ActualValue;
     
…
       if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
         IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
+        var counter = new EvaluationsCounter();
         quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);
+          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter);

         if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
     
…
           quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
         }
+
+        if (CountEvaluations) {
+          lock (locker) {
+            FunctionEvaluationsResultParameter.ActualValue.Value += counter.FunctionEvaluations;
+            GradientEvaluationsResultParameter.ActualValue.Value += counter.GradientEvaluations;
+          }
+        }
+
       } else {
         var evaluationRows = GenerateRowsToEvaluate();
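Since evaluators may be applied concurrently (e.g. by a parallel engine), each InstrumentedApply accumulates into a local EvaluationsCounter and merges the totals into the shared result values under the static locker; only the merge is synchronized, not the per-callback increments. The pattern in isolation (a sketch with stand-in fields, not HeuristicLab API):

  private static readonly object locker = new object();
  private int sharedFunctionEvaluations;     // stand-in for the result parameter

  private void Merge(EvaluationsCounter local) {
    lock (locker) {                          // serialize writers on the shared total
      sharedFunctionEvaluations += local.FunctionEvaluations;
    }
  }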
     
…
       EstimationLimitsParameter.ExecutionContext = context;
       ApplyLinearScalingParameter.ExecutionContext = context;
+      FunctionEvaluationsResultParameter.ExecutionContext = context;
+      GradientEvaluationsResultParameter.ExecutionContext = context;

       // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
     
…
       EstimationLimitsParameter.ExecutionContext = null;
       ApplyLinearScalingParameter.ExecutionContext = null;
+      FunctionEvaluationsResultParameter.ExecutionContext = null;
+      GradientEvaluationsResultParameter.ExecutionContext = null;

       return r2;
+    }
+
+    public class EvaluationsCounter {
+      public int FunctionEvaluations = 0;
+      public int GradientEvaluations = 0;
     }

     
…
       int maxIterations, bool updateVariableWeights = true,
       double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
-      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null) {
+      bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

       // numeric constants in the tree become variables for constant opt
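The new optional counter argument lets callers collect totals across calls; when it stays null, OptimizeConstants allocates a private counter (see below) and the counts are simply discarded. A call sketch following the invocation in InstrumentedApply (argument values are placeholders):

  var counter = new EvaluationsCounter();
  double quality = OptimizeConstants(interpreter, tree, problemData, rows,
    applyLinearScaling: true, maxIterations: 10,
    updateVariableWeights: true, updateConstantsInTree: true, counter: counter);
  // counter.FunctionEvaluations and counter.GradientEvaluations now hold totals.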
     
…
       TreeToAutoDiffTermConverter.ParametricFunction func;
       TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
-      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, out parameters, out initialConstants, out func, out func_grad))
+      if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
         throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
       if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have an R² of 0.0
-
       var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

       // extract initial constants
-      double[] c = new double[initialConstants.Length + 2];
-      {
+      double[] c;
+      if (applyLinearScaling) {
+        c = new double[initialConstants.Length + 2];
         c[0] = 0.0;
         c[1] = 1.0;
         Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
-      }
-      double[] originalConstants = (double[])c.Clone();
+      } else {
+        c = (double[])initialConstants.Clone();
+      }
+
       double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
+
+      if (counter == null) counter = new EvaluationsCounter();
+      var rowEvaluationsCounter = new EvaluationsCounter();

       alglib.lsfitstate state;
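The layout of the optimizer's parameter vector now depends on applyLinearScaling: with scaling, c[0] holds the additive term β (initialized to 0.0) and c[1] the multiplicative term α (initialized to 1.0), followed by the tree's constants; without scaling, c is simply a clone of initialConstants. A hypothetical helper mirroring the copy logic used further down:

  // c = [beta, alpha, k0, ..., kn] with scaling; [k0, ..., kn] without.
  static double[] ExtractTreeConstants(double[] c, bool applyLinearScaling) {
    if (!applyLinearScaling) return (double[])c.Clone();
    var treeConstants = new double[c.Length - 2];
    Array.Copy(c, 2, treeConstants, 0, treeConstants.Length); // skip beta, alpha
    return treeConstants;
  }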
     
…
         alglib.lsfitsetxrep(state, iterationCallback != null);
         //alglib.lsfitsetgradientcheck(state, 0.001);
-        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, null);
+        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
         alglib.lsfitresults(state, out retVal, out c, out rep);
-      }
-      catch (ArithmeticException) {
+      } catch (ArithmeticException) {
         return originalQuality;
-      }
-      catch (alglib.alglibexception) {
+      } catch (alglib.alglibexception) {
         return originalQuality;
       }

+      counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
+      counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;
+
       // retVal == -7 => constant optimization failed due to wrong gradient
-      if (retVal != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
+      if (retVal != -7) {
+        if (applyLinearScaling) {
+          var tmp = new double[c.Length - 2];
+          Array.Copy(c, 2, tmp, 0, tmp.Length);
+          UpdateConstants(tree, tmp, updateVariableWeights);
+        } else UpdateConstants(tree, c, updateVariableWeights);
+      }
       var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

-      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
+      if (!updateConstantsInTree) UpdateConstants(tree, initialConstants, updateVariableWeights);
+
       if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
-        UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
+        UpdateConstants(tree, initialConstants, updateVariableWeights);
         return originalQuality;
       }
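alglib's lsfit callbacks fire once per fitted row, so rowEvaluationsCounter grows by n (the number of rows, set up earlier in this method) for each whole-model pass; dividing by n before merging converts raw callback counts into model evaluations. A back-of-envelope check with assumed numbers:

  int n = 100;                                 // rows in the fitting set (assumed)
  int passes = 25;                             // full function passes (assumed)
  int rawCallbackCount = passes * n;           // what the row-level callbacks accumulate
  int modelEvaluations = rawCallbackCount / n; // 25, the value merged into counter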
     
…
       return (double[] c, double[] x, ref double fx, object o) => {
         fx = func(c, x);
+        var counter = (EvaluationsCounter)o;
+        counter.FunctionEvaluations++;
       };
     }
     
…
     private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
       return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
-        var tupel = func_grad(c, x);
-        fx = tupel.Item2;
-        Array.Copy(tupel.Item1, grad, grad.Length);
+        var tuple = func_grad(c, x);
+        fx = tuple.Item2;
+        Array.Copy(tuple.Item1, grad, grad.Length);
+        var counter = (EvaluationsCounter)o;
+        counter.GradientEvaluations++;
       };
     }
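The counter reaches these lambdas through alglib's opaque user-state argument: whatever object is handed to lsfitfit is passed back as o on every callback invocation, so each lambda casts it to EvaluationsCounter and increments. Sketched in isolation (delegate shape as used above; the function body is a stand-in):

  var counter = new EvaluationsCounter();
  alglib.ndimensional_pfunc f = (double[] c, double[] x, ref double fx, object o) => {
    fx = 0.0;                                  // stand-in for func(c, x)
    ((EvaluationsCounter)o).FunctionEvaluations++;
  };
  // alglib.lsfitfit(state, f, grad, xrep, counter);  // counter arrives as o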
  • stable/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Converters/TreeToAutoDiffTermConverter.cs

r15145 → r15515

…
     #endregion

-    public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable,
+    public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable, bool addLinearScalingTerms,
       out List<DataForVariable> parameters, out double[] initialConstants,
       out ParametricFunction func,
     
…

       // use a transformator object which holds the state (variable list, parameter list, ...) for recursive transformation of the tree
-      var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable);
+      var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable, addLinearScalingTerms);
       AutoDiff.Term term;
       try {
     
…
     private readonly List<AutoDiff.Variable> variables;
     private readonly bool makeVariableWeightsVariable;
-
-    private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable) {
+    private readonly bool addLinearScalingTerms;
+
+    private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable, bool addLinearScalingTerms) {
       this.makeVariableWeightsVariable = makeVariableWeightsVariable;
+      this.addLinearScalingTerms = addLinearScalingTerms;
       this.initialConstants = new List<double>();
       this.parameters = new Dictionary<DataForVariable, AutoDiff.Variable>();
     
…
       }
       if (node.Symbol is StartSymbol) {
-        var alpha = new AutoDiff.Variable();
-        var beta = new AutoDiff.Variable();
-        variables.Add(beta);
-        variables.Add(alpha);
-        return ConvertToAutoDiff(node.GetSubtree(0)) * alpha + beta;
+        if (addLinearScalingTerms) {
+          // scaling variables α, β are given at the beginning of the parameter vector
+          var alpha = new AutoDiff.Variable();
+          var beta = new AutoDiff.Variable();
+          variables.Add(beta);
+          variables.Add(alpha);
+          var t = ConvertToAutoDiff(node.GetSubtree(0));
+          return t * alpha + beta;
+        } else return ConvertToAutoDiff(node.GetSubtree(0));
       }
       throw new ConversionException();
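With addLinearScalingTerms enabled, the root term becomes t·α + β; because β and α are added to variables first (in that order), they occupy the first two slots of the parameter vector, matching the c[0] = 0.0 (β), c[1] = 1.0 (α) initialization on the evaluator side. At evaluation time the wrapping amounts to (a sketch, not the AutoDiff API):

  // What the StartSymbol wrapping computes, given c = [beta, alpha, ...]:
  static double ApplyLinearScaling(double treeOutput, double[] c) {
    double beta = c[0], alpha = c[1];          // identity scaling at the start
    return treeOutput * alpha + beta;          // t * alpha + beta
  }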