Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
10/19/12 15:00:31 (12 years ago)
Author:
mkommend
Message:

#1976: Updated the constant optimization evaluator to allow optimization of subtraction, and added a parameter indicating whether the optimized constants should be written back into the tree.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r8730 r8823  
    3939    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    4040    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
     41    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
    4142
    4243    private const string EvaluatedTreesResultName = "EvaluatedTrees";
     
    6263      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    6364    }
     65    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
     66      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
     67    }
    6468
    6569    public IntValue ConstantOptimizationIterations {
     
    7478    public PercentValue ConstantOptimizationRowsPercentage {
    7579      get { return ConstantOptimizationRowsPercentageParameter.Value; }
     80    }
     81    public bool UpdateConstantsInTree {
     82      get { return UpdateConstantsInTreeParameter.Value.Value; }
     83      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    7684    }
    7785
     
    9199      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1), true));
    92100      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
     101      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
    93102
    94103      Parameters.Add(new LookupParameter<IntValue>(EvaluatedTreesResultName));
     
    98107    public override IDeepCloneable Clone(Cloner cloner) {
    99108      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
     109    }
     110
     111    [StorableHook(HookType.AfterDeserialization)]
     112    private void AfterDeserialization() {
     113      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
     114        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
    100115    }
    101116
     
    108123        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
    109124           constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
    110            EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower,
     125           EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree,
    111126          EvaluatedTreesParameter.ActualValue, EvaluatedTreeNodesParameter.ActualValue);
    112127        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
     
    157172    }
    158173
     174    #region derivations of functions
    159175    // create function factory for arctangent
    160176    private readonly Func<Term, UnaryFunc> arctan = UnaryFunc.Factory(
    161         x => Math.Atan(x),      // evaluate
    162         x => 1 / (1 + x * x));  // derivative of atan
    163 
     177      eval: Math.Atan,
     178      diff: x => 1 / (1 + x * x));
    164179    private static readonly Func<Term, UnaryFunc> sin = UnaryFunc.Factory(
    165       x => Math.Sin(x),
    166       x => Math.Cos(x));
     180      eval: Math.Sin,
     181      diff: Math.Cos);
    167182    private static readonly Func<Term, UnaryFunc> cos = UnaryFunc.Factory(
    168       x => Math.Cos(x),
    169       x => -Math.Sin(x));
     183       eval: Math.Cos,
     184       diff: x => -Math.Sin(x));
    170185    private static readonly Func<Term, UnaryFunc> tan = UnaryFunc.Factory(
    171       x => Math.Tan(x),
    172       x => 1 + Math.Tan(x) * Math.Tan(x));
     186      eval: Math.Tan,
     187      diff: x => 1 + Math.Tan(x) * Math.Tan(x));
    173188    private static readonly Func<Term, UnaryFunc> square = UnaryFunc.Factory(
    174       x => x * x,
    175       x => 2 * x);
     189       eval: x => x * x,
     190       diff: x => 2 * x);
    176191    private static readonly Func<Term, UnaryFunc> erf = UnaryFunc.Factory(
    177       x => alglib.errorfunction(x),
    178       x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI));
    179 
     192      eval: alglib.errorfunction,
     193      diff: x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI));
    180194    private static readonly Func<Term, UnaryFunc> norm = UnaryFunc.Factory(
    181       x => alglib.normaldistribution(x),
    182       x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI)
    183       );
     195      eval: alglib.normaldistribution,
     196      diff: x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI));
     197    #endregion
    184198
    185199
    186200    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
    187       IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
     201      IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
    188202
    189203      List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
     
    249263        return 0.0;
    250264      }
     265      var newTree = tree;
     266      if (!updateConstantsInTree) newTree = (ISymbolicExpressionTree)tree.Clone();
    251267      {
    252268        // only when no error occurred
    253269        // set constants in tree
    254270        int i = 2;
    255         foreach (var node in terminalNodes) {
     271        foreach (var node in newTree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
    256272          ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
    257273          VariableTreeNode variableTreeNode = node as VariableTreeNode;
     
    261277            variableTreeNode.Weight = c[i++];
    262278        }
    263       }
    264 
    265       return SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
     279
     280      }
     281      return SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, newTree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
    266282    }
    267283
     
    310326            return false;
    311327          }
     328          terms.Add(t);
     329        }
     330        term = AutoDiff.TermBuilder.Sum(terms);
     331        return true;
     332      }
     333      if (node.Symbol is Subtraction) {
     334        List<AutoDiff.Term> terms = new List<Term>();
     335        for (int i = 0; i < node.SubtreeCount; i++) {
     336          AutoDiff.Term t;
     337          if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
     338            term = null;
     339            return false;
     340          }
     341          if (i > 0) t = -t;
    312342          terms.Add(t);
    313343        }
Note: See TracChangeset for help on using the changeset viewer.