
Timestamp: 04/16/13 13:13:41 (12 years ago)
Author: spimming
Message: #1888: Merged revisions from trunk
Location: branches/OaaS
Files: 14 edited, 1 copied

  • branches/OaaS

  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression

  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4

    • Property svn:ignore changed
        old value:
          bin
          *.user
          HeuristicLabProblemsDataAnalysisSymbolicRegressionPlugin.cs
          obj
          *.vs10x
          Plugin.cs
        new value:
          *.user
          Plugin.cs
          bin
          obj
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r8053 r9363  
    2020#endregion
    2121
     22using System;
    2223using System.Collections.Generic;
    2324using System.Linq;
     25using AutoDiff;
    2426using HeuristicLab.Common;
    2527using HeuristicLab.Core;
     
    3739    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    3840    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    39 
    40     private const string EvaluatedTreesResultName = "EvaluatedTrees";
    41     private const string EvaluatedTreeNodesResultName = "EvaluatedTreeNodes";
    42 
    43     public ILookupParameter<IntValue> EvaluatedTreesParameter {
    44       get { return (ILookupParameter<IntValue>)Parameters[EvaluatedTreesResultName]; }
    45     }
    46     public ILookupParameter<IntValue> EvaluatedTreeNodesParameter {
    47       get { return (ILookupParameter<IntValue>)Parameters[EvaluatedTreeNodesResultName]; }
    48     }
     41    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
    4942
    5043    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
     
    6053      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    6154    }
     55    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
     56      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
     57    }
    6258
    6359    public IntValue ConstantOptimizationIterations {
     
    7268    public PercentValue ConstantOptimizationRowsPercentage {
    7369      get { return ConstantOptimizationRowsPercentageParameter.Value; }
     70    }
     71    public bool UpdateConstantsInTree {
     72      get { return UpdateConstantsInTreeParameter.Value.Value; }
     73      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    7474    }
    7575
     
    8585    public SymbolicRegressionConstantOptimizationEvaluator()
    8686      : base() {
    87       Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(3), true));
     87      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10), true));
    8888      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true));
    8989      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1), true));
    9090      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
    91 
    92       Parameters.Add(new LookupParameter<IntValue>(EvaluatedTreesResultName));
    93       Parameters.Add(new LookupParameter<IntValue>(EvaluatedTreeNodesResultName));
     91      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
    9492    }
    9593
     
    9896    }
    9997
     98    [StorableHook(HookType.AfterDeserialization)]
     99    private void AfterDeserialization() {
     100      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
     101        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
     102    }
     103
    100104    public override IOperation Apply() {
    101       AddResults();
    102       int seed = RandomParameter.ActualValue.Next();
    103105      var solution = SymbolicExpressionTreeParameter.ActualValue;
    104106      double quality;
     
    106108        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
    107109        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
    108            constantOptimizationRows, ConstantOptimizationImprovement.Value, ConstantOptimizationIterations.Value, 0.001,
    109            EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower,
    110           EvaluatedTreesParameter.ActualValue, EvaluatedTreeNodesParameter.ActualValue);
     110           constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
     111           EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree);
     112
    111113        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
    112114          var evaluationRows = GenerateRowsToEvaluate();
    113           quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows);
     115          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
    114116        }
    115117      } else {
    116118        var evaluationRows = GenerateRowsToEvaluate();
    117         quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows);
     119        quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
    118120      }
    119121      QualityParameter.ActualValue = new DoubleValue(quality);
    120       EvaluatedTreesParameter.ActualValue.Value += 1;
    121       EvaluatedTreeNodesParameter.ActualValue.Value += solution.Length;
    122122
    123123      if (Successor != null)
     
    127127    }
    128128
    129     private void AddResults() {
    130       if (EvaluatedTreesParameter.ActualValue == null) {
    131         var scope = ExecutionContext.Scope;
    132         while (scope.Parent != null)
    133           scope = scope.Parent;
    134         scope.Variables.Add(new Core.Variable(EvaluatedTreesResultName, new IntValue()));
    135       }
    136       if (EvaluatedTreeNodesParameter.ActualValue == null) {
    137         var scope = ExecutionContext.Scope;
    138         while (scope.Parent != null)
    139           scope = scope.Parent;
    140         scope.Variables.Add(new Core.Variable(EvaluatedTreeNodesResultName, new IntValue()));
    141       }
    142     }
    143 
    144129    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
    145130      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
    146131      EstimationLimitsParameter.ExecutionContext = context;
    147 
    148       double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows);
     132      ApplyLinearScalingParameter.ExecutionContext = context;
     133
     134      // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
     135      // because Evaluate() is used to get the quality of evolved models on
     136      // different partitions of the dataset (e.g., best validation model)
     137      double r2 = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
    149138
    150139      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
    151140      EstimationLimitsParameter.ExecutionContext = null;
     141      ApplyLinearScalingParameter.ExecutionContext = null;
    152142
    153143      return r2;
    154144    }
    155145
     146    #region derivations of functions
     147    // create function factory for arctangent
     148    private readonly Func<Term, UnaryFunc> arctan = UnaryFunc.Factory(
     149      eval: Math.Atan,
     150      diff: x => 1 / (1 + x * x));
     151    private static readonly Func<Term, UnaryFunc> sin = UnaryFunc.Factory(
     152      eval: Math.Sin,
     153      diff: Math.Cos);
     154    private static readonly Func<Term, UnaryFunc> cos = UnaryFunc.Factory(
     155       eval: Math.Cos,
     156       diff: x => -Math.Sin(x));
     157    private static readonly Func<Term, UnaryFunc> tan = UnaryFunc.Factory(
     158      eval: Math.Tan,
     159      diff: x => 1 + Math.Tan(x) * Math.Tan(x));
     160    private static readonly Func<Term, UnaryFunc> square = UnaryFunc.Factory(
     161       eval: x => x * x,
     162       diff: x => 2 * x);
     163    private static readonly Func<Term, UnaryFunc> erf = UnaryFunc.Factory(
     164      eval: alglib.errorfunction,
     165      diff: x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI));
     166    private static readonly Func<Term, UnaryFunc> norm = UnaryFunc.Factory(
     167      eval: alglib.normaldistribution,
     168      diff: x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI));
     169    #endregion
     170
     171
    156172    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
    157       IEnumerable<int> rows, double improvement, int iterations, double differentialStep, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
     173      IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true) {
     174
     175      List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
     176      List<AutoDiff.Variable> parameters = new List<AutoDiff.Variable>();
     177      List<string> variableNames = new List<string>();
     178
     179      AutoDiff.Term func;
     180      if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, out func))
     181        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
     182      if (variableNames.Count == 0) return 0.0;
     183
     184      AutoDiff.IParametricCompiledTerm compiledFunc = AutoDiff.TermUtils.Compile(func, variables.ToArray(), parameters.ToArray());
     185
    158186      List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
    159       double[] c = new double[terminalNodes.Count];
    160       int treeLength = tree.Length;
    161 
    162       //extract inital constants
    163       for (int i = 0; i < terminalNodes.Count; i++) {
    164         ConstantTreeNode constantTreeNode = terminalNodes[i] as ConstantTreeNode;
    165         if (constantTreeNode != null) c[i] = constantTreeNode.Value;
    166         VariableTreeNode variableTreeNode = terminalNodes[i] as VariableTreeNode;
    167         if (variableTreeNode != null) c[i] = variableTreeNode.Weight;
    168       }
    169 
    170       double epsg = 0;
    171       double epsf = improvement;
    172       double epsx = 0;
    173       int maxits = iterations;
    174       double diffstep = differentialStep;
    175 
    176       alglib.minlmstate state;
    177       alglib.minlmreport report;
    178 
    179       alglib.minlmcreatev(1, c, diffstep, out state);
    180       alglib.minlmsetcond(state, epsg, epsf, epsx, maxits);
    181       alglib.minlmoptimize(state, CreateCallBack(interpreter, tree, problemData, rows, upperEstimationLimit, lowerEstimationLimit, treeLength, evaluatedTrees, evaluatedTreeNodes), null, terminalNodes);
    182       alglib.minlmresults(state, out c, out report);
    183 
    184       for (int i = 0; i < c.Length; i++) {
    185         ConstantTreeNode constantTreeNode = terminalNodes[i] as ConstantTreeNode;
    186         if (constantTreeNode != null) constantTreeNode.Value = c[i];
    187         VariableTreeNode variableTreeNode = terminalNodes[i] as VariableTreeNode;
    188         if (variableTreeNode != null) variableTreeNode.Weight = c[i];
    189       }
    190 
    191       return (state.fi[0] - 1) * -1;
    192     }
    193 
    194     private static alglib.ndimensional_fvec CreateCallBack(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, double upperEstimationLimit, double lowerEstimationLimit, int treeLength, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
    195       return (double[] arg, double[] fi, object obj) => {
    196         // update constants of tree
    197         List<SymbolicExpressionTreeTerminalNode> terminalNodes = (List<SymbolicExpressionTreeTerminalNode>)obj;
    198         for (int i = 0; i < terminalNodes.Count; i++) {
    199           ConstantTreeNode constantTreeNode = terminalNodes[i] as ConstantTreeNode;
    200           if (constantTreeNode != null) constantTreeNode.Value = arg[i];
    201           VariableTreeNode variableTreeNode = terminalNodes[i] as VariableTreeNode;
    202           if (variableTreeNode != null) variableTreeNode.Weight = arg[i];
    203         }
    204 
    205         double quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows);
    206 
    207         fi[0] = 1 - quality;
    208         if (evaluatedTrees != null) evaluatedTrees.Value++;
    209         if (evaluatedTreeNodes != null) evaluatedTreeNodes.Value += treeLength;
     187      double[] c = new double[variables.Count];
     188
     189      {
     190        c[0] = 0.0;
     191        c[1] = 1.0;
     192        //extract inital constants
     193        int i = 2;
     194        foreach (var node in terminalNodes) {
     195          ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
     196          VariableTreeNode variableTreeNode = node as VariableTreeNode;
     197          if (constantTreeNode != null)
     198            c[i++] = constantTreeNode.Value;
     199          else if (variableTreeNode != null)
     200            c[i++] = variableTreeNode.Weight;
     201        }
     202      }
     203      double[] originalConstants = (double[])c.Clone();
     204      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
     205
     206      alglib.lsfitstate state;
     207      alglib.lsfitreport rep;
     208      int info;
     209
     210      Dataset ds = problemData.Dataset;
     211      double[,] x = new double[rows.Count(), variableNames.Count];
     212      int row = 0;
     213      foreach (var r in rows) {
     214        for (int col = 0; col < variableNames.Count; col++) {
     215          x[row, col] = ds.GetDoubleValue(variableNames[col], r);
     216        }
     217        row++;
     218      }
     219      double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
     220      int n = x.GetLength(0);
     221      int m = x.GetLength(1);
     222      int k = c.Length;
     223
     224      alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(compiledFunc);
     225      alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(compiledFunc);
     226
     227      try {
     228        alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
     229        alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
     230        //alglib.lsfitsetgradientcheck(state, 0.001);
     231        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, null, null);
     232        alglib.lsfitresults(state, out info, out c, out rep);
     233      }
     234      catch (ArithmeticException) {
     235        return originalQuality;
     236      }
     237      catch (alglib.alglibexception) {
     238        return originalQuality;
     239      }
     240
     241      //info == -7  => constant optimization failed due to wrong gradient
     242      if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray());
     243      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
     244
     245      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray());
     246      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
     247        UpdateConstants(tree, originalConstants.Skip(2).ToArray());
     248        return originalQuality;
     249      }
     250      return quality;
     251    }
     252
     253    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants) {
     254      int i = 0;
     255      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
     256        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
     257        VariableTreeNode variableTreeNode = node as VariableTreeNode;
     258        if (constantTreeNode != null)
     259          constantTreeNode.Value = constants[i++];
     260        else if (variableTreeNode != null)
     261          variableTreeNode.Weight = constants[i++];
     262      }
     263    }
     264
     265    private static alglib.ndimensional_pfunc CreatePFunc(AutoDiff.IParametricCompiledTerm compiledFunc) {
     266      return (double[] c, double[] x, ref double func, object o) => {
     267        func = compiledFunc.Evaluate(c, x);
    210268      };
    211269    }
    212270
     271    private static alglib.ndimensional_pgrad CreatePGrad(AutoDiff.IParametricCompiledTerm compiledFunc) {
     272      return (double[] c, double[] x, ref double func, double[] grad, object o) => {
     273        var tupel = compiledFunc.Differentiate(c, x);
     274        func = tupel.Item2;
     275        Array.Copy(tupel.Item1, grad, grad.Length);
     276      };
     277    }
     278
     279    private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, out AutoDiff.Term term) {
     280      if (node.Symbol is Constant) {
     281        var var = new AutoDiff.Variable();
     282        variables.Add(var);
     283        term = var;
     284        return true;
     285      }
     286      if (node.Symbol is Variable) {
     287        var varNode = node as VariableTreeNode;
     288        var par = new AutoDiff.Variable();
     289        parameters.Add(par);
     290        variableNames.Add(varNode.VariableName);
     291        var w = new AutoDiff.Variable();
     292        variables.Add(w);
     293        term = AutoDiff.TermBuilder.Product(w, par);
     294        return true;
     295      }
     296      if (node.Symbol is Addition) {
     297        List<AutoDiff.Term> terms = new List<Term>();
     298        foreach (var subTree in node.Subtrees) {
     299          AutoDiff.Term t;
     300          if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out t)) {
     301            term = null;
     302            return false;
     303          }
     304          terms.Add(t);
     305        }
     306        term = AutoDiff.TermBuilder.Sum(terms);
     307        return true;
     308      }
     309      if (node.Symbol is Subtraction) {
     310        List<AutoDiff.Term> terms = new List<Term>();
     311        for (int i = 0; i < node.SubtreeCount; i++) {
     312          AutoDiff.Term t;
     313          if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
     314            term = null;
     315            return false;
     316          }
     317          if (i > 0) t = -t;
     318          terms.Add(t);
     319        }
     320        term = AutoDiff.TermBuilder.Sum(terms);
     321        return true;
     322      }
     323      if (node.Symbol is Multiplication) {
     324        AutoDiff.Term a, b;
     325        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
     326          !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
     327          term = null;
     328          return false;
     329        } else {
     330          List<AutoDiff.Term> factors = new List<Term>();
     331          foreach (var subTree in node.Subtrees.Skip(2)) {
     332            AutoDiff.Term f;
     333            if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
     334              term = null;
     335              return false;
     336            }
     337            factors.Add(f);
     338          }
     339          term = AutoDiff.TermBuilder.Product(a, b, factors.ToArray());
     340          return true;
     341        }
     342      }
     343      if (node.Symbol is Division) {
     344        // only works for at least two subtrees
     345        AutoDiff.Term a, b;
     346        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
     347          !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
     348          term = null;
     349          return false;
     350        } else {
     351          List<AutoDiff.Term> factors = new List<Term>();
     352          foreach (var subTree in node.Subtrees.Skip(2)) {
     353            AutoDiff.Term f;
     354            if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
     355              term = null;
     356              return false;
     357            }
     358            factors.Add(1.0 / f);
     359          }
     360          term = AutoDiff.TermBuilder.Product(a, 1.0 / b, factors.ToArray());
     361          return true;
     362        }
     363      }
     364      if (node.Symbol is Logarithm) {
     365        AutoDiff.Term t;
     366        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     367          term = null;
     368          return false;
     369        } else {
     370          term = AutoDiff.TermBuilder.Log(t);
     371          return true;
     372        }
     373      }
     374      if (node.Symbol is Exponential) {
     375        AutoDiff.Term t;
     376        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     377          term = null;
     378          return false;
     379        } else {
     380          term = AutoDiff.TermBuilder.Exp(t);
     381          return true;
     382        }
     383      } if (node.Symbol is Sine) {
     384        AutoDiff.Term t;
     385        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     386          term = null;
     387          return false;
     388        } else {
     389          term = sin(t);
     390          return true;
     391        }
     392      } if (node.Symbol is Cosine) {
     393        AutoDiff.Term t;
     394        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     395          term = null;
     396          return false;
     397        } else {
     398          term = cos(t);
     399          return true;
     400        }
     401      } if (node.Symbol is Tangent) {
     402        AutoDiff.Term t;
     403        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     404          term = null;
     405          return false;
     406        } else {
     407          term = tan(t);
     408          return true;
     409        }
     410      }
     411      if (node.Symbol is Square) {
     412        AutoDiff.Term t;
     413        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     414          term = null;
     415          return false;
     416        } else {
     417          term = square(t);
     418          return true;
     419        }
     420      } if (node.Symbol is Erf) {
     421        AutoDiff.Term t;
     422        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     423          term = null;
     424          return false;
     425        } else {
     426          term = erf(t);
     427          return true;
     428        }
     429      } if (node.Symbol is Norm) {
     430        AutoDiff.Term t;
     431        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
     432          term = null;
     433          return false;
     434        } else {
     435          term = norm(t);
     436          return true;
     437        }
     438      }
     439      if (node.Symbol is StartSymbol) {
     440        var alpha = new AutoDiff.Variable();
     441        var beta = new AutoDiff.Variable();
     442        variables.Add(beta);
     443        variables.Add(alpha);
     444        AutoDiff.Term branchTerm;
     445        if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out branchTerm)) {
     446          term = branchTerm * alpha + beta;
     447          return true;
     448        } else {
     449          term = null;
     450          return false;
     451        }
     452      }
     453      term = null;
     454      return false;
     455    }
     456
     457    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
     458      var containsUnknownSymbol = (
     459        from n in tree.Root.GetSubtree(0).IterateNodesPrefix()
     460        where
     461         !(n.Symbol is Variable) &&
     462         !(n.Symbol is Constant) &&
     463         !(n.Symbol is Addition) &&
     464         !(n.Symbol is Subtraction) &&
     465         !(n.Symbol is Multiplication) &&
     466         !(n.Symbol is Division) &&
     467         !(n.Symbol is Logarithm) &&
     468         !(n.Symbol is Exponential) &&
     469         !(n.Symbol is Sine) &&
     470         !(n.Symbol is Cosine) &&
     471         !(n.Symbol is Tangent) &&
     472         !(n.Symbol is Square) &&
     473         !(n.Symbol is Erf) &&
     474         !(n.Symbol is Norm) &&
     475         !(n.Symbol is StartSymbol)
     476        select n).
     477      Any();
     478      return !containsUnknownSymbol;
     479    }
    213480  }
    214481}
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionSingleObjectiveEvaluator.cs

    r8127 r9363  
    3030namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
    3131  [StorableClass]
    32   public abstract class SymbolicRegressionSingleObjectiveEvaluator : SymbolicDataAnalysisSingleObjectiveEvaluator<IRegressionProblemData>, ISymbolicRegressionSingleObjectiveEvaluator {
    33     private const string ApplyLinearScalingParameterName = "ApplyLinearScaling";
    34     public IFixedValueParameter<BoolValue> ApplyLinearScalingParameter {
    35       get { return (IFixedValueParameter<BoolValue>)Parameters[ApplyLinearScalingParameterName]; }
    36     }
    37     public bool ApplyLinearScaling {
    38       get { return ApplyLinearScalingParameter.Value.Value; }
    39       set { ApplyLinearScalingParameter.Value.Value = value; }
    40     }
    41 
     32  public abstract class SymbolicRegressionSingleObjectiveEvaluator : SymbolicDataAnalysisSingleObjectiveEvaluator<IRegressionProblemData>, ISymbolicRegressionSingleObjectiveEvaluator { 
    4233    [StorableConstructor]
    4334    protected SymbolicRegressionSingleObjectiveEvaluator(bool deserializing) : base(deserializing) { }
    4435    protected SymbolicRegressionSingleObjectiveEvaluator(SymbolicRegressionSingleObjectiveEvaluator original, Cloner cloner) : base(original, cloner) { }
    45     protected SymbolicRegressionSingleObjectiveEvaluator()
    46       : base() {
    47       Parameters.Add(new FixedValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Flag that indicates if the individual should be linearly scaled before evaluating.", new BoolValue(true)));
    48       ApplyLinearScalingParameter.Hidden = true;
    49     }
    50 
    51     [StorableHook(HookType.AfterDeserialization)]
    52     private void AfterDeserialization() {
    53       if (!Parameters.ContainsKey(ApplyLinearScalingParameterName)) {
    54         Parameters.Add(new FixedValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Flag that indicates if the individual should be linearly scaled before evaluating.", new BoolValue(false)));
    55         ApplyLinearScalingParameter.Hidden = true;
    56       }
    57     }
    58 
    59     [ThreadStatic]
    60     private static double[] cache;
    61 
    62     protected static void CalculateWithScaling(IEnumerable<double> targetValues, IEnumerable<double> estimatedValues,
    63       double lowerEstimationLimit, double upperEstimationLimit,
    64       IOnlineCalculator calculator, int maxRows) {
    65       if (cache == null || cache.GetLength(0) < maxRows) {
    66         cache = new double[maxRows];
    67       }
    68 
    69       //calculate linear scaling
    70       //the static methods of the calculator could not be used as it performs a check if the enumerators have an equal amount of elements
    71       //this is not true if the cache is used
    72       int i = 0;
    73       var linearScalingCalculator = new OnlineLinearScalingParameterCalculator();
    74       var targetValuesEnumerator = targetValues.GetEnumerator();
    75       var estimatedValuesEnumerator = estimatedValues.GetEnumerator();
    76       while (targetValuesEnumerator.MoveNext() && estimatedValuesEnumerator.MoveNext()) {
    77         double target = targetValuesEnumerator.Current;
    78         double estimated = estimatedValuesEnumerator.Current;
    79         cache[i] = estimated;
    80         linearScalingCalculator.Add(estimated, target);
    81         i++;
    82       }
    83       double alpha = linearScalingCalculator.Alpha;
    84       double beta = linearScalingCalculator.Beta;
    85 
    86       //calculate the quality by using the passed online calculator
    87       targetValuesEnumerator = targetValues.GetEnumerator();
    88       var scaledBoundedEstimatedValuesEnumerator = Enumerable.Range(0, i).Select(x => cache[x] * beta + alpha)
    89         .LimitToRange(lowerEstimationLimit, upperEstimationLimit).GetEnumerator();
    90 
    91       while (targetValuesEnumerator.MoveNext() & scaledBoundedEstimatedValuesEnumerator.MoveNext()) {
    92         calculator.Add(targetValuesEnumerator.Current, scaledBoundedEstimatedValuesEnumerator.Current);
    93       }
    94     }
     36    protected SymbolicRegressionSingleObjectiveEvaluator(): base() {}   
    9537  }
    9638}
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionSingleObjectiveMaxAbsoluteErrorEvaluator.cs

    r8113 r9363  
    2020#endregion
    2121
    22 using System;
    2322using System.Collections.Generic;
    2423using HeuristicLab.Common;
     
    4746      IEnumerable<int> rows = GenerateRowsToEvaluate();
    4847
    49       double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScaling);
     48      double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value);
    5049      QualityParameter.ActualValue = new DoubleValue(quality);
    5150
     
    6867        mse = OnlineMaxAbsoluteErrorCalculator.Calculate(targetValues, boundedEstimatedValues, out errorState);
    6968      }
    70       if (errorState != OnlineCalculatorError.None) return Double.NaN;
    71       else return mse;
     69      if (errorState != OnlineCalculatorError.None) return double.NaN;
     70      return mse;
    7271    }
    7372
     
    7574      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
    7675      EstimationLimitsParameter.ExecutionContext = context;
     76      ApplyLinearScalingParameter.ExecutionContext = context;
    7777
    78       double mse = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScaling);
     78      double mse = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
    7979
    8080      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
    8181      EstimationLimitsParameter.ExecutionContext = null;
     82      ApplyLinearScalingParameter.ExecutionContext = null;
    8283
    8384      return mse;
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionSingleObjectiveMeanAbsoluteErrorEvaluator.cs

    r8113 r9363  
    2020#endregion
    2121
    22 using System;
    2322using System.Collections.Generic;
    2423using HeuristicLab.Common;
     
    4746      IEnumerable<int> rows = GenerateRowsToEvaluate();
    4847
    49       double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScaling);
     48      double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value);
    5049      QualityParameter.ActualValue = new DoubleValue(quality);
    5150
     
    5857      OnlineCalculatorError errorState;
    5958
    60       double mse;
     59      double mae;
    6160      if (applyLinearScaling) {
    6261        var maeCalculator = new OnlineMeanAbsoluteErrorCalculator();
    6362        CalculateWithScaling(targetValues, estimatedValues, lowerEstimationLimit, upperEstimationLimit, maeCalculator, problemData.Dataset.Rows);
    6463        errorState = maeCalculator.ErrorState;
    65         mse = maeCalculator.MeanAbsoluteError;
     64        mae = maeCalculator.MeanAbsoluteError;
    6665      } else {
    67         IEnumerable<double> boundedEstimatedValues = estimatedValues.LimitToRange(lowerEstimationLimit,
    68                                                                                   upperEstimationLimit);
    69         mse = OnlineMeanSquaredErrorCalculator.Calculate(targetValues, boundedEstimatedValues, out errorState);
     66        IEnumerable<double> boundedEstimatedValues = estimatedValues.LimitToRange(lowerEstimationLimit, upperEstimationLimit);
     67        mae = OnlineMeanAbsoluteErrorCalculator.Calculate(targetValues, boundedEstimatedValues, out errorState);
    7068      }
    71       if (errorState != OnlineCalculatorError.None) return Double.NaN;
    72       else return mse;
     69      if (errorState != OnlineCalculatorError.None) return double.NaN;
     70      return mae;
    7371    }
    7472
     
    7674      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
    7775      EstimationLimitsParameter.ExecutionContext = context;
     76      ApplyLinearScalingParameter.ExecutionContext = context;
    7877
    79       double mse = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScaling);
     78      double mse = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
    8079
    8180      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
    8281      EstimationLimitsParameter.ExecutionContext = null;
     82      ApplyLinearScalingParameter.ExecutionContext = null;
    8383
    8484      return mse;
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionSingleObjectiveMeanSquaredErrorEvaluator.cs

    r8113 r9363  
    2020#endregion
    2121
    22 using System;
    2322using System.Collections.Generic;
    2423using HeuristicLab.Common;
     
    4746      IEnumerable<int> rows = GenerateRowsToEvaluate();
    4847
    49       double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScaling);
     48      double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value);
    5049      QualityParameter.ActualValue = new DoubleValue(quality);
    5150
     
    6867        mse = OnlineMeanSquaredErrorCalculator.Calculate(targetValues, boundedEstimatedValues, out errorState);
    6968      }
    70       if (errorState != OnlineCalculatorError.None) return Double.NaN;
    71       else return mse;
     69      if (errorState != OnlineCalculatorError.None) return double.NaN;
     70      return mse;
    7271    }
    7372
     
    7574      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
    7675      EstimationLimitsParameter.ExecutionContext = context;
     76      ApplyLinearScalingParameter.ExecutionContext = context;
    7777
    78       double mse = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScaling);
     78      double mse = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
    7979
    8080      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
    8181      EstimationLimitsParameter.ExecutionContext = null;
     82      ApplyLinearScalingParameter.ExecutionContext = null;
    8283
    8384      return mse;
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.cs

    r7672 r9363  
    4848      IEnumerable<int> rows = GenerateRowsToEvaluate();
    4949
    50       double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows);
     50      double quality = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value);
    5151      QualityParameter.ActualValue = new DoubleValue(quality);
    5252
     
    5454    }
    5555
    56     public static double Calculate(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree solution, double lowerEstimationLimit, double upperEstimationLimit, IRegressionProblemData problemData, IEnumerable<int> rows) {
     56    public static double Calculate(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree solution, double lowerEstimationLimit, double upperEstimationLimit, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling) {
    5757      IEnumerable<double> estimatedValues = interpreter.GetSymbolicExpressionTreeValues(solution, problemData.Dataset, rows);
    58       IEnumerable<double> originalValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
     58      IEnumerable<double> targetValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, rows);
    5959      OnlineCalculatorError errorState;
    60       double r2 = OnlinePearsonsRSquaredCalculator.Calculate(estimatedValues, originalValues, out errorState);
    61       if (errorState != OnlineCalculatorError.None) return 0.0;
    62       else return r2;
     60
     61      double r2;
     62      if (applyLinearScaling) {
     63        var r2Calculator = new OnlinePearsonsRSquaredCalculator();
     64        CalculateWithScaling(targetValues, estimatedValues, lowerEstimationLimit, upperEstimationLimit, r2Calculator, problemData.Dataset.Rows);
     65        errorState = r2Calculator.ErrorState;
     66        r2 = r2Calculator.RSquared;
     67      } else {
     68        IEnumerable<double> boundedEstimatedValues = estimatedValues.LimitToRange(lowerEstimationLimit, upperEstimationLimit);
     69        r2 = OnlinePearsonsRSquaredCalculator.Calculate(targetValues, boundedEstimatedValues, out errorState);
     70      }
     71      if (errorState != OnlineCalculatorError.None) return double.NaN;
     72      return r2;
    6373    }
    6474
     
    6676      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
    6777      EstimationLimitsParameter.ExecutionContext = context;
     78      ApplyLinearScalingParameter.ExecutionContext = context;
    6879
    69       double r2 = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows);
     80      double r2 = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
    7081
    7182      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
    7283      EstimationLimitsParameter.ExecutionContext = null;
     84      ApplyLinearScalingParameter.ExecutionContext = null;
    7385
    7486      return r2;
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveProblem.cs

    r8175 r9363  
    6161      EstimationLimitsParameter.Hidden = true;
    6262
     63
     64      ApplyLinearScalingParameter.Value.Value = true;
    6365      Maximization.Value = true;
    6466      MaximumSymbolicExpressionTreeDepth.Value = InitialMaximumTreeDepth;
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveTrainingBestSolutionAnalyzer.cs

    r7259 r9363  
    2222using HeuristicLab.Common;
    2323using HeuristicLab.Core;
    24 using HeuristicLab.Data;
    2524using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2625using HeuristicLab.Parameters;
     
    3837    private const string SymbolicDataAnalysisTreeInterpreterParameterName = "SymbolicDataAnalysisTreeInterpreter";
    3938    private const string EstimationLimitsParameterName = "EstimationLimits";
    40     private const string ApplyLinearScalingParameterName = "ApplyLinearScaling";
    4139    #region parameter properties
    4240    public ILookupParameter<IRegressionProblemData> ProblemDataParameter {
     
    4846    public IValueLookupParameter<DoubleLimit> EstimationLimitsParameter {
    4947      get { return (IValueLookupParameter<DoubleLimit>)Parameters[EstimationLimitsParameterName]; }
    50     }
    51     public IValueParameter<BoolValue> ApplyLinearScalingParameter {
    52       get { return (IValueParameter<BoolValue>)Parameters[ApplyLinearScalingParameterName]; }
    53     }
    54     #endregion
    55 
    56     #region properties
    57     public BoolValue ApplyLinearScaling {
    58       get { return ApplyLinearScalingParameter.Value; }
    5948    }
    6049    #endregion
     
    6857      Parameters.Add(new LookupParameter<ISymbolicDataAnalysisExpressionTreeInterpreter>(SymbolicDataAnalysisTreeInterpreterParameterName, "The symbolic data analysis tree interpreter for the symbolic expression tree."));
    6958      Parameters.Add(new ValueLookupParameter<DoubleLimit>(EstimationLimitsParameterName, "The lower and upper limit for the estimated values produced by the symbolic regression model."));
    70       Parameters.Add(new ValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Flag that indicates if the produced symbolic regression solution should be linearly scaled.", new BoolValue(true)));
    7159    }
    7260    public override IDeepCloneable Clone(Cloner cloner) {
     
    7664    protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree, double bestQuality) {
    7765      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
    78       if (ApplyLinearScaling.Value)
    79         SymbolicRegressionModel.Scale(model, ProblemDataParameter.ActualValue);
     66      if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
    8067      return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
    8168    }
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer.cs

    r8169 r9363  
    2222using HeuristicLab.Common;
    2323using HeuristicLab.Core;
    24 using HeuristicLab.Data;
    2524using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    26 using HeuristicLab.Parameters;
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
     
    3432  [StorableClass]
    3533  public sealed class SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer : SymbolicDataAnalysisSingleObjectiveTrainingParetoBestSolutionAnalyzer<IRegressionProblemData, ISymbolicRegressionSolution> {
    36     private const string ApplyLinearScalingParameterName = "ApplyLinearScaling";
    37     #region parameter properties
    38     public IValueParameter<BoolValue> ApplyLinearScalingParameter {
    39       get { return (IValueParameter<BoolValue>)Parameters[ApplyLinearScalingParameterName]; }
    40     }
    41     #endregion
    42 
    43     #region properties
    44     public BoolValue ApplyLinearScaling {
    45       get { return ApplyLinearScalingParameter.Value; }
    46     }
    47     #endregion
    4834
    4935    [StorableConstructor]
    5036    private SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer(bool deserializing) : base(deserializing) { }
    5137    private SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer(SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer original, Cloner cloner) : base(original, cloner) { }
    52     public SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer()
    53       : base() {
    54       Parameters.Add(new ValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Flag that indicates if the produced symbolic regression solution should be linearly scaled.", new BoolValue(true)));
    55     }
     38    public SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer() : base() { }
    5639    public override IDeepCloneable Clone(Cloner cloner) {
    5740      return new SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer(this, cloner);
     
    6043    protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree) {
    6144      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
    62       if (ApplyLinearScaling.Value)
    63         SymbolicRegressionModel.Scale(model, ProblemDataParameter.ActualValue);
     45      if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
    6446      return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
    6547    }
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveValidationBestSolutionAnalyzer.cs

    r7259 r9363  
    2222using HeuristicLab.Common;
    2323using HeuristicLab.Core;
    24 using HeuristicLab.Data;
    2524using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2625using HeuristicLab.Parameters;
     
    3635    ISymbolicDataAnalysisBoundedOperator {
    3736    private const string EstimationLimitsParameterName = "EstimationLimits";
    38     private const string ApplyLinearScalingParameterName = "ApplyLinearScaling";
    3937
    4038    #region parameter properties
    4139    public IValueLookupParameter<DoubleLimit> EstimationLimitsParameter {
    4240      get { return (IValueLookupParameter<DoubleLimit>)Parameters[EstimationLimitsParameterName]; }
    43     }
    44     public IValueParameter<BoolValue> ApplyLinearScalingParameter {
    45       get { return (IValueParameter<BoolValue>)Parameters[ApplyLinearScalingParameterName]; }
    46     }
    47     #endregion
    48 
    49     #region properties
    50     public BoolValue ApplyLinearScaling {
    51       get { return ApplyLinearScalingParameter.Value; }
    5241    }
    5342    #endregion
     
    5948      : base() {
    6049      Parameters.Add(new ValueLookupParameter<DoubleLimit>(EstimationLimitsParameterName, "The lower and upper limit for the estimated values produced by the symbolic regression model."));
    61       Parameters.Add(new ValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Flag that indicates if the produced symbolic regression solution should be linearly scaled.", new BoolValue(true)));
    6250    }
    6351
     
    6856    protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree, double bestQuality) {
    6957      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
    70       if (ApplyLinearScaling.Value)
    71         SymbolicRegressionModel.Scale(model, ProblemDataParameter.ActualValue);
     58      if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
    7259      return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
    7360    }
  • branches/OaaS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer.cs

    r8169 r9363  
    2222using HeuristicLab.Common;
    2323using HeuristicLab.Core;
    24 using HeuristicLab.Data;
    2524using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    26 using HeuristicLab.Parameters;
    2725using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2826
     
    3432  [StorableClass]
    3533  public sealed class SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer : SymbolicDataAnalysisSingleObjectiveValidationParetoBestSolutionAnalyzer<ISymbolicRegressionSolution, ISymbolicRegressionSingleObjectiveEvaluator, IRegressionProblemData> {
    36     private const string ApplyLinearScalingParameterName = "ApplyLinearScaling";
    37     #region parameter properties
    38     public IValueParameter<BoolValue> ApplyLinearScalingParameter {
    39       get { return (IValueParameter<BoolValue>)Parameters[ApplyLinearScalingParameterName]; }
    40     }
    41     #endregion
    42 
    43     #region properties
    44     public BoolValue ApplyLinearScaling {
    45       get { return ApplyLinearScalingParameter.Value; }
    46     }
    47     #endregion
    48 
    4934    [StorableConstructor]
    5035    private SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer(bool deserializing) : base(deserializing) { }
    5136    private SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer(SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer original, Cloner cloner) : base(original, cloner) { }
    52     public SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer()
    53       : base() {
    54       Parameters.Add(new ValueParameter<BoolValue>(ApplyLinearScalingParameterName, "Flag that indicates if the produced symbolic regression solution should be linearly scaled.", new BoolValue(true)));
    55     }
     37    public SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer() : base() { }
     38
    5639    public override IDeepCloneable Clone(Cloner cloner) {
    5740      return new SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer(this, cloner);
     
    6043    protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree) {
    6144      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
    62       if (ApplyLinearScaling.Value)
    63         SymbolicRegressionModel.Scale(model, ProblemDataParameter.ActualValue);
     45      if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
    6446      return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
    6547    }
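
The most substantial change in this set is in SymbolicRegressionConstantOptimizationEvaluator.cs: constant optimization now builds an AutoDiff term for the tree, compiles it, and hands analytic gradients to alglib's least-squares fitting (lsfitcreatefg/lsfitfit) instead of the earlier minlm run with numeric differences. Purely as an illustration of that pattern, here is a minimal, self-contained sketch; the toy model y = c0 + c1*x, the synthetic data, and the class and variable names are assumptions made for the example, while the AutoDiff and alglib calls mirror those visible in the changeset above.

    using System;
    using AutoDiff;

    public static class ConstantFitSketch {
      public static void Main() {
        // Toy model y = c0 + c1 * x: the constants are AutoDiff "variables" to be fitted,
        // the data column x enters as an AutoDiff "parameter" (same split as in the evaluator).
        var x  = new AutoDiff.Variable();
        var c0 = new AutoDiff.Variable();
        var c1 = new AutoDiff.Variable();
        Term func = c0 + c1 * x;
        IParametricCompiledTerm compiledFunc =
          TermUtils.Compile(func, new[] { c0, c1 }, new[] { x });

        // synthetic data for y = 2 + 3x (made up for the example)
        const int n = 10;
        double[,] xs = new double[n, 1];
        double[] ys = new double[n];
        for (int i = 0; i < n; i++) { xs[i, 0] = i; ys[i] = 2 + 3 * i; }

        double[] c = { 0.0, 1.0 };            // initial constants
        int m = xs.GetLength(1), k = c.Length;

        // same callback shape as CreatePFunc / CreatePGrad in the changeset
        alglib.ndimensional_pfunc pfunc = (double[] coeff, double[] row, ref double f, object o) => {
          f = compiledFunc.Evaluate(coeff, row);
        };
        alglib.ndimensional_pgrad pgrad = (double[] coeff, double[] row, ref double f, double[] grad, object o) => {
          var tupel = compiledFunc.Differentiate(coeff, row);
          f = tupel.Item2;
          Array.Copy(tupel.Item1, grad, grad.Length);
        };

        alglib.lsfitstate state;
        alglib.lsfitreport rep;
        int info;
        alglib.lsfitcreatefg(xs, ys, c, n, m, k, false, out state);
        alglib.lsfitsetcond(state, 0.0, 0.0, 10);   // default tolerances, at most 10 iterations
        alglib.lsfitfit(state, pfunc, pgrad, null, null);
        alglib.lsfitresults(state, out info, out c, out rep);

        Console.WriteLine("info={0}  c0={1:F3}  c1={2:F3}", info, c[0], c[1]);
      }
    }

As in the evaluator, the constants being fitted are the AutoDiff variables while the dataset columns enter as parameters; this is also why OptimizeConstants reserves c[0] and c[1] for the additive and multiplicative linear-scaling terms (beta/alpha) introduced by the StartSymbol case of TryTransformToAutoDiff.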