
Timestamp: 07/08/16 14:40:02 (9 years ago)
Author: gkronber
Message: #2434: merged trunk changes r12934:14026 from trunk to branch
Location: branches/crossvalidation-2434
Files: 9 edited

Legend (for the diffs below):

  (no marker)  Unmodified
  +            Added
  -            Removed
  • branches/crossvalidation-2434

  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression

  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4

    • Property svn:mergeinfo set to:
      /stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression (merged, eligible)
      /stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 (merged, eligible)
      /trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 (merged, eligible)
      /branches/Benchmarking/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 6917-7005
      /branches/Benchmarking/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 6917-7005
      /branches/CloningRefactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 4656-4721
      /branches/CloningRefactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 4656-4721
      /branches/DataAnalysis Refactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5471-5473
      /branches/DataAnalysis Refactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5471-5473
      /branches/DataAnalysis SolutionEnsembles/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5815-6180
      /branches/DataAnalysis SolutionEnsembles/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5815-6180
      /branches/DataAnalysis.ComplexityAnalyzer/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 10750-13239
      /branches/DataAnalysis/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 4458-4459,4462,4464
      /branches/DataAnalysis/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 4458-4459,4462,4464
      /branches/DataPreprocessing/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 10085-11101
      /branches/DataPreprocessing/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 10085-11101
      /branches/GP.Grammar.Editor/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 6284-6795
      /branches/GP.Grammar.Editor/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 6284-6795
      /branches/GP.Symbols (TimeLag, Diff, Integral)/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5060
      /branches/GP.Symbols (TimeLag, Diff, Integral)/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5060
      /branches/HLScript/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 10331-10358
      /branches/HLScript/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 10331-10358
      /branches/HeuristicLab.DatasetRefactor/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 11570-12508
      /branches/HeuristicLab.Problems.Orienteering/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 11130-12721
      /branches/HeuristicLab.TreeSimplifier/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 8388-8942
      /branches/HeuristicLab.TreeSimplifier/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 8388-8942
      /branches/LogResidualEvaluator/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 10202-10483
      /branches/LogResidualEvaluator/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 10202-10483
      /branches/NET40/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5138-5162
      /branches/NET40/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5138-5162
      /branches/ParallelEngine/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5175-5192
      /branches/ParallelEngine/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5175-5192
      /branches/ProblemInstancesRegressionAndClassification/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 7748-7810
      /branches/ProblemInstancesRegressionAndClassification/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 7748-7810
      /branches/QAPAlgorithms/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 6350-6627
      /branches/QAPAlgorithms/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 6350-6627
      /branches/Restructure trunk solution/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 6828
      /branches/Restructure trunk solution/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 6828
      /branches/SpectralKernelForGaussianProcesses/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 10204-10479
      /branches/SpectralKernelForGaussianProcesses/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 10204-10479
      /branches/SuccessProgressAnalysis/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5370-5682
      /branches/SuccessProgressAnalysis/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5370-5682
      /branches/SymbolicExpressionTreeDiversityAnalyzers/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 12029-12100
      /branches/SymbolicExpressionTreeEncoding/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 12336-12421
      /branches/Trunk/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 6829-6865
      /branches/Trunk/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 6829-6865
      /branches/VNS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5594-5752
      /branches/VNS/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5594-5752
      /branches/histogram/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression 5959-6341
      /branches/histogram/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 5959-6341
  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs

    r12509 → r14029
     private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
     private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
+    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

     public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
…
       get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
     }
+    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
+    }
+

     public IntValue ConstantOptimizationIterations {
…
       get { return UpdateConstantsInTreeParameter.Value.Value; }
       set { UpdateConstantsInTreeParameter.Value.Value = value; }
+    }
+
+    public bool UpdateVariableWeights {
+      get { return UpdateVariableWeightsParameter.Value.Value; }
+      set { UpdateVariableWeightsParameter.Value.Value = value; }
     }

…
       : base() {
       Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10), true));
-      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true));
+      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true) { Hidden = true });
       Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1), true));
       Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
-      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
+      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be  optimized.", new BoolValue(true)) { Hidden = true });
     }

…
       if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
         Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
+        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be  optimized.", new BoolValue(true)));
     }

…
         IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
         quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
-          EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree);
+          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);

         if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
…


-    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
-      IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true) {
+    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true) {

       List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
…

       AutoDiff.Term func;
-      if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, out func))
+      if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out func))
         throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
       if (variableNames.Count == 0) return 0.0;

-      AutoDiff.IParametricCompiledTerm compiledFunc = AutoDiff.TermUtils.Compile(func, variables.ToArray(), parameters.ToArray());
-
-      List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
+      AutoDiff.IParametricCompiledTerm compiledFunc = func.Compile(variables.ToArray(), parameters.ToArray());
+
+      List<SymbolicExpressionTreeTerminalNode> terminalNodes = null;
+      if (updateVariableWeights)
+        terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
+      else
+        terminalNodes = new List<SymbolicExpressionTreeTerminalNode>(tree.Root.IterateNodesPrefix().OfType<ConstantTreeNode>());
+
+      //extract inital constants
       double[] c = new double[variables.Count];
-
       {
         c[0] = 0.0;
         c[1] = 1.0;
-        //extract inital constants
         int i = 2;
         foreach (var node in terminalNodes) {
…
           if (constantTreeNode != null)
             c[i++] = constantTreeNode.Value;
-          else if (variableTreeNode != null)
+          else if (updateVariableWeights && variableTreeNode != null)
             c[i++] = variableTreeNode.Weight;
         }
…

       //info == -7  => constant optimization failed due to wrong gradient
-      if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray());
+      if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
       var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

-      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray());
+      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
       if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
-        UpdateConstants(tree, originalConstants.Skip(2).ToArray());
+        UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
         return originalQuality;
       }
…
     }

-    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants) {
+    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
       int i = 0;
       foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
…
         if (constantTreeNode != null)
           constantTreeNode.Value = constants[i++];
-        else if (variableTreeNode != null)
+        else if (updateVariableWeights && variableTreeNode != null)
           variableTreeNode.Weight = constants[i++];
       }
…
     }

-    private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, out AutoDiff.Term term) {
+    private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, bool updateVariableWeights, out AutoDiff.Term term) {
       if (node.Symbol is Constant) {
         var var = new AutoDiff.Variable();
…
         parameters.Add(par);
         variableNames.Add(varNode.VariableName);
-        var w = new AutoDiff.Variable();
-        variables.Add(w);
-        term = AutoDiff.TermBuilder.Product(w, par);
+
+        if (updateVariableWeights) {
+          var w = new AutoDiff.Variable();
+          variables.Add(w);
+          term = AutoDiff.TermBuilder.Product(w, par);
+        } else {
+          term = par;
+        }
         return true;
       }
…
         foreach (var subTree in node.Subtrees) {
           AutoDiff.Term t;
-          if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out t)) {
+          if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
             term = null;
             return false;
…
         for (int i = 0; i < node.SubtreeCount; i++) {
           AutoDiff.Term t;
-          if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
+          if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, updateVariableWeights, out t)) {
             term = null;
             return false;
…
       }
       if (node.Symbol is Multiplication) {
-        AutoDiff.Term a, b;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
-          !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
-          term = null;
-          return false;
-        } else {
-          List<AutoDiff.Term> factors = new List<Term>();
-          foreach (var subTree in node.Subtrees.Skip(2)) {
-            AutoDiff.Term f;
-            if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
-              term = null;
-              return false;
-            }
-            factors.Add(f);
+        List<AutoDiff.Term> terms = new List<Term>();
+        foreach (var subTree in node.Subtrees) {
+          AutoDiff.Term t;
+          if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
+            term = null;
+            return false;
           }
-          term = AutoDiff.TermBuilder.Product(a, b, factors.ToArray());
-          return true;
-        }
+          terms.Add(t);
+        }
+        if (terms.Count == 1) term = terms[0];
+        else term = terms.Aggregate((a, b) => new AutoDiff.Product(a, b));
+        return true;
+
       }
       if (node.Symbol is Division) {
-        // only works for at least two subtrees
-        AutoDiff.Term a, b;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
-          !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
-          term = null;
-          return false;
-        } else {
-          List<AutoDiff.Term> factors = new List<Term>();
-          foreach (var subTree in node.Subtrees.Skip(2)) {
-            AutoDiff.Term f;
-            if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
-              term = null;
-              return false;
-            }
-            factors.Add(1.0 / f);
+        List<AutoDiff.Term> terms = new List<Term>();
+        foreach (var subTree in node.Subtrees) {
+          AutoDiff.Term t;
+          if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
+            term = null;
+            return false;
           }
-          term = AutoDiff.TermBuilder.Product(a, 1.0 / b, factors.ToArray());
-          return true;
-        }
+          terms.Add(t);
+        }
+        if (terms.Count == 1) term = 1.0 / terms[0];
+        else term = terms.Aggregate((a, b) => new AutoDiff.Product(a, 1.0 / b));
+        return true;
       }
       if (node.Symbol is Logarithm) {
         AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
       if (node.Symbol is Exponential) {
         AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
       if (node.Symbol is Square) {
         AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
           return true;
         }
-      } if (node.Symbol is SquareRoot) {
-        AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+      }
+      if (node.Symbol is SquareRoot) {
+        AutoDiff.Term t;
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
           return true;
         }
-      } if (node.Symbol is Sine) {
-        AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+      }
+      if (node.Symbol is Sine) {
+        AutoDiff.Term t;
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
           return true;
         }
-      } if (node.Symbol is Cosine) {
-        AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+      }
+      if (node.Symbol is Cosine) {
+        AutoDiff.Term t;
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
           return true;
         }
-      } if (node.Symbol is Tangent) {
-        AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+      }
+      if (node.Symbol is Tangent) {
+        AutoDiff.Term t;
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
           return true;
         }
-      } if (node.Symbol is Erf) {
-        AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+      }
+      if (node.Symbol is Erf) {
+        AutoDiff.Term t;
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
           return true;
         }
-      } if (node.Symbol is Norm) {
-        AutoDiff.Term t;
-        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
+      }
+      if (node.Symbol is Norm) {
+        AutoDiff.Term t;
+        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out t)) {
           term = null;
           return false;
…
         variables.Add(alpha);
         AutoDiff.Term branchTerm;
-        if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out branchTerm)) {
+        if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out branchTerm)) {
           term = branchTerm * alpha + beta;
           return true;
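
The new UpdateVariableWeights switch controls whether variable weights are optimized together with the constants: when it is false, TryTransformToAutoDiff emits the bare variable parameter (term = par) and only ConstantTreeNode values enter the optimized parameter vector. A minimal, self-contained C# sketch of that selection step (hypothetical stand-in node types, not HeuristicLab's API):

    using System;
    using System.Collections.Generic;

    // Hypothetical stand-ins for ConstantTreeNode / VariableTreeNode.
    class ConstNode { public double Value; }
    class VarNode { public double Weight; }

    static class UpdateVariableWeightsSketch {
      // Mirrors the extraction step in OptimizeConstants: slots 0/1 are reserved for the
      // linear-scaling offset/slope; the flag decides whether variable weights follow.
      static double[] ExtractInitialConstants(IEnumerable<object> terminalNodes, bool updateVariableWeights) {
        var c = new List<double> { 0.0, 1.0 };
        foreach (var node in terminalNodes) {
          if (node is ConstNode constNode) c.Add(constNode.Value);
          else if (updateVariableWeights && node is VarNode varNode) c.Add(varNode.Weight);
        }
        return c.ToArray();
      }

      static void Main() {
        var nodes = new object[] { new ConstNode { Value = 3.1 }, new VarNode { Weight = 0.5 } };
        Console.WriteLine(ExtractInitialConstants(nodes, updateVariableWeights: true).Length);  // 4
        Console.WriteLine(ExtractInitialConstants(nodes, updateVariableWeights: false).Length); // 3
      }
    }
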
  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveTrainingBestSolutionAnalyzer.cs

    r12012 → r14029

     protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree, double bestQuality) {
-      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
+      var model = new SymbolicRegressionModel(ProblemDataParameter.ActualValue.TargetVariable, (ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
       if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
       return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveTrainingParetoBestSolutionAnalyzer.cs

    r12012 → r14029

     protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree) {
-      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
+      var model = new SymbolicRegressionModel(ProblemDataParameter.ActualValue.TargetVariable, (ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
       if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
       return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveValidationBestSolutionAnalyzer.cs

    r12012 → r14029

     protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree, double bestQuality) {
-      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
+      var model = new SymbolicRegressionModel(ProblemDataParameter.ActualValue.TargetVariable, (ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
       if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
       return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSingleObjectiveValidationParetoBestSolutionAnalyzer.cs

    r12012 → r14029

     protected override ISymbolicRegressionSolution CreateSolution(ISymbolicExpressionTree bestTree) {
-      var model = new SymbolicRegressionModel((ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
+      var model = new SymbolicRegressionModel(ProblemDataParameter.ActualValue.TargetVariable, (ISymbolicExpressionTree)bestTree.Clone(), SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper);
       if (ApplyLinearScalingParameter.ActualValue.Value) model.Scale(ProblemDataParameter.ActualValue);
       return new SymbolicRegressionSolution(model, (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone());
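
All four analyzers above now pass the problem data's target variable as the first constructor argument of SymbolicRegressionModel. A hedged usage sketch of the updated call, mirroring the diffs (tree, interpreter, estimationLimits, problemData and applyLinearScaling are assumed placeholders, not the analyzers' actual parameter objects):

    // Assumed context; names are placeholders for the analyzer's parameter values.
    var model = new SymbolicRegressionModel(
      problemData.TargetVariable,                    // new first argument
      (ISymbolicExpressionTree)tree.Clone(),
      interpreter,
      estimationLimits.Lower,
      estimationLimits.Upper);
    if (applyLinearScaling) model.Scale(problemData);
    var solution = new SymbolicRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
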
  • branches/crossvalidation-2434/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/SymbolicRegressionSolutionsAnalyzer.cs

    r12012 → r14029
 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Operators;
 using HeuristicLab.Optimization;
…

 namespace HeuristicLab.Problems.DataAnalysis.Symbolic.Regression {
+  [StorableClass]
   public class SymbolicRegressionSolutionsAnalyzer : SingleSuccessorOperator, IAnalyzer {
     private const string ResultCollectionParameterName = "Results";
     private const string RegressionSolutionQualitiesResultName = "Regression Solution Qualities";
+    private const string TrainingQualityParameterName = "TrainingRSquared";
+    private const string TestQualityParameterName = "TestRSquared";

     public ILookupParameter<ResultCollection> ResultCollectionParameter {
       get { return (ILookupParameter<ResultCollection>)Parameters[ResultCollectionParameterName]; }
+    }
+    public ILookupParameter<DoubleValue> TrainingQualityParameter {
+      get { return (ILookupParameter<DoubleValue>)Parameters[TrainingQualityParameterName]; }
+    }
+    public ILookupParameter<DoubleValue> TestQualityParameter {
+      get { return (ILookupParameter<DoubleValue>)Parameters[TestQualityParameterName]; }
     }

…
     public SymbolicRegressionSolutionsAnalyzer() {
       Parameters.Add(new LookupParameter<ResultCollection>(ResultCollectionParameterName, "The result collection to store the analysis results."));
+      Parameters.Add(new LookupParameter<DoubleValue>(TrainingQualityParameterName));
+      Parameters.Add(new LookupParameter<DoubleValue>(TestQualityParameterName));
+    }
+
+    [StorableHook(HookType.AfterDeserialization)]
+    private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+
+      #region Backwards compatible code, remove with 3.4
+      if (!Parameters.ContainsKey(TrainingQualityParameterName))
+        Parameters.Add(new LookupParameter<DoubleValue>(TrainingQualityParameterName));
+      if (!Parameters.ContainsKey(TestQualityParameterName))
+        Parameters.Add(new LookupParameter<DoubleValue>(TestQualityParameterName));
+      #endregion
     }

…

       var dataTable = (DataTable)results[RegressionSolutionQualitiesResultName].Value;
+
+      // only if the parameters are available (not available in old persisted code)
+      ILookupParameter<DoubleValue> trainingQualityParam = null;
+      ILookupParameter<DoubleValue> testQualityParam = null;
+      // store actual names of parameter because it is changed below
+      trainingQualityParam = TrainingQualityParameter;
+      string prevTrainingQualityParamName = trainingQualityParam.ActualName;
+      testQualityParam = TestQualityParameter;
+      string prevTestQualityParamName = testQualityParam.ActualName;
       foreach (var result in results.Where(r => r.Value is IRegressionSolution)) {
         var solution = (IRegressionSolution)result.Value;

-        var trainingR2 = result.Name + Environment.NewLine + "Training R²";
-        if (!dataTable.Rows.ContainsKey(trainingR2))
-          dataTable.Rows.Add(new DataRow(trainingR2));
+        var trainingR2Name = result.Name + " Training R²";
+        if (!dataTable.Rows.ContainsKey(trainingR2Name))
+          dataTable.Rows.Add(new DataRow(trainingR2Name));

-        var testR2 = result.Name + Environment.NewLine + " Test R²";
-        if (!dataTable.Rows.ContainsKey(testR2))
-          dataTable.Rows.Add(new DataRow(testR2));
+        var testR2Name = result.Name + " Test R²";
+        if (!dataTable.Rows.ContainsKey(testR2Name))
+          dataTable.Rows.Add(new DataRow(testR2Name));

-        dataTable.Rows[trainingR2].Values.Add(solution.TrainingRSquared);
-        dataTable.Rows[testR2].Values.Add(solution.TestRSquared);
+        dataTable.Rows[trainingR2Name].Values.Add(solution.TrainingRSquared);
+        dataTable.Rows[testR2Name].Values.Add(solution.TestRSquared);
+
+        // also add training and test R² to the scope using the parameters
+        // HACK: we change the ActualName of the parameter to write two variables for each solution in the results collection
+        trainingQualityParam.ActualName = trainingR2Name;
+        trainingQualityParam.ActualValue = new DoubleValue(solution.TrainingRSquared);
+        testQualityParam.ActualName = testR2Name;
+        testQualityParam.ActualValue = new DoubleValue(solution.TestRSquared);
       }
+
+      trainingQualityParam.ActualName = prevTrainingQualityParamName;
+      testQualityParam.ActualName = prevTestQualityParamName;

       return base.Apply();
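
The analyzer records each solution's TrainingRSquared and TestRSquared, which correspond to the squared Pearson correlation between estimated and target values. For reference, a minimal self-contained C# sketch of that quantity (an illustration, not HeuristicLab's implementation):

    using System;
    using System.Linq;

    static class RSquaredSketch {
      // Squared Pearson correlation coefficient between estimated and target values.
      static double PearsonRSquared(double[] estimated, double[] target) {
        double meanE = estimated.Average(), meanT = target.Average();
        double cov = 0, varE = 0, varT = 0;
        for (int i = 0; i < estimated.Length; i++) {
          cov  += (estimated[i] - meanE) * (target[i] - meanT);
          varE += (estimated[i] - meanE) * (estimated[i] - meanE);
          varT += (target[i] - meanT) * (target[i] - meanT);
        }
        if (varE == 0 || varT == 0) return 0.0;
        double r = cov / Math.Sqrt(varE * varT);
        return r * r;
      }

      static void Main() {
        var estimated = new[] { 1.0, 2.0, 3.0 };
        var target = new[] { 1.1, 1.9, 3.2 };
        Console.WriteLine(PearsonRSquared(estimated, target)); // close to 1.0
      }
    }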