Timestamp: 01/10/22 10:15:25 (3 years ago)
Author: dpiringe
Message: #3138
  • merged trunk into branch
Location: branches/3138_Shape_Constraints_Transformations
Files: 11 edited

Legend: unchanged lines have no prefix, removed lines are prefixed with "-", added lines with "+", and "…" marks elided lines between hunks.
  • branches/3138_Shape_Constraints_Transformations

  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis

  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4

  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/GBM/GradientBoostingRegressionAlgorithm.cs

    r17180 → r18180

          var addNode = new Addition().CreateTreeNode();
          var mulNode = new Multiplication().CreateTreeNode();
    -     var scaleNode = (ConstantTreeNode)new Constant().CreateTreeNode(); // all models are scaled using the same nu
    +     var scaleNode = (NumberTreeNode)new Number().CreateTreeNode(); // all models are scaled using the same nu
          scaleNode.Value = nu;
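
    For context: the hunk above only swaps the symbol used for the shrinkage factor (Constant → Number). A minimal sketch of how such a scaled model term is assembled with the renamed symbols; modelNode and nu are hypothetical placeholders, not the verbatim GBM code.

        // Sketch: build "nu * model" as a symbolic subtree and add it to the overall sum.
        var mulNode = new Multiplication().CreateTreeNode();
        var scaleNode = (NumberTreeNode)new Number().CreateTreeNode();
        scaleNode.Value = nu;                 // every boosted model is scaled by the same nu
        mulNode.AddSubtree(scaleNode);
        mulNode.AddSubtree(modelNode);        // hypothetical: the current model's tree
        addNode.AddSubtree(mulNode);          // accumulate into the sum node from the hunk above
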
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessCovarianceOptimizationProblem.cs

    r17180 → r18180

          private const string ProblemDataParameterName = "ProblemData";
    -     private const string ConstantOptIterationsParameterName = "Constant optimization steps";
    +     private const string ParameterOptIterationsParameterName = "Parameter optimization steps";
          private const string RestartsParameterName = "Restarts";
    …
            get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
          }
    -     public IFixedValueParameter<IntValue> ConstantOptIterationsParameter {
    -       get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptIterationsParameterName]; }
    +     public IFixedValueParameter<IntValue> ParameterOptIterationsParameter {
    +       get { return (IFixedValueParameter<IntValue>)Parameters[ParameterOptIterationsParameterName]; }
          }
          public IFixedValueParameter<IntValue> RestartsParameter {
    …
          IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

    -     public int ConstantOptIterations {
    -       get { return ConstantOptIterationsParameter.Value.Value; }
    -       set { ConstantOptIterationsParameter.Value.Value = value; }
    +     public int ParameterOptIterations {
    +       get { return ParameterOptIterationsParameter.Value.Value; }
    +       set { ParameterOptIterationsParameter.Value.Value = value; }
          }
    …
            : base() {
            Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data for the regression problem", new RegressionProblemData()));
    -       Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptIterationsParameterName, "Number of optimization steps for hyperparameter values", new IntValue(50)));
    -       Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of random restarts for constant optimization.", new IntValue(10)));
    +       Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName, "Number of optimization steps for hyperparameter values", new IntValue(50)));
    +       Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of random restarts for parameter optimization.", new IntValue(10)));
            Parameters["Restarts"].Hidden = true;
            var g = new SimpleSymbolicExpressionGrammar();
    …
              double epsx = 0;
              double stpmax = 1;
    -         int maxits = ConstantOptIterations;
    +         int maxits = ParameterOptIterations;
              alglib.mincgstate state;
              alglib.mincgreport rep;
    …
          [StorableHook(HookType.AfterDeserialization)]
          private void AfterDeserialization() {
    +       if (!Parameters.ContainsKey(ParameterOptIterationsParameterName)) {
    +         if (Parameters.ContainsKey("Constant optimization steps")) {
    +           Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName, "Number of optimization steps for hyperparameter values", (IntValue)Parameters["Constant optimization steps"].ActualValue));
    +           Parameters.Remove("Constant optimization steps");
    +         } else {
    +           Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName, "Number of optimization steps for hyperparameter values", new IntValue(50)));
    +         }
    +       }
          }
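
    The renamed ParameterOptIterations value is the iteration limit handed to ALGLIB's conjugate-gradient optimizer (the hunk above shows epsx, stpmax, maxits, mincgstate and mincgreport). A hedged sketch of that wiring, using the public ALGLIB C# API as far as it is known here; x0 and Gradient are hypothetical placeholders, not the verbatim HeuristicLab code.

        // Sketch: pass the (renamed) iteration limit to ALGLIB's CG optimizer.
        double epsg = 0, epsf = 0, epsx = 0;
        int maxits = ParameterOptIterations;               // was ConstantOptIterations
        alglib.mincgstate state;
        alglib.mincgreport rep;
        alglib.mincgcreate(x0, out state);                 // x0: initial hyperparameter vector (placeholder)
        alglib.mincgsetcond(state, epsg, epsf, epsx, maxits);
        alglib.mincgoptimize(state, Gradient, null, null); // Gradient: objective/gradient callback (placeholder)
        alglib.mincgresults(state, out x0, out rep);
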
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs

    r17180 → r18180

          var startSy = new StartSymbol();
          var varCondSy = new VariableCondition() { IgnoreSlope = true };
    -     var constSy = new Constant();
    +     var numSy = new Number();

          var startNode = startSy.CreateTreeNode();
    -     startNode.AddSubtree(CreateSymbolicRegressionTreeRecursive(tree, 0, varCondSy, constSy));
    +     startNode.AddSubtree(CreateSymbolicRegressionTreeRecursive(tree, 0, varCondSy, numSy));
          var rootNode = rootSy.CreateTreeNode();
          rootNode.AddSubtree(startNode);
    …
        }

    -   private ISymbolicExpressionTreeNode CreateSymbolicRegressionTreeRecursive(TreeNode[] treeNodes, int nodeIdx, VariableCondition varCondSy, Constant constSy) {
    +   private ISymbolicExpressionTreeNode CreateSymbolicRegressionTreeRecursive(TreeNode[] treeNodes, int nodeIdx, VariableCondition varCondSy, Number numSy) {
          var curNode = treeNodes[nodeIdx];
          if (curNode.VarName == TreeNode.NO_VARIABLE) {
    -       var node = (ConstantTreeNode)constSy.CreateTreeNode();
    +       var node = (NumberTreeNode)numSy.CreateTreeNode();
            node.Value = curNode.Val;
            return node;
    …
            node.Threshold = curNode.Val;

    -       var left = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.LeftIdx, varCondSy, constSy);
    -       var right = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.RightIdx, varCondSy, constSy);
    +       var left = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.LeftIdx, varCondSy, numSy);
    +       var right = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.RightIdx, varCondSy, numSy);
            node.AddSubtree(left);
            node.AddSubtree(right);
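
    Because the diff shows the conversion only in fragments, here is the shape of the renamed recursion in one piece, as a sketch rather than the verbatim source (the variable-name setup between the hunks is elided).

        // Sketch: a leaf becomes a NumberTreeNode, an inner node a VariableConditionTreeNode
        // with its two recursively converted children attached.
        private ISymbolicExpressionTreeNode CreateSymbolicRegressionTreeRecursive(
            TreeNode[] treeNodes, int nodeIdx, VariableCondition varCondSy, Number numSy) {
          var curNode = treeNodes[nodeIdx];
          if (curNode.VarName == TreeNode.NO_VARIABLE) {
            var node = (NumberTreeNode)numSy.CreateTreeNode();
            node.Value = curNode.Val;          // leaf: prediction value
            return node;
          } else {
            var node = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
            // ... variable name setup elided, see the hunks above ...
            node.Threshold = curNode.Val;      // inner node: split threshold
            node.AddSubtree(CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.LeftIdx, varCondSy, numSy));
            node.AddSubtree(CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.RightIdx, varCondSy, numSy));
            return node;
          }
        }
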
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/NonlinearRegression/NonlinearRegression.cs

    r17180 → r18180

          : base() {
          Problem = new RegressionProblem();
    -     Parameters.Add(new FixedValueParameter<StringValue>(ModelStructureParameterName, "The function for which the parameters must be fit (only numeric constants are tuned).", new StringValue("1.0 * x*x + 0.0")));
    -     Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "The maximum number of iterations for constants optimization.", new IntValue(200)));
    +     Parameters.Add(new FixedValueParameter<StringValue>(ModelStructureParameterName,
    +       "The expression for which the <num> parameters should be fit.\n " +
    +       "Defined constants will not be modified.\n " +
    +       "Modifiable numbers are specified with <num>. To specify a default value within this number symbol, a default value can be declared by e.g. <num=1.0>.",
    +       new StringValue("<num> * x*x + 0.0")));
    +     Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "The maximum number of iterations for parameter optimization.", new IntValue(200)));
          Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of independent random restarts (>0)", new IntValue(10)));
          Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The PRNG seed value.", new IntValue()));
    …
        /// <summary>
    -   /// Fits a model to the data by optimizing the numeric constants.
    +   /// Fits a model to the data by optimizing parameters.
        /// Model is specified as infix expression containing variable names and numbers.
    -   /// The starting point for the numeric constants is initialized randomly if a random number generator is specified (~N(0,1)). Otherwise the user specified constants are
    +   /// The starting values for the parameters are initialized randomly if a random number generator is specified (~N(0,1)). Otherwise the user specified values are
        /// used as a starting point.
        /// </summary>
        /// <param name="problemData">Training and test data</param>
        /// <param name="modelStructure">The function as infix expression</param>
    -   /// <param name="maxIterations">Number of constant optimization iterations (using Levenberg-Marquardt algorithm)</param>
    -   /// <param name="random">Optional random number generator for random initialization of numeric constants.</param>
    +   /// <param name="maxIterations">Number of Levenberg-Marquardt iterations</param>
    +   /// <param name="random">Optional random number generator for random initialization of parameters.</param>
        /// <returns></returns>
        public static ISymbolicRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData, string modelStructure, int maxIterations, bool applyLinearScaling, IRandom rand = null) {
    …
          }

    -     if (!SymbolicRegressionConstantOptimizationEvaluator.CanOptimizeConstants(tree)) throw new ArgumentException("The optimizer does not support the specified model structure.");
    +     if (!SymbolicRegressionParameterOptimizationEvaluator.CanOptimizeParameters(tree)) throw new ArgumentException("The optimizer does not support the specified model structure.");

    -     // initialize constants randomly
    +     // initialize parameters randomly
          if (rand != null) {
    -       foreach (var node in tree.IterateNodesPrefix().OfType<ConstantTreeNode>()) {
    +       foreach (var node in tree.IterateNodesPrefix().OfType<NumberTreeNode>()) {
              double f = Math.Exp(NormalDistributedRandom.NextDouble(rand, 0, 1));
              double s = rand.NextDouble() < 0.5 ? -1 : 1;
    …
          var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();

    -     SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, tree, problemData, problemData.TrainingIndices,
    +     SymbolicRegressionParameterOptimizationEvaluator.OptimizeParameters(interpreter, tree, problemData, problemData.TrainingIndices,
            applyLinearScaling: applyLinearScaling, maxIterations: maxIterations,
    -       updateVariableWeights: false, updateConstantsInTree: true);
    +       updateVariableWeights: false, updateParametersInTree: true);

          var model = new SymbolicRegressionModel(problemData.TargetVariable, tree, (ISymbolicDataAnalysisExpressionTreeInterpreter)interpreter.Clone());
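
    The new ModelStructure description introduces the <num> placeholder for fittable parameters, while defined constants stay fixed. A hedged usage sketch of the CreateRegressionSolution overload shown above; problemData is assumed to be an existing IRegressionProblemData, and the model string follows the <num>/<num=1.0> convention from the parameter description.

        // Sketch: fit the two <num> placeholders of a quadratic model.
        // The literal 3.14 is a defined constant and is not modified by the optimizer.
        var solution = NonlinearRegression.CreateRegressionSolution(
          problemData,
          "<num> * x*x + <num=1.0> * x + 3.14",
          maxIterations: 200,          // Levenberg-Marquardt iterations
          applyLinearScaling: true);
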
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs

    r17931 → r18180

          }

    -     var constSy = new Constant();
    +     var numSy = new Number();
          var varCondSy = new VariableCondition() { IgnoreSlope = true };

    -     var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, constSy, varCondSy);
    +     var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, numSy, varCondSy);

          var startNode = new StartSymbol().CreateTreeNode();
    …
        }

    -   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Constant constSy, VariableCondition varCondSy) {
    +   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Number numSy, VariableCondition varCondSy) {

          // alglib source for evaluation of one tree (dfprocessinternal)
    …
          if ((double)(trees[k]) == (double)(-1)) {
    -       var constNode = (ConstantTreeNode)constSy.CreateTreeNode();
    -       constNode.Value = trees[k + 1];
    -       return constNode;
    +       var numNode = (NumberTreeNode)numSy.CreateTreeNode();
    +       numNode.Value = trees[k + 1];
    +       return numNode;
          } else {
            var condNode = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
    …
            condNode.Slope = double.PositiveInfinity;

    -       var left = CreateRegressionTreeRec(trees, offset, k + 3, constSy, varCondSy);
    -       var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), constSy, varCondSy);
    +       var left = CreateRegressionTreeRec(trees, offset, k + 3, numSy, varCondSy);
    +       var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), numSy, varCondSy);

            condNode.AddSubtree(left); // not 100% correct because interpreter uses: if(x <= thres) left() else right() and RF uses if(x < thres) left() else right() (see above)
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModelAlglib_3_7.cs

    r17931 → r18180

          }

    -     var constSy = new Constant();
    +     var numSy = new Number();
          var varCondSy = new VariableCondition() { IgnoreSlope = true };

    -     var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, constSy, varCondSy);
    +     var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, numSy, varCondSy);

          var startNode = new StartSymbol().CreateTreeNode();
    …
        }

    -   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Constant constSy, VariableCondition varCondSy) {
    +   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Number numSy, VariableCondition varCondSy) {

          // alglib source for evaluation of one tree (dfprocessinternal)
    …
          // }

    -     if ((double)(trees[k]) == (double)(-1)) {
    -       var constNode = (ConstantTreeNode)constSy.CreateTreeNode();
    -       constNode.Value = trees[k + 1];
    -       return constNode;
    +     if (trees[k] == -1) {
    +       var numNode = (NumberTreeNode)numSy.CreateTreeNode();
    +       numNode.Value = trees[k + 1];
    +       return numNode;
          } else {
            var condNode = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
    …
            condNode.Slope = double.PositiveInfinity;

    -       var left = CreateRegressionTreeRec(trees, offset, k + 3, constSy, varCondSy);
    -       var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), constSy, varCondSy);
    +       var left = CreateRegressionTreeRec(trees, offset, k + 3, numSy, varCondSy);
    +       var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), numSy, varCondSy);

            condNode.AddSubtree(left); // not 100% correct because interpreter uses: if(x <= thres) left() else right() and RF uses if(x < thres) left() else right() (see above)
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModelFull.cs

    r17931 → r18180

          }

    -     var constSy = new Constant();
    +     var numSy = new Number();
          var varCondSy = new VariableCondition() { IgnoreSlope = true };

    -     var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, constSy, varCondSy);
    +     var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, numSy, varCondSy);

          var startNode = new StartSymbol().CreateTreeNode();
    …
        }

    -   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Constant constSy, VariableCondition varCondSy) {
    +   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Number numSy, VariableCondition varCondSy) {

          // alglib source for evaluation of one tree (dfprocessinternal)
    …
          // }

    -     if ((double)(trees[k]) == (double)(-1)) {
    -       var constNode = (ConstantTreeNode)constSy.CreateTreeNode();
    -       constNode.Value = trees[k + 1];
    -       return constNode;
    +     if (trees[k] == -1) {
    +       var numNode = (NumberTreeNode)numSy.CreateTreeNode();
    +       numNode.Value = trees[k + 1];
    +       return numNode;
          } else {
            var condNode = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
    …
            condNode.Slope = double.PositiveInfinity;

    -       var left = CreateRegressionTreeRec(trees, offset, k + 3, constSy, varCondSy);
    -       var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), constSy, varCondSy);
    +       var left = CreateRegressionTreeRec(trees, offset, k + 3, numSy, varCondSy);
    +       var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), numSy, varCondSy);

            condNode.AddSubtree(left); // not 100% correct because interpreter uses: if(x <= thres) left() else right() and RF uses if(x < thres) left() else right() (see above)
  • branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/TimeSeries/AutoregressiveModeling.cs

    r17180 → r18180

          int nRows = inputMatrix.GetLength(0);
          int nFeatures = inputMatrix.GetLength(1) - 1;
    -     double[] coefficients = new double[nFeatures + 1]; // last coefficient is for the constant
    +     double[] coefficients = new double[nFeatures + 1]; // last coefficient is for the offset

          int retVal = 1;