Changeset 18180 for branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis
- Timestamp: 01/10/22 10:15:25
- Location: branches/3138_Shape_Constraints_Transformations
- Files: 11 edited
Legend: lines prefixed with "-" were removed, lines prefixed with "+" were added, unprefixed lines are unmodified context.
branches/3138_Shape_Constraints_Transformations
- Property svn:mergeinfo changed

branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed
  - /branches/3140_NumberSymbol/HeuristicLab.Algorithms.DataAnalysis (added), merged: 18100, 18113-18114, 18116
  - /trunk/HeuristicLab.Algorithms.DataAnalysis (added), merged: 18132

branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4
- Property svn:mergeinfo changed
  - /branches/3140_NumberSymbol/HeuristicLab.Algorithms.DataAnalysis/3.4 (added), merged: 18100, 18113-18114, 18116
  - /trunk/HeuristicLab.Algorithms.DataAnalysis/3.4 (added), merged: 18132
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/GBM/GradientBoostingRegressionAlgorithm.cs
r17180 → r18180

      var addNode = new Addition().CreateTreeNode();
      var mulNode = new Multiplication().CreateTreeNode();
  -   var scaleNode = (ConstantTreeNode)new Constant().CreateTreeNode(); // all models are scaled using the same nu
  +   var scaleNode = (NumberTreeNode)new Number().CreateTreeNode(); // all models are scaled using the same nu
      scaleNode.Value = nu;
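Note: this hunk shows the renaming pattern that recurs throughout the changeset: the Constant symbol and ConstantTreeNode are replaced by the Number symbol and NumberTreeNode merged in from the 3140_NumberSymbol branch. A minimal before/after sketch of the pattern (illustrative only; it assumes the HeuristicLab symbolic-expression assemblies are referenced and reuses only identifiers that appear in the diffs):

    // Before this merge: a fixed numeric leaf was created via the Constant symbol.
    // var scaleNode = (ConstantTreeNode)new Constant().CreateTreeNode();

    // After this merge: the Number symbol is used instead; the node value is set the same way.
    var scaleNode = (NumberTreeNode)new Number().CreateTreeNode();
    scaleNode.Value = 0.1; // e.g. the shrinkage factor nu used in the hunk above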
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessCovarianceOptimizationProblem.cs
r17180 → r18180

      private const string ProblemDataParameterName = "ProblemData";
  -   private const string ConstantOptIterationsParameterName = "Constant optimization steps";
  +   private const string ParameterOptIterationsParameterName = "Parameter optimization steps";
      private const string RestartsParameterName = "Restarts";
…
        get { return (IValueParameter<IRegressionProblemData>)Parameters[ProblemDataParameterName]; }
      }
  -   public IFixedValueParameter<IntValue> ConstantOptIterationsParameter {
  -     get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptIterationsParameterName]; }
  +   public IFixedValueParameter<IntValue> ParameterOptIterationsParameter {
  +     get { return (IFixedValueParameter<IntValue>)Parameters[ParameterOptIterationsParameterName]; }
      }
      public IFixedValueParameter<IntValue> RestartsParameter {
…
      IDataAnalysisProblemData IDataAnalysisProblem.ProblemData { get { return ProblemData; } }

  -   public int ConstantOptIterations {
  -     get { return ConstantOptIterationsParameter.Value.Value; }
  -     set { ConstantOptIterationsParameter.Value.Value = value; }
  +   public int ParameterOptIterations {
  +     get { return ParameterOptIterationsParameter.Value.Value; }
  +     set { ParameterOptIterationsParameter.Value.Value = value; }
      }
…
        : base() {
        Parameters.Add(new ValueParameter<IRegressionProblemData>(ProblemDataParameterName, "The data for the regression problem", new RegressionProblemData()));
  -     Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptIterationsParameterName, "Number of optimization steps for hyperparameter values", new IntValue(50)));
  -     Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of random restarts for constant optimization.", new IntValue(10)));
  +     Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName, "Number of optimization steps for hyperparameter values", new IntValue(50)));
  +     Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of random restarts for parameter optimization.", new IntValue(10)));
        Parameters["Restarts"].Hidden = true;
        var g = new SimpleSymbolicExpressionGrammar();
…
        double epsx = 0;
        double stpmax = 1;
  -     int maxits = ConstantOptIterations;
  +     int maxits = ParameterOptIterations;
        alglib.mincgstate state;
        alglib.mincgreport rep;
…
      [StorableHook(HookType.AfterDeserialization)]
      private void AfterDeserialization() {
  +     if (!Parameters.ContainsKey(ParameterOptIterationsParameterName)) {
  +       if (Parameters.ContainsKey("Constant optimization steps")) {
  +         Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName, "Number of optimization steps for hyperparameter values", (IntValue)Parameters["Constant optimization steps"].ActualValue));
  +         Parameters.Remove("Constant optimization steps");
  +       } else {
  +         Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName, "Number of optimization steps for hyperparameter values", new IntValue(50)));
  +       }
  +     }
      }
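Note: the newly added AfterDeserialization hook keeps previously stored runs loadable after the rename: if an old file still carries the "Constant optimization steps" parameter, its value is moved to the renamed parameter and the obsolete entry is removed; otherwise the default of 50 iterations is added. The same code as above, annotated for readability (a sketch only; identifiers are those from the diff, the surrounding class and HeuristicLab assemblies are assumed):

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      // Only migrate if the renamed parameter is not already present.
      if (!Parameters.ContainsKey(ParameterOptIterationsParameterName)) {
        if (Parameters.ContainsKey("Constant optimization steps")) {
          // Carry the previously stored iteration count over under the new name ...
          Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName,
            "Number of optimization steps for hyperparameter values",
            (IntValue)Parameters["Constant optimization steps"].ActualValue));
          // ... and drop the obsolete parameter.
          Parameters.Remove("Constant optimization steps");
        } else {
          // Files without either parameter fall back to the default of 50 iterations.
          Parameters.Add(new FixedValueParameter<IntValue>(ParameterOptIterationsParameterName,
            "Number of optimization steps for hyperparameter values", new IntValue(50)));
        }
      }
    }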
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs
r17180 → r18180

      var startSy = new StartSymbol();
      var varCondSy = new VariableCondition() { IgnoreSlope = true };
  -   var constSy = new Constant();
  +   var numSy = new Number();

      var startNode = startSy.CreateTreeNode();
  -   startNode.AddSubtree(CreateSymbolicRegressionTreeRecursive(tree, 0, varCondSy, constSy));
  +   startNode.AddSubtree(CreateSymbolicRegressionTreeRecursive(tree, 0, varCondSy, numSy));
      var rootNode = rootSy.CreateTreeNode();
      rootNode.AddSubtree(startNode);
…
  -   private ISymbolicExpressionTreeNode CreateSymbolicRegressionTreeRecursive(TreeNode[] treeNodes, int nodeIdx, VariableCondition varCondSy, Constant constSy) {
  +   private ISymbolicExpressionTreeNode CreateSymbolicRegressionTreeRecursive(TreeNode[] treeNodes, int nodeIdx, VariableCondition varCondSy, Number numSy) {
        var curNode = treeNodes[nodeIdx];
        if (curNode.VarName == TreeNode.NO_VARIABLE) {
  -       var node = (ConstantTreeNode)constSy.CreateTreeNode();
  +       var node = (NumberTreeNode)numSy.CreateTreeNode();
          node.Value = curNode.Val;
          return node;
…
        node.Threshold = curNode.Val;

  -     var left = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.LeftIdx, varCondSy, constSy);
  -     var right = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.RightIdx, varCondSy, constSy);
  +     var left = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.LeftIdx, varCondSy, numSy);
  +     var right = CreateSymbolicRegressionTreeRecursive(treeNodes, curNode.RightIdx, varCondSy, numSy);
        node.AddSubtree(left);
        node.AddSubtree(right);
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/NonlinearRegression/NonlinearRegression.cs
r17180 → r18180

        : base() {
        Problem = new RegressionProblem();
  -     Parameters.Add(new FixedValueParameter<StringValue>(ModelStructureParameterName, "The function for which the parameters must be fit (only numeric constants are tuned).", new StringValue("1.0 * x*x + 0.0")));
  -     Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "The maximum number of iterations for constants optimization.", new IntValue(200)));
  +     Parameters.Add(new FixedValueParameter<StringValue>(ModelStructureParameterName,
  +       "The expression for which the <num> parameters should be fit.\n " +
  +       "Defined constants will not be modified.\n " +
  +       "Modifiable numbers are specified with <num>. To specify a default value within this number symbol, a default value can be declared by e.g. <num=1.0>.",
  +       new StringValue("<num> * x*x + 0.0")));
  +     Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "The maximum number of iterations for parameter optimization.", new IntValue(200)));
        Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of independent random restarts (>0)", new IntValue(10)));
        Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The PRNG seed value.", new IntValue()));
…
      /// <summary>
  -   /// Fits a model to the data by optimizing the numeric constants.
  +   /// Fits a model to the data by optimizing parameters.
      /// Model is specified as infix expression containing variable names and numbers.
  -   /// The starting point for the numeric constants is initialized randomly if a random number generator is specified (~N(0,1)). Otherwise the user specified constants are
  +   /// The starting values for the parameters are initialized randomly if a random number generator is specified (~N(0,1)). Otherwise the user specified values are
      /// used as a starting point.
      /// </summary>
      /// <param name="problemData">Training and test data</param>
      /// <param name="modelStructure">The function as infix expression</param>
  -   /// <param name="maxIterations">Number of constant optimization iterations (using Levenberg-Marquardt algorithm)</param>
  -   /// <param name="random">Optional random number generator for random initialization of numeric constants.</param>
  +   /// <param name="maxIterations">Number of Levenberg-Marquardt iterations</param>
  +   /// <param name="random">Optional random number generator for random initialization of parameters.</param>
      /// <returns></returns>
      public static ISymbolicRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData, string modelStructure, int maxIterations, bool applyLinearScaling, IRandom rand = null) {
…
  -     if (!SymbolicRegressionConstantOptimizationEvaluator.CanOptimizeConstants(tree)) throw new ArgumentException("The optimizer does not support the specified model structure.");
  +     if (!SymbolicRegressionParameterOptimizationEvaluator.CanOptimizeParameters(tree)) throw new ArgumentException("The optimizer does not support the specified model structure.");

  -     // initialize constants randomly
  +     // initialize parameters randomly
        if (rand != null) {
  -       foreach (var node in tree.IterateNodesPrefix().OfType<ConstantTreeNode>()) {
  +       foreach (var node in tree.IterateNodesPrefix().OfType<NumberTreeNode>()) {
            double f = Math.Exp(NormalDistributedRandom.NextDouble(rand, 0, 1));
            double s = rand.NextDouble() < 0.5 ? -1 : 1;
…
        var interpreter = new SymbolicDataAnalysisExpressionTreeLinearInterpreter();

  -     SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, tree, problemData, problemData.TrainingIndices,
  +     SymbolicRegressionParameterOptimizationEvaluator.OptimizeParameters(interpreter, tree, problemData, problemData.TrainingIndices,
          applyLinearScaling: applyLinearScaling, maxIterations: maxIterations,
  -       updateVariableWeights: false, updateConstantsInTree: true);
  +       updateVariableWeights: false, updateParametersInTree: true);

        var model = new SymbolicRegressionModel(problemData.TargetVariable, tree, (ISymbolicDataAnalysisExpressionTreeInterpreter)interpreter.Clone());
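Note: the new default model structure uses the <num> placeholder introduced with the Number symbol: <num> marks a value to be fitted, <num=1.0> additionally supplies a starting value, and plain literals such as 0.0 stay fixed. A hedged usage sketch of the CreateRegressionSolution entry point shown above (the default problem data and the variable name "x" are illustrative assumptions, not part of the changeset):

    // Assumption: the default RegressionProblemData contains an input variable named "x".
    var problemData = new RegressionProblemData();

    // "<num=1.0>" and "<num>" are fitted by the Levenberg-Marquardt-based parameter optimizer;
    // the literal 0.0 is a defined constant and is left untouched.
    var solution = NonlinearRegression.CreateRegressionSolution(
      problemData,
      modelStructure: "<num=1.0> * x*x + <num> * x + 0.0",
      maxIterations: 200,
      applyLinearScaling: true);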
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs
r17931 → r18180

      }

  -   var constSy = new Constant();
  +   var numSy = new Number();
      var varCondSy = new VariableCondition() { IgnoreSlope = true };

  -   var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, constSy, varCondSy);
  +   var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, numSy, varCondSy);

      var startNode = new StartSymbol().CreateTreeNode();
…
  -   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Constant constSy, VariableCondition varCondSy) {
  +   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Number numSy, VariableCondition varCondSy) {

      // alglib source for evaluation of one tree (dfprocessinternal)
…
      if ((double)(trees[k]) == (double)(-1)) {
  -     var constNode = (ConstantTreeNode)constSy.CreateTreeNode();
  -     constNode.Value = trees[k + 1];
  -     return constNode;
  +     var numNode = (NumberTreeNode)numSy.CreateTreeNode();
  +     numNode.Value = trees[k + 1];
  +     return numNode;
      } else {
        var condNode = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
…
        condNode.Slope = double.PositiveInfinity;

  -     var left = CreateRegressionTreeRec(trees, offset, k + 3, constSy, varCondSy);
  -     var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), constSy, varCondSy);
  +     var left = CreateRegressionTreeRec(trees, offset, k + 3, numSy, varCondSy);
  +     var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), numSy, varCondSy);

        condNode.AddSubtree(left); // not 100% correct because interpreter uses: if(x <= thres) left() else right() and RF uses if(x < thres) left() else right() (see above)
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModelAlglib_3_7.cs
r17931 → r18180

      }

  -   var constSy = new Constant();
  +   var numSy = new Number();
      var varCondSy = new VariableCondition() { IgnoreSlope = true };

  -   var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, constSy, varCondSy);
  +   var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, numSy, varCondSy);

      var startNode = new StartSymbol().CreateTreeNode();
…
  -   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Constant constSy, VariableCondition varCondSy) {
  +   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Number numSy, VariableCondition varCondSy) {

      // alglib source for evaluation of one tree (dfprocessinternal)
…
      // }

  -   if ((double)(trees[k]) == (double)(-1)) {
  -     var constNode = (ConstantTreeNode)constSy.CreateTreeNode();
  -     constNode.Value = trees[k + 1];
  -     return constNode;
  +   if (trees[k] == -1) {
  +     var numNode = (NumberTreeNode)numSy.CreateTreeNode();
  +     numNode.Value = trees[k + 1];
  +     return numNode;
      } else {
        var condNode = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
…
        condNode.Slope = double.PositiveInfinity;

  -     var left = CreateRegressionTreeRec(trees, offset, k + 3, constSy, varCondSy);
  -     var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), constSy, varCondSy);
  +     var left = CreateRegressionTreeRec(trees, offset, k + 3, numSy, varCondSy);
  +     var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), numSy, varCondSy);

        condNode.AddSubtree(left); // not 100% correct because interpreter uses: if(x <= thres) left() else right() and RF uses if(x < thres) left() else right() (see above)
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModelFull.cs
r17931 → r18180

      }

  -   var constSy = new Constant();
  +   var numSy = new Number();
      var varCondSy = new VariableCondition() { IgnoreSlope = true };

  -   var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, constSy, varCondSy);
  +   var node = CreateRegressionTreeRec(rf.innerobj.trees, offset, offset + 1, numSy, varCondSy);

      var startNode = new StartSymbol().CreateTreeNode();
…
  -   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Constant constSy, VariableCondition varCondSy) {
  +   private ISymbolicExpressionTreeNode CreateRegressionTreeRec(double[] trees, int offset, int k, Number numSy, VariableCondition varCondSy) {

      // alglib source for evaluation of one tree (dfprocessinternal)
…
      // }

  -   if ((double)(trees[k]) == (double)(-1)) {
  -     var constNode = (ConstantTreeNode)constSy.CreateTreeNode();
  -     constNode.Value = trees[k + 1];
  -     return constNode;
  +   if (trees[k] == -1) {
  +     var numNode = (NumberTreeNode)numSy.CreateTreeNode();
  +     numNode.Value = trees[k + 1];
  +     return numNode;
      } else {
        var condNode = (VariableConditionTreeNode)varCondSy.CreateTreeNode();
…
        condNode.Slope = double.PositiveInfinity;

  -     var left = CreateRegressionTreeRec(trees, offset, k + 3, constSy, varCondSy);
  -     var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), constSy, varCondSy);
  +     var left = CreateRegressionTreeRec(trees, offset, k + 3, numSy, varCondSy);
  +     var right = CreateRegressionTreeRec(trees, offset, offset + (int)Math.Round(trees[k + 2]), numSy, varCondSy);

        condNode.AddSubtree(left); // not 100% correct because interpreter uses: if(x <= thres) left() else right() and RF uses if(x < thres) left() else right() (see above)
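Note: all three random-forest model classes convert alglib's flat tree encoding with the same recursion: trees[k] == -1 marks a leaf whose value is trees[k + 1]; otherwise the left child follows at k + 3 and the right child at offset + trees[k + 2], and the split comparison is x < threshold (see the comment in the hunks above). A self-contained sketch of evaluating one such flat tree, assuming that encoding (this helper is not part of the changeset; requires using System for Math.Round):

    // Hypothetical helper: evaluates a single alglib regression tree stored in a flat double[].
    // Assumption: the encoding follows alglib's dfprocessinternal, referenced in the diffs above;
    // trees[offset] holds per-tree bookkeeping, so the root node starts at offset + 1.
    static double EvaluateFlatTree(double[] trees, int offset, double[] x) {
      int k = offset + 1;
      while (trees[k] != -1) {                            // -1 marks a leaf node
        int varIdx = (int)Math.Round(trees[k]);           // index of the split variable
        if (x[varIdx] < trees[k + 1])                     // trees[k + 1] is the split threshold
          k = k + 3;                                      // left child follows directly
        else
          k = offset + (int)Math.Round(trees[k + 2]);     // right child at stored offset
      }
      return trees[k + 1];                                // leaf: predicted value
    }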
branches/3138_Shape_Constraints_Transformations/HeuristicLab.Algorithms.DataAnalysis/3.4/TimeSeries/AutoregressiveModeling.cs
r17180 → r18180

      int nRows = inputMatrix.GetLength(0);
      int nFeatures = inputMatrix.GetLength(1) - 1;
  -   double[] coefficients = new double[nFeatures + 1]; // last coefficient is for the constant
  +   double[] coefficients = new double[nFeatures + 1]; // last coefficient is for the offset

      int retVal = 1;
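Note: the reworded comment reflects what the coefficient vector holds: one coefficient per lagged value plus a trailing offset (intercept) term. A small self-contained sketch of applying such a coefficient vector to a lag window (illustrative only; this helper does not exist in the changeset):

    // Hypothetical helper: prediction of an autoregressive model whose last coefficient is the offset.
    static double PredictAutoregressive(double[] coefficients, double[] lags) {
      int nFeatures = coefficients.Length - 1;         // number of lagged inputs
      double prediction = coefficients[nFeatures];     // trailing offset (intercept) term
      for (int i = 0; i < nFeatures; i++)
        prediction += coefficients[i] * lags[i];
      return prediction;
    }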