Changeset 13670
- Timestamp: 03/09/16 12:05:50 (9 years ago)
- Location: trunk/sources
- Files: 9 edited
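Summary (drawn from the diffs below): the changeset adds an updateVariableWeights switch to constant optimization for symbolic regression. When the switch is off, only ConstantTreeNode values are collected and fitted; variable weights are neither turned into AutoDiff variables nor written back afterwards. The option is exposed as a parameter on the multi-objective evaluators and on SymbolicRegressionConstantOptimizationEvaluator (default true, so existing runs behave as before), and the static OptimizeConstants overload now takes lowerEstimationLimit before upperEstimationLimit, resolving the old TODO about the swapped limits; all call sites pass these arguments by name.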
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Views/3.4/InteractiveSymbolicRegressionSolutionSimplifierView.cs
(r12720 → r13670)

     var model = Content.Model;
     SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(model.Interpreter, model.SymbolicExpressionTree, Content.ProblemData, Content.ProblemData.TrainingIndices,
-      applyLinearScaling: true, maxIterations: 50, upperEstimationLimit: model.UpperEstimationLimit, lowerEstimationLimit: model.LowerEstimationLimit);
+      applyLinearScaling: true, maxIterations: 50, updateVariableWeights: true, lowerEstimationLimit: model.LowerEstimationLimit, upperEstimationLimit: model.UpperEstimationLimit);
     UpdateModel(Content.Model.SymbolicExpressionTree);
   }
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/PearsonRSquaredNestedTreeSizeEvaluator.cs
(r13300 → r13670)

   if (UseConstantOptimization) {
-    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
   }
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/PearsonRSquaredNumberOfVariablesEvaluator.cs
(r13300 → r13670)

   if (UseConstantOptimization) {
-    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
   }
   double[] qualities = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value, DecimalPlaces);
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/PearsonRSquaredTreeComplexityEvaluator.cs
(r13300 → r13670)

   if (UseConstantOptimization) {
-    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
   }
   double[] qualities = Calculate(interpreter, solution, estimationLimits.Lower, estimationLimits.Upper, problemData, rows, applyLinearScaling, DecimalPlaces);
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectiveEvaluator.cs
(r13241 → r13670)

   private const string ConstantOptimizationIterationsParameterName = "Constant optimization iterations";

+  private const string ConstantOptimizationUpdateVariableWeightsParameterName =
+    "Constant optimization update variable weights";
+
   public IFixedValueParameter<IntValue> DecimalPlacesParameter {
     get { return (IFixedValueParameter<IntValue>)Parameters[DecimalPlacesParameterName]; }
   …
   }

+  public IFixedValueParameter<BoolValue> ConstantOptimizationUpdateVariableWeightsParameter {
+    get { return (IFixedValueParameter<BoolValue>)Parameters[ConstantOptimizationUpdateVariableWeightsParameterName]; }
+  }

   public int DecimalPlaces {
   …
     set { ConstantOptimizationIterationsParameter.Value.Value = value; }
   }
+  public bool ConstantOptimizationUpdateVariableWeights {
+    get { return ConstantOptimizationUpdateVariableWeightsParameter.Value.Value; }
+    set { ConstantOptimizationUpdateVariableWeightsParameter.Value.Value = value; }
+  }

   [StorableConstructor]
   …
     Parameters.Add(new FixedValueParameter<BoolValue>(UseConstantOptimizationParameterName, "", new BoolValue(false)));
     Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "The number of iterations constant optimization should be applied.", new IntValue(5)));
+    Parameters.Add(new FixedValueParameter<BoolValue>(ConstantOptimizationUpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized during constant optimization.", new BoolValue(true)));
   }

   …
     Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "The number of iterations constant optimization should be applied.", new IntValue(5)));
   }
+  if (!Parameters.ContainsKey(ConstantOptimizationUpdateVariableWeightsParameterName)) {
+    Parameters.Add(new FixedValueParameter<BoolValue>(ConstantOptimizationUpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized during constant optimization.", new BoolValue(true)));
+  }
   }
 }
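A minimal configuration sketch, assuming one of the multi-objective evaluators touched by this changeset is used and exposes the properties shown in the diff above; the surrounding setup is illustrative and not part of the changeset:

    // Sketch: enable constant optimization but keep variable weights fixed.
    var evaluator = new SymbolicRegressionMultiObjectivePearsonRSquaredTreeSizeEvaluator();
    evaluator.UseConstantOptimization = true;                    // existing switch
    evaluator.ConstantOptimizationIterations = 5;                // existing iteration count
    evaluator.ConstantOptimizationUpdateVariableWeights = false; // new flag added here (defaults to true)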
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectiveMeanSquaredErrorTreeSizeEvaluator.cs
(r13241 → r13670)

   if (UseConstantOptimization) {
-    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
   }
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/MultiObjective/SymbolicRegressionMultiObjectivePearsonRSquaredTreeSizeEvaluator.cs
(r13241 → r13670)

   if (UseConstantOptimization) {
-    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, estimationLimits.Upper, estimationLimits.Lower);
+    SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(interpreter, solution, problemData, rows, applyLinearScaling, ConstantOptimizationIterations, updateVariableWeights: ConstantOptimizationUpdateVariableWeights, lowerEstimationLimit: estimationLimits.Lower, upperEstimationLimit: estimationLimits.Upper);
   }
   double[] qualities = Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, rows, ApplyLinearScalingParameter.ActualValue.Value, DecimalPlaces);
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs
(r13300 → r13670)

   private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
   private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";
+  private const string UpdateVariableWeightsParameterName = "Update Variable Weights";

   public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
   …
     get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
   }
+  public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
+    get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
+  }

   public IntValue ConstantOptimizationIterations {
   …
     get { return UpdateConstantsInTreeParameter.Value.Value; }
     set { UpdateConstantsInTreeParameter.Value.Value = value; }
+  }
+
+  public bool UpdateVariableWeights {
+    get { return UpdateVariableWeightsParameter.Value.Value; }
+    set { UpdateVariableWeightsParameter.Value.Value = value; }
   }
   …
     Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
     Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+    Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
   }
   …
     if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+    if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
+      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
   }
   …
     IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
     quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-      constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
-      EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree);
+      constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);

     if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
   …
-  // TODO: swap positions of lowerEstimationLimit and upperEstimationLimit parameters
-  public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
-    IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true) {
+  public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true) {

     List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
   …
     AutoDiff.Term func;
-    if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, out func))
+    if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, updateVariableWeights, out func))
       throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
     if (variableNames.Count == 0) return 0.0;

-    AutoDiff.IParametricCompiledTerm compiledFunc = AutoDiff.TermUtils.Compile(func, variables.ToArray(), parameters.ToArray());
-
-    List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
+    AutoDiff.IParametricCompiledTerm compiledFunc = func.Compile(variables.ToArray(), parameters.ToArray());
+
+    List<SymbolicExpressionTreeTerminalNode> terminalNodes = null;
+    if (updateVariableWeights)
+      terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
+    else
+      terminalNodes = new List<SymbolicExpressionTreeTerminalNode>(tree.Root.IterateNodesPrefix().OfType<ConstantTreeNode>());
+
+    //extract inital constants
     double[] c = new double[variables.Count];
-
     {
       c[0] = 0.0;
       c[1] = 1.0;
-      //extract inital constants
       int i = 2;
       foreach (var node in terminalNodes) {
   …
         if (constantTreeNode != null)
           c[i++] = constantTreeNode.Value;
-        else if (variableTreeNode != null)
+        else if (updateVariableWeights && variableTreeNode != null)
           c[i++] = variableTreeNode.Weight;
       }
   …
     //info == -7 => constant optimization failed due to wrong gradient
-    if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray());
+    if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
     var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

-    if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray());
+    if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
     if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
-      UpdateConstants(tree, originalConstants.Skip(2).ToArray());
+      UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
       return originalQuality;
     }
   …
   }

-  private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants) {
+  private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants, bool updateVariableWeights) {
     int i = 0;
     foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
   …
       if (constantTreeNode != null)
         constantTreeNode.Value = constants[i++];
-      else if (variableTreeNode != null)
+      else if (updateVariableWeights && variableTreeNode != null)
         variableTreeNode.Weight = constants[i++];
     }
   …
-  private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, out AutoDiff.Term term) {
+  private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, bool updateVariableWeights, out AutoDiff.Term term) {
     if (node.Symbol is Constant) {
       var var = new AutoDiff.Variable();
   …
       parameters.Add(par);
       variableNames.Add(varNode.VariableName);
-      var w = new AutoDiff.Variable();
-      variables.Add(w);
-      term = AutoDiff.TermBuilder.Product(w, par);
+
+      if (updateVariableWeights) {
+        var w = new AutoDiff.Variable();
+        variables.Add(w);
+        term = AutoDiff.TermBuilder.Product(w, par);
+      } else {
+        term = par;
+      }
       return true;
     }
   …
     foreach (var subTree in node.Subtrees) {
       AutoDiff.Term t;
-      if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out t)) {
+      if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, updateVariableWeights, out t)) {
         term = null;
         return false;
   …

The remaining hunks of this file apply the identical change to every other recursive TryTransformToAutoDiff call, i.e. the new updateVariableWeights argument is inserted before the out parameter in the handlers for the other arithmetic operators, Multiplication, Logarithm, Exponential, Square, SquareRoot, Sine, Cosine, Tangent, Erf, and Norm, as well as in the final linear-scaling branch (alpha/beta).
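Read together with the existing updateConstantsInTree switch, the new flag also makes it possible to score a tree under constant-only tuning without modifying it. A rough sketch, assuming an interpreter, tree, problem data, and row range are already in scope (these local names are illustrative, not from the changeset):

    // Sketch: estimate the quality achievable by tuning constants only
    // (variable weights frozen), leaving the tree itself untouched.
    double tunedQuality = SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(
      interpreter, tree, problemData, rows,
      applyLinearScaling: true,
      maxIterations: 10,
      updateVariableWeights: false,   // new parameter: weights are not fitted or overwritten
      updateConstantsInTree: false);  // existing parameter: original constants are restored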
trunk/sources/HeuristicLab.Problems.GrammaticalEvolution/3.4/ArtificialAnt/GEArtificialAntProblem.cs
(r13243 → r13670)

   [StorableClass]
   public sealed class GEArtificialAntProblem : SingleObjectiveBasicProblem<IntegerVectorEncoding>, IStorableContent {
-    public string Filename { get; set; }

     #region Parameter Properties