
Timestamp:
08/27/12 10:39:03
Author:
jkarder
Message:

#1853:

  • added problem instance selection to CreateExperimentDialog
  • adapted experiment creation
  • minor code improvements
File:
1 edited

Legend:

  unmodified: no prefix
  added: prefixed with +
  removed: prefixed with -
  • branches/ParameterConfigurationEncoding/HeuristicLab.Encodings.ParameterConfigurationEncoding/3.3/ParameterConfigurationTree.cs

r8517 → r8524

 using HeuristicLab.Optimization;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
+using HeuristicLab.Problems.Instances;

 namespace HeuristicLab.Encodings.ParameterConfigurationEncoding {
-  // todo: storable, name, descr, ...
+  [Item("ParameterConfigurationTree", "Represents a parameter configuration.")]
   [StorableClass]
   public class ParameterConfigurationTree : ParameterizedValueConfiguration, IEnumerable {
-
     [Storable]
     private DoubleValue quality;

…
     }

-    #region constructors and cloning
-    public ParameterConfigurationTree(IAlgorithm algorithm, IProblem problem)
-      : base(null, algorithm.GetType(), false) {
-      this.Optimize = false;
-      this.IsOptimizable = false;
-      this.parameters = new Dictionary<string, IItem>();
-      this.Name = algorithm.ItemName;
-
-      var algproblemitem = new AlgorithmProblemItem();
-      algproblemitem.AlgorithmParameter.Value = algorithm;
-      algproblemitem.ProblemParameter.Value = problem;
-      this.discoverValidValues = false;
-
-      this.parameterConfigurations.Add(new SingleValuedParameterConfiguration("Algorithm", algproblemitem.AlgorithmParameter));
-      this.parameterConfigurations.Add(new SingleValuedParameterConfiguration("Problem", algproblemitem.ProblemParameter));
-
-      // problems can be modified in the list of problem instances, so the parameters which are not Optimize=true,
-      // must not be modifiable in the parameter configuration tree. otherwise the parameter values would be ambiguous
-      ProblemConfiguration.ValuesReadOnly = true;
-    }
-    public ParameterConfigurationTree() { }
+    #region Constructors and Cloning
     [StorableConstructor]
     protected ParameterConfigurationTree(bool deserializing) : base(deserializing) { }

…
       }
     }
+    public ParameterConfigurationTree() : base() { }
+    public ParameterConfigurationTree(IAlgorithm algorithm, IProblem problem)
+      : base(null, algorithm.GetType(), false) {
+      this.Optimize = false;
+      this.IsOptimizable = false;
+      this.parameters = new Dictionary<string, IItem>();
+      this.Name = algorithm.ItemName;
+
+      var algproblemitem = new AlgorithmProblemItem();
+      algproblemitem.AlgorithmParameter.Value = algorithm;
+      algproblemitem.ProblemParameter.Value = problem;
+      this.discoverValidValues = false;
+
+      this.parameterConfigurations.Add(new SingleValuedParameterConfiguration("Algorithm", algproblemitem.AlgorithmParameter));
+      this.parameterConfigurations.Add(new SingleValuedParameterConfiguration("Problem", algproblemitem.ProblemParameter));
+
+      // problems can be modified in the list of problem instances, so the parameters which are not Optimize=true,
+      // must not be modifiable in the parameter configuration tree. otherwise the parameter values would be ambiguous
+      ProblemConfiguration.ValuesReadOnly = true;
+    }
+
     public override IDeepCloneable Clone(Cloner cloner) {
       return new ParameterConfigurationTree(this, cloner);
     }
+
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {

…
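The relocated constructor wires an algorithm/problem pair into the configuration tree: it registers "Algorithm" and "Problem" parameter configurations via an AlgorithmProblemItem and sets ProblemConfiguration.ValuesReadOnly = true so that, with exchangeable problem instances, non-optimized parameter values cannot become ambiguous. A minimal construction sketch, assuming the HeuristicLab 3.3 assemblies used by this branch; GeneticAlgorithm and TravelingSalesmanProblem are illustrative choices, not part of this changeset:

  using HeuristicLab.Algorithms.GeneticAlgorithm;          // illustrative algorithm choice
  using HeuristicLab.Encodings.ParameterConfigurationEncoding;
  using HeuristicLab.Optimization;
  using HeuristicLab.Problems.TravelingSalesman;           // illustrative problem choice

  public static class ParameterConfigurationTreeSketch {
    public static ParameterConfigurationTree Build() {
      IAlgorithm algorithm = new GeneticAlgorithm();
      IProblem problem = new TravelingSalesmanProblem();
      algorithm.Problem = problem;

      // The constructor adds the "Algorithm" and "Problem" parameter configurations and
      // sets ProblemConfiguration.ValuesReadOnly = true, so non-optimized problem
      // parameters stay fixed even when problem instances are exchanged later.
      return new ParameterConfigurationTree(algorithm, problem);
    }
  }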
     public override void Parameterize(IParameterizedItem item) {
       this.parameters.Clear();
-      var algorithm = item as IAlgorithm;
+      var algorithm = (IAlgorithm)item;
       var problem = algorithm.Problem;

…
     }

-    public Experiment GenerateExperiment(IAlgorithm algorithm, bool createBatchRuns, int repetitions, CancellationToken ct) {
+    public Experiment GenerateExperiment(IAlgorithm algorithm, bool createBatchRuns, int repetitions, Dictionary<IProblemInstanceProvider, HashSet<IDataDescriptor>> problemInstances, CancellationToken ct) {
       Experiment experiment = new Experiment();
+      var algorithms = new List<IAlgorithm>(1 + problemInstances.Values.Count) { (IAlgorithm)algorithm.Clone() };
+      foreach (var provider in problemInstances) {
+        foreach (var descriptor in provider.Value) {
+          var alg = (IAlgorithm)algorithm.Clone();
+          ProblemInstanceManager.LoadData(provider.Key, descriptor, (IProblemInstanceConsumer)alg.Problem);
+          algorithms.Add(alg);
+        }
+      }
       ExperimentGenerationProgress = 0;
-      foreach (ParameterizedValueConfiguration combination in this) {
-        ct.ThrowIfCancellationRequested();
-        IAlgorithm clonedAlg = (IAlgorithm)algorithm.Clone();
-        clonedAlg.Name = combination.ParameterInfoString;
-        combination.Parameterize(clonedAlg);
-        clonedAlg.StoreAlgorithmInEachRun = false;
-        if (createBatchRuns) {
-          BatchRun batchRun = new BatchRun(string.Format("BatchRun: {0}", combination.ParameterInfoString));
-          batchRun.Optimizer = clonedAlg;
-          batchRun.Repetitions = repetitions;
-          experiment.Optimizers.Add(batchRun);
-        } else {
-          experiment.Optimizers.Add(clonedAlg);
-        }
-        ExperimentGenerationProgress = (double)experiment.Optimizers.Count / this.GetCombinationCount(0);
+      foreach (var alg in algorithms) {
+        foreach (ParameterizedValueConfiguration combination in this) {
+          ct.ThrowIfCancellationRequested();
+          var clonedAlg = (IAlgorithm)alg.Clone();
+          clonedAlg.Name = combination.ParameterInfoString;
+          combination.Parameterize(clonedAlg);
+          clonedAlg.StoreAlgorithmInEachRun = false;
+          if (createBatchRuns) {
+            BatchRun batchRun = new BatchRun(string.Format("BatchRun: {0}", combination.ParameterInfoString));
+            batchRun.Optimizer = clonedAlg;
+            batchRun.Repetitions = repetitions;
+            experiment.Optimizers.Add(batchRun);
+          } else {
+            experiment.Optimizers.Add(clonedAlg);
+          }
+          ExperimentGenerationProgress = (double)experiment.Optimizers.Count / (this.GetCombinationCount(0) * algorithms.Count);
+        }
       }
       return experiment;

…
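The reworked GenerateExperiment overload first clones the given algorithm once unchanged and once per selected problem instance (ProblemInstanceManager.LoadData fills each clone's problem with the instance data), then crosses every clone with every parameter combination and normalizes ExperimentGenerationProgress by GetCombinationCount(0) * algorithms.Count. A usage sketch, assuming the types shown in this diff; the provider and descriptors parameters are hypothetical stand-ins for whatever the CreateExperimentDialog selection yields:

  using System.Collections.Generic;
  using System.Threading;
  using HeuristicLab.Encodings.ParameterConfigurationEncoding;
  using HeuristicLab.Optimization;
  using HeuristicLab.Problems.Instances;

  public static class ExperimentGenerationSketch {
    // 'provider' and 'descriptors' are hypothetical placeholders for the selection made in
    // CreateExperimentDialog; only the dictionary shape is dictated by the new signature.
    public static Experiment Generate(ParameterConfigurationTree tree, IAlgorithm algorithm,
                                      IProblemInstanceProvider provider,
                                      IEnumerable<IDataDescriptor> descriptors) {
      var problemInstances = new Dictionary<IProblemInstanceProvider, HashSet<IDataDescriptor>> {
        { provider, new HashSet<IDataDescriptor>(descriptors) }
      };

      // One BatchRun per (algorithm clone x parameter combination), 10 repetitions each.
      return tree.GenerateExperiment(algorithm, true, 10, problemInstances, CancellationToken.None);
    }
  }

With createBatchRuns = true, each combination is wrapped in a BatchRun with the requested number of repetitions; with false, the configured algorithm clones are added to the experiment directly.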

     public Experiment GenerateExperiment(IAlgorithm algorithm) {
-      return GenerateExperiment(algorithm, false, 0, CancellationToken.None);
+      return GenerateExperiment(algorithm, false, 0, null, CancellationToken.None);
     }

     public Experiment GenerateExperiment(IAlgorithm algorithm, bool createBatchRuns, int repetitions) {
-      return GenerateExperiment(algorithm, createBatchRuns, repetitions, CancellationToken.None);
+      return GenerateExperiment(algorithm, createBatchRuns, repetitions, null, CancellationToken.None);
     }

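For a sense of the resulting experiment size (a worked example, not part of the changeset): if the tree expands to 12 parameter combinations (GetCombinationCount(0) == 12) and 3 problem instances are selected, the algorithms list holds 1 + 3 = 4 clones, so the nested loops emit 4 × 12 = 48 optimizers and ExperimentGenerationProgress advances in steps of 1/48 up to 1.0.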