#region License Information
/* HeuristicLab
* Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.ParameterConfigurationEncoding;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
namespace HeuristicLab.Problems.MetaOptimization {
  /// <summary>
  /// Evaluates a parameter configuration by executing the configured algorithm on the given problems and analyzing the resulting runs.
  /// </summary>
  [Item("PMOEvaluator", "Evaluates a parameter configuration by executing the configured algorithm on the given problems and analyzing the resulting runs.")]
[StorableClass]
public class PMOEvaluator : AlgorithmOperator, IParameterConfigurationEvaluator {
#region Parameter properties
    public ILookupParameter<IRandom> RandomParameter {
      get { return (LookupParameter<IRandom>)Parameters["Random"]; }
    }
    public ILookupParameter<DoubleValue> QualityParameter {
      get { return (ILookupParameter<DoubleValue>)Parameters["Quality"]; }
    }
    public ILookupParameter<IAlgorithm> AlgorithmTypeParameter {
      get { return (ILookupParameter<IAlgorithm>)Parameters[MetaOptimizationProblem.AlgorithmTypeParameterName]; }
    }
    public ILookupParameter<ItemList<IProblem>> ProblemsParameter {
      get { return (ILookupParameter<ItemList<IProblem>>)Parameters[MetaOptimizationProblem.ProblemsParameterName]; }
    }
    public ILookupParameter<ParameterConfigurationTree> ParameterConfigurationParameter {
      get { return (ILookupParameter<ParameterConfigurationTree>)Parameters["ParameterConfigurationTree"]; }
    }
    public LookupParameter<IntValue> RepetitionsParameter {
      get { return (LookupParameter<IntValue>)Parameters[MetaOptimizationProblem.RepetitionsParameterName]; }
    }
    public LookupParameter<IntValue> GenerationsParameter {
      get { return (LookupParameter<IntValue>)Parameters["Generations"]; }
    }
    public LookupParameter<ResultCollection> ResultsParameter {
      get { return (LookupParameter<ResultCollection>)Parameters["Results"]; }
    }
private ScopeParameter CurrentScopeParameter {
get { return (ScopeParameter)Parameters["CurrentScope"]; }
}
public IScope CurrentScope {
get { return CurrentScopeParameter.ActualValue; }
}
#endregion
#region Constructors and Cloning
public PMOEvaluator()
: base() {
Initialize();
}
[StorableConstructor]
protected PMOEvaluator(bool deserializing) : base(deserializing) { }
protected PMOEvaluator(PMOEvaluator original, Cloner cloner) : base(original, cloner) { }
public override IDeepCloneable Clone(Cloner cloner) {
return new PMOEvaluator(this, cloner);
}
private void Initialize() {
#region Create parameters
Parameters.Add(new LookupParameter("Random", "The pseudo random number generator which should be used to initialize the new random permutation."));
Parameters.Add(new LookupParameter("Quality", "The evaluated quality of the ParameterVector."));
Parameters.Add(new LookupParameter(MetaOptimizationProblem.AlgorithmTypeParameterName, ""));
Parameters.Add(new LookupParameter>(MetaOptimizationProblem.ProblemsParameterName, ""));
Parameters.Add(new LookupParameter("ParameterConfigurationTree", ""));
Parameters.Add(new LookupParameter(MetaOptimizationProblem.RepetitionsParameterName, "Number of evaluations on one problem."));
Parameters.Add(new LookupParameter("Generations", ""));
Parameters.Add(new ScopeParameter("CurrentScope", "The current scope which represents a population of solutions on which the genetic algorithm should be applied."));
#endregion
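      // Create the operators of the evaluation workflow.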
var algorithmSubScopesCreator = new AlgorithmSubScopesCreator();
var uniformSubScopesProcessor = new UniformSubScopesProcessor();
var algorithmEvaluator = new AlgorithmEvaluator();
var algorithmRunsAnalyzer = new AlgorithmRunsAnalyzer();
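      // Evaluate the algorithm subscopes in parallel.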
uniformSubScopesProcessor.Parallel.Value = true;
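      // Wire the operator graph: subscope creation -> per-subscope algorithm evaluation -> analysis of the collected runs.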
this.OperatorGraph.InitialOperator = algorithmSubScopesCreator;
algorithmSubScopesCreator.Successor = uniformSubScopesProcessor;
uniformSubScopesProcessor.Operator = algorithmEvaluator;
uniformSubScopesProcessor.Successor = algorithmRunsAnalyzer;
algorithmRunsAnalyzer.Successor = null;
}
[StorableHook(HookType.AfterDeserialization)]
private void AfterDeserialization() {
///// TODO: only for debug reasons - remove later (set this in stored algs)
((UniformSubScopesProcessor)((AlgorithmSubScopesCreator)this.OperatorGraph.InitialOperator).Successor).Parallel.Value = true;
}
#endregion
}
}