using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Problems.MetaOptimization {
  [Item("PMOEvaluator", "An operator which evaluates a parameter configuration by running the configured algorithm on a set of problems.")]
  [StorableClass]
  public class PMOEvaluator : AlgorithmOperator, IParameterConfigurationEvaluator {
    #region Parameter properties
    public ILookupParameter<IRandom> RandomParameter {
      get { return (ILookupParameter<IRandom>)Parameters["Random"]; }
    }
    public ILookupParameter<DoubleValue> QualityParameter {
      get { return (ILookupParameter<DoubleValue>)Parameters["Quality"]; }
    }
    // generic type argument assumed (IAlgorithm); it was not recoverable from the available source
    public ILookupParameter<IAlgorithm> AlgorithmTypeParameter {
      get { return (ILookupParameter<IAlgorithm>)Parameters[MetaOptimizationProblem.AlgorithmTypeParameterName]; }
    }
    // generic type argument assumed (ItemList<IProblem>); it was not recoverable from the available source
    public ILookupParameter<ItemList<IProblem>> ProblemsParameter {
      get { return (ILookupParameter<ItemList<IProblem>>)Parameters[MetaOptimizationProblem.ProblemsParameterName]; }
    }
    public ILookupParameter<ParameterConfigurationTree> ParameterConfigurationParameter {
      get { return (ILookupParameter<ParameterConfigurationTree>)Parameters["ParameterConfigurationTree"]; }
    }
    public LookupParameter<IntValue> RepetitionsParameter {
      get { return (LookupParameter<IntValue>)Parameters[MetaOptimizationProblem.RepetitionsParameterName]; }
    }
    public LookupParameter<IntValue> GenerationsParameter {
      get { return (LookupParameter<IntValue>)Parameters["Generations"]; }
    }
    public LookupParameter<ResultCollection> ResultsParameter {
      get { return (LookupParameter<ResultCollection>)Parameters["Results"]; }
    }
    private ScopeParameter CurrentScopeParameter {
      get { return (ScopeParameter)Parameters["CurrentScope"]; }
    }
    public IScope CurrentScope {
      get { return CurrentScopeParameter.ActualValue; }
    }
    #endregion

    [StorableConstructor]
    protected PMOEvaluator(bool deserializing) : base(deserializing) { }
    public PMOEvaluator() {
      Initialize();
    }
    protected PMOEvaluator(PMOEvaluator original, Cloner cloner) : base(original, cloner) { }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new PMOEvaluator(this, cloner);
    }

    private void Initialize() {
      #region Create parameters
      Parameters.Add(new LookupParameter<IRandom>("Random", "The pseudo random number generator which should be used to initialize the new random permutation."));
      Parameters.Add(new LookupParameter<DoubleValue>("Quality", "The evaluated quality of the ParameterVector."));
      Parameters.Add(new LookupParameter<IAlgorithm>(MetaOptimizationProblem.AlgorithmTypeParameterName, ""));
      Parameters.Add(new LookupParameter<ItemList<IProblem>>(MetaOptimizationProblem.ProblemsParameterName, ""));
      Parameters.Add(new LookupParameter<ParameterConfigurationTree>("ParameterConfigurationTree", ""));
      Parameters.Add(new LookupParameter<IntValue>(MetaOptimizationProblem.RepetitionsParameterName, "Number of evaluations on one problem."));
      Parameters.Add(new LookupParameter<IntValue>("Generations", ""));
      // "Results" is read by ResultsParameter but was not registered in the available source;
      // it is added here (type ResultCollection assumed) so the lookup does not fail.
      Parameters.Add(new LookupParameter<ResultCollection>("Results", "The results collection."));
      Parameters.Add(new ScopeParameter("CurrentScope", "The current scope which represents a population of parameter configurations to be evaluated."));
      #endregion

      // Build the operator graph: create one sub-scope per algorithm run, evaluate the runs
      // in parallel, then aggregate the collected runs into a quality value.
      var algorithmSubScopesCreator = new AlgorithmSubScopesCreator();
      var uniformSubScopesProcessor = new UniformSubScopesProcessor();
      var algorithmEvaluator = new AlgorithmEvaluator();
      var algorithmRunsAnalyzer = new AlgorithmRunsAnalyzer();

      uniformSubScopesProcessor.Parallel.Value = true;

      this.OperatorGraph.InitialOperator = algorithmSubScopesCreator;
      algorithmSubScopesCreator.Successor = uniformSubScopesProcessor;
      uniformSubScopesProcessor.Operator = algorithmEvaluator;
      uniformSubScopesProcessor.Successor = algorithmRunsAnalyzer;
      algorithmRunsAnalyzer.Successor = null;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      // only for debug reasons - remove later (set this in stored algs)
      ((UniformSubScopesProcessor)((AlgorithmSubScopesCreator)this.OperatorGraph.InitialOperator).Successor).Parallel.Value = true;
    }
  }
}
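
// Usage sketch (illustrative, not part of the original file): PMOEvaluator derives from
// AlgorithmOperator, so applying it to a scope executes the operator graph built in
// Initialize(). For a scope holding one parameter configuration, the flow is roughly:
//
//   AlgorithmSubScopesCreator  -> creates one sub-scope per algorithm run (presumably problems x repetitions)
//   UniformSubScopesProcessor  -> applies AlgorithmEvaluator to each sub-scope, in parallel
//   AlgorithmRunsAnalyzer      -> aggregates the collected runs into the "Quality" lookup value
//
// How MetaOptimizationProblem binds this evaluator and its lookup parameters is an assumption
// here; consult MetaOptimizationProblem for the actual wiring.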