#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.ConditionActionEncoding;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Optimization.Operators;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.LearningClassifierSystems {
  /// <summary>
  /// A learning classifier system.
  /// </summary>
  [Item("Learning Classifier System", "A learning classifier system.")]
  [Creatable("Algorithms")]
  [StorableClass]
  public sealed class LearningClassifierSystem : HeuristicOptimizationEngineAlgorithm, IStorableContent {
    public string Filename { get; set; }

    #region Problem Properties
    public override Type ProblemType {
      get { return typeof(IConditionActionProblem); }
    }
    public new IConditionActionProblem Problem {
      get { return (IConditionActionProblem)base.Problem; }
      set { base.Problem = value; }
    }
    #endregion

    #region Parameter Properties
    private ValueParameter<IntValue> SeedParameter {
      get { return (ValueParameter<IntValue>)Parameters["Seed"]; }
    }
    private ValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (ValueParameter<BoolValue>)Parameters["SetSeedRandomly"]; }
    }
    private ValueParameter<BoolValue> CreateInitialPopulationParameter {
      get { return (ValueParameter<BoolValue>)Parameters["CreateInitialPopulation"]; }
    }
    private ValueParameter<IntValue> PopulationSizeParameter {
      get { return (ValueParameter<IntValue>)Parameters["N"]; }
    }
    private ValueParameter<PercentValue> BetaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Beta"]; }
    }
    private ValueParameter<PercentValue> AlphaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Alpha"]; }
    }
    private ValueParameter<DoubleValue> ErrorZeroParameter {
      get { return (ValueParameter<DoubleValue>)Parameters["ErrorZero"]; }
    }
    private ValueParameter<DoubleValue> PowerParameter {
      get { return (ValueParameter<DoubleValue>)Parameters["v"]; }
    }
    private ValueParameter<PercentValue> GammaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Gamma"]; }
    }
    private ValueParameter<PercentValue> CrossoverProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["CrossoverProbability"]; }
    }
    private ValueParameter<PercentValue> MutationProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["MutationProbability"]; }
    }
    private ValueParameter<IntValue> ThetaGAParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaGA"]; }
    }
    private ValueParameter<IntValue> ThetaDeletionParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaDeletion"]; }
    }
    private ValueParameter<IntValue> ThetaSubsumptionParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaSubsumption"]; }
    }
    private ValueParameter<PercentValue> DeltaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Delta"]; }
    }
    private ValueParameter<PercentValue> ExplorationProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["ExplorationProbability"]; }
    }
    private ValueParameter<BoolValue> DoGASubsumptionParameter {
      get { return (ValueParameter<BoolValue>)Parameters["DoGASubsumption"]; }
    }
    private ValueParameter<BoolValue> DoActionSetSubsumptionParameter {
      get { return (ValueParameter<BoolValue>)Parameters["DoActionSetSubsumption"]; }
    }
    private ValueParameter<MultiAnalyzer> AnalyzerParameter {
      get { return (ValueParameter<MultiAnalyzer>)Parameters["Analyzer"]; }
    }
    private ValueParameter<MultiAnalyzer> FinalAnalyzerParameter {
      get { return (ValueParameter<MultiAnalyzer>)Parameters["FinalAnalyzer"]; }
    }
    private ValueParameter<IntValue> MaxIterationsParameter {
      get { return (ValueParameter<IntValue>)Parameters["MaxIterations"]; }
    }
    #endregion

    #region Properties
    public IntValue Seed {
      get { return SeedParameter.Value; }
      set { SeedParameter.Value = value; }
    }
    public BoolValue SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value; }
      set { SetSeedRandomlyParameter.Value = value; }
    }
    public BoolValue CreateInitialPopulation {
      get { return CreateInitialPopulationParameter.Value; }
      set { CreateInitialPopulationParameter.Value = value; }
    }
    public IntValue PopulationSize {
      get { return PopulationSizeParameter.Value; }
      set { PopulationSizeParameter.Value = value; }
    }
    public PercentValue Beta {
      get { return BetaParameter.Value; }
      set { BetaParameter.Value = value; }
    }
    public PercentValue Alpha {
      get { return AlphaParameter.Value; }
      set { AlphaParameter.Value = value; }
    }
    public DoubleValue ErrorZero {
      get { return ErrorZeroParameter.Value; }
      set { ErrorZeroParameter.Value = value; }
    }
    public DoubleValue Power {
      get { return PowerParameter.Value; }
      set { PowerParameter.Value = value; }
    }
    public PercentValue Gamma {
      get { return GammaParameter.Value; }
      set { GammaParameter.Value = value; }
    }
    public PercentValue CrossoverProbability {
      get { return CrossoverProbabilityParameter.Value; }
      set { CrossoverProbabilityParameter.Value = value; }
    }
    public PercentValue MutationProbability {
      get { return MutationProbabilityParameter.Value; }
      set { MutationProbabilityParameter.Value = value; }
    }
    public IntValue ThetaGA {
      get { return ThetaGAParameter.Value; }
      set { ThetaGAParameter.Value = value; }
    }
    public IntValue ThetaDeletion {
      get { return ThetaDeletionParameter.Value; }
      set { ThetaDeletionParameter.Value = value; }
    }
    public IntValue ThetaSubsumption {
      get { return ThetaSubsumptionParameter.Value; }
      set { ThetaSubsumptionParameter.Value = value; }
    }
    public PercentValue Delta {
      get { return DeltaParameter.Value; }
      set { DeltaParameter.Value = value; }
    }
    public PercentValue ExplorationProbability {
      get { return ExplorationProbabilityParameter.Value; }
      set { ExplorationProbabilityParameter.Value = value; }
    }
    public BoolValue DoGASubsumption {
      get { return DoGASubsumptionParameter.Value; }
      set { DoGASubsumptionParameter.Value = value; }
    }
    public BoolValue DoActionSetSubsumption {
      get { return DoActionSetSubsumptionParameter.Value; }
      set { DoActionSetSubsumptionParameter.Value = value; }
    }
    public IntValue MaxIterations {
      get { return MaxIterationsParameter.Value; }
      set { MaxIterationsParameter.Value = value; }
    }
    public MultiAnalyzer Analyzer {
      get { return AnalyzerParameter.Value; }
      set { AnalyzerParameter.Value = value; }
    }
    public MultiAnalyzer FinalAnalyzer {
      get { return FinalAnalyzerParameter.Value; }
      set { FinalAnalyzerParameter.Value = value; }
    }
    private RandomCreator RandomCreator {
      get { return (RandomCreator)OperatorGraph.InitialOperator; }
    }
    public LearningClassifierSystemMainLoop MainLoop {
      get { return FindMainLoop(RandomCreator.Successor); }
    }
    #endregion

    public LearningClassifierSystem()
      : base() {
      #region Create parameters
      Parameters.Add(new ValueParameter<IntValue>("Seed", "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new ValueParameter<BoolValue>("SetSeedRandomly", "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<BoolValue>("CreateInitialPopulation", "Specifies if a population should be created at the beginning of the algorithm.", new BoolValue(false)));
      Parameters.Add(new ValueParameter<IntValue>("N", "Maximum size of the population of classifiers.", new IntValue(100)));
      Parameters.Add(new ValueParameter<PercentValue>("Beta", "Learning rate.", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<PercentValue>("Alpha", "", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<DoubleValue>("ErrorZero", "The error below which classifiers are considered to have equal accuracy.", new DoubleValue(10)));
      Parameters.Add(new ValueParameter<DoubleValue>("v", "Power parameter.", new DoubleValue(5)));
      Parameters.Add(new ValueParameter<PercentValue>("Gamma", "Discount factor.", new PercentValue(0.71)));
      Parameters.Add(new ValueParameter<PercentValue>("CrossoverProbability", "Probability of crossover.", new PercentValue(0.9)));
      Parameters.Add(new ValueParameter<PercentValue>("MutationProbability", "Probability of mutation.", new PercentValue(0.05)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaGA", "GA threshold. The GA is applied in a set when the average time since the last GA is greater than ThetaGA.", new IntValue(25)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaDeletion", "Deletion threshold. If the experience of a classifier is greater than ThetaDeletion, its fitness may be considered in its probability of deletion.", new IntValue(20)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaSubsumption", "Subsumption threshold. The experience of a classifier must be greater than ThetaSubsumption for it to be able to subsume another classifier.", new IntValue(20)));
      Parameters.Add(new ValueParameter<PercentValue>("Delta", "Delta specifies the fraction of the mean fitness in [P] below which the fitness of a classifier may be considered in its probability of deletion.", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<PercentValue>("ExplorationProbability", "Probability of selecting the action uniformly at random.", new PercentValue(0.5)));
      Parameters.Add(new ValueParameter<BoolValue>("DoGASubsumption", "Specifies if offspring are tested for possible logical subsumption by their parents.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<BoolValue>("DoActionSetSubsumption", "Specifies if the action set is tested for subsuming classifiers.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<MultiAnalyzer>("Analyzer", "The operator used to analyze each generation.", new MultiAnalyzer()));
      Parameters.Add(new ValueParameter<MultiAnalyzer>("FinalAnalyzer", "The operator used to analyze the last generation.", new MultiAnalyzer()));
      Parameters.Add(new ValueParameter<IntValue>("MaxIterations", "The maximum number of iterations.", new IntValue(1000)));
      #endregion

      #region Create operators
      RandomCreator randomCreator = new RandomCreator();
      ResultsCollector resultsCollector = new ResultsCollector();
      LearningClassifierSystemMainLoop mainLoop = new LearningClassifierSystemMainLoop();

      // Wire the operators to the algorithm's parameters by name.
      randomCreator.RandomParameter.ActualName = "Random";
      randomCreator.SeedParameter.ActualName = SeedParameter.Name;
      randomCreator.SeedParameter.Value = null;
      randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameter.Name;
      randomCreator.SetSeedRandomlyParameter.Value = null;

      resultsCollector.ResultsParameter.ActualName = "Results";

      mainLoop.AnalyzerParameter.ActualName = AnalyzerParameter.Name;
      mainLoop.FinalAnalyzerParameter.ActualName = FinalAnalyzerParameter.Name;
      mainLoop.MaxIterationsParameter.ActualName = MaxIterationsParameter.Name;
      #endregion

      #region Create operator graph
      OperatorGraph.InitialOperator = randomCreator;
      randomCreator.Successor = resultsCollector;
      resultsCollector.Successor = mainLoop;
      #endregion

      UpdateAnalyzers();
    }

    private LearningClassifierSystem(LearningClassifierSystem original, Cloner cloner)
      : base(original, cloner) {
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new LearningClassifierSystem(this, cloner);
    }
    [StorableConstructor]
    private LearningClassifierSystem(bool deserializing) : base(deserializing) { }

    protected override void OnProblemChanged() {
      if (Problem != null) {
        ParameterizeEvaluator(Problem.Evaluator);
        MainLoop.SetCurrentProblem(Problem);
        UpdateAnalyzers();
      }
      base.OnProblemChanged();
    }
    protected override void Problem_EvaluatorChanged(object sender, EventArgs e) {
      ParameterizeEvaluator(Problem.Evaluator);
      MainLoop.SetCurrentProblem(Problem);
      base.Problem_EvaluatorChanged(sender, e);
    }
    protected override void Problem_SolutionCreatorChanged(object sender, EventArgs e) {
      MainLoop.SetCurrentProblem(Problem);
      base.Problem_SolutionCreatorChanged(sender, e);
    }
    protected override void Problem_OperatorsChanged(object sender, EventArgs e) {
      UpdateAnalyzers();
      base.Problem_OperatorsChanged(sender, e);
    }

    // Redirects the evaluator's parameters to the algorithm's parameters by name.
    private void ParameterizeEvaluator(IXCSEvaluator evaluator) {
      evaluator.ActualTimeParameter.ActualName = "Iteration";
      evaluator.BetaParameter.ActualName = BetaParameter.Name;
      evaluator.AlphaParameter.ActualName = AlphaParameter.Name;
      evaluator.PowerParameter.ActualName = PowerParameter.Name;
      evaluator.ErrorZeroParameter.ActualName = ErrorZeroParameter.Name;
    }

    // Collects the analyzers provided by the problem into the per-iteration and final analyzers.
    private void UpdateAnalyzers() {
      Analyzer.Operators.Clear();
      FinalAnalyzer.Operators.Clear();
      if (Problem != null) {
        foreach (IAnalyzer analyzer in Problem.Operators.OfType<IAnalyzer>()) {
          Analyzer.Operators.Add(analyzer, analyzer.EnabledByDefault);
          FinalAnalyzer.Operators.Add(analyzer, analyzer.EnabledByDefault);
        }
      }
    }

    // Walks the successor chain from the given operator until the main loop is found.
    private LearningClassifierSystemMainLoop FindMainLoop(IOperator start) {
      IOperator mainLoop = start;
      while (mainLoop != null && !(mainLoop is LearningClassifierSystemMainLoop))
        mainLoop = ((SingleSuccessorOperator)mainLoop).Successor;
      if (mainLoop == null) return null;
      else return (LearningClassifierSystemMainLoop)mainLoop;
    }
  }
}
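// Usage sketch (not part of the original file): a minimal example of how this algorithm
// might be configured programmatically. It assumes the standard HeuristicLab algorithm API
// (Prepare()/Start()) and a hypothetical IConditionActionProblem implementation named
// MyConditionActionProblem; only properties defined in this class are set.
//
//   var lcs = new LearningClassifierSystem();
//   lcs.Problem = new MyConditionActionProblem();     // any IConditionActionProblem
//   lcs.PopulationSize = new IntValue(500);           // parameter "N"
//   lcs.MaxIterations = new IntValue(5000);
//   lcs.ExplorationProbability = new PercentValue(0.5);
//   lcs.Prepare();
//   lcs.Start();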