source: branches/LearningClassifierSystems/HeuristicLab.Algorithms.LearningClassifierSystems/3.3/LearningClassifierSystem.cs @ 9204

Last change on this file was r9204, checked in by sforsten, 11 years ago

#1980:

  • deleted the no-longer-needed interface IMatching
  • finished VariableVector encoding
  • the algorithm LearningClassifierSystem is now independent of a specific encoding; it still needs the ConditionActionEncoding (a usage sketch follows below)
  • merged r9191:9203 HeuristicLab.Core from trunk to branch
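
For orientation, here is a minimal usage sketch of the algorithm in the file below. It is not part of the changeset and makes two assumptions: MyConditionActionProblem is a placeholder for any IConditionActionProblem implementation, and Engine/Prepare/Start come from the EngineAlgorithm base classes, which are not shown in this file.

  // Hedged sketch, not from the repository:
  var lcs = new LearningClassifierSystem();
  lcs.Problem = new MyConditionActionProblem();        // hypothetical IConditionActionProblem
  lcs.PopulationSize = new IntValue(400);              // parameter "N", max population size
  lcs.MaxIterations = new IntValue(5000);
  lcs.Engine = new HeuristicLab.SequentialEngine.SequentialEngine();  // assumed engine type
  lcs.Prepare();                                       // assumed base-class members
  lcs.Start();
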
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.ConditionActionEncoding;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Optimization.Operators;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.LearningClassifierSystems {
  /// <summary>
  /// A learning classifier system.
  /// </summary>
  [Item("Learning Classifier System", "A learning classifier system.")]
  [Creatable("Algorithms")]
  [StorableClass]
  public sealed class LearningClassifierSystem : HeuristicOptimizationEngineAlgorithm, IStorableContent {
    public string Filename { get; set; }

    #region Problem Properties
    public override Type ProblemType {
      get { return typeof(IConditionActionProblem); }
    }
    public new IConditionActionProblem Problem {
      get { return (IConditionActionProblem)base.Problem; }
      set { base.Problem = value; }
    }
    #endregion

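    // The properties below are typed accessors into the Parameters collection;
    // each entry is registered by name in the constructor.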
    #region Parameter Properties
    private ValueParameter<IntValue> SeedParameter {
      get { return (ValueParameter<IntValue>)Parameters["Seed"]; }
    }
    private ValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (ValueParameter<BoolValue>)Parameters["SetSeedRandomly"]; }
    }
    private ValueParameter<BoolValue> CreateInitialPopulationParameter {
      get { return (ValueParameter<BoolValue>)Parameters["CreateInitialPopulation"]; }
    }
    private ValueParameter<IntValue> PopulationSizeParameter {
      get { return (ValueParameter<IntValue>)Parameters["N"]; }
    }
    private ValueParameter<PercentValue> BetaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Beta"]; }
    }
    private ValueParameter<PercentValue> AlphaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Alpha"]; }
    }
    private ValueParameter<DoubleValue> ErrorZeroParameter {
      get { return (ValueParameter<DoubleValue>)Parameters["ErrorZero"]; }
    }
    private ValueParameter<DoubleValue> PowerParameter {
      get { return (ValueParameter<DoubleValue>)Parameters["v"]; }
    }
    private ValueParameter<PercentValue> GammaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Gamma"]; }
    }
    private ValueParameter<PercentValue> CrossoverProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["CrossoverProbability"]; }
    }
    private ValueParameter<PercentValue> MutationProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["MutationProbability"]; }
    }
    private ValueParameter<IntValue> ThetaGAParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaGA"]; }
    }
    private ValueParameter<IntValue> ThetaDeletionParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaDeletion"]; }
    }
    private ValueParameter<IntValue> ThetaSubsumptionParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaSubsumption"]; }
    }
    private ValueParameter<PercentValue> DeltaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Delta"]; }
    }
    private ValueParameter<PercentValue> ExplorationProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["ExplorationProbability"]; }
    }
    private ValueParameter<BoolValue> DoGASubsumptionParameter {
      get { return (ValueParameter<BoolValue>)Parameters["DoGASubsumption"]; }
    }
    private ValueParameter<BoolValue> DoActionSetSubsumptionParameter {
      get { return (ValueParameter<BoolValue>)Parameters["DoActionSetSubsumption"]; }
    }
    private ValueParameter<MultiAnalyzer> AnalyzerParameter {
      get { return (ValueParameter<MultiAnalyzer>)Parameters["Analyzer"]; }
    }
    private ValueParameter<MultiAnalyzer> FinalAnalyzerParameter {
      get { return (ValueParameter<MultiAnalyzer>)Parameters["FinalAnalyzer"]; }
    }
    private ValueParameter<IntValue> MaxIterationsParameter {
      get { return (ValueParameter<IntValue>)Parameters["MaxIterations"]; }
    }
    public IConstrainedValueParameter<ICrossover> CrossoverParameter {
      get { return (IConstrainedValueParameter<ICrossover>)Parameters["Crossover"]; }
    }
    public IConstrainedValueParameter<IManipulator> MutatorParameter {
      get { return (IConstrainedValueParameter<IManipulator>)Parameters["Mutator"]; }
    }
    #endregion

    #region Properties
    public IntValue Seed {
      get { return SeedParameter.Value; }
      set { SeedParameter.Value = value; }
    }
    public BoolValue SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value; }
      set { SetSeedRandomlyParameter.Value = value; }
    }
    public BoolValue CreateInitialPopulation {
      get { return CreateInitialPopulationParameter.Value; }
      set { CreateInitialPopulationParameter.Value = value; }
    }
    public IntValue PopulationSize {
      get { return PopulationSizeParameter.Value; }
      set { PopulationSizeParameter.Value = value; }
    }
    public PercentValue Beta {
      get { return BetaParameter.Value; }
      set { BetaParameter.Value = value; }
    }
    public PercentValue Alpha {
      get { return AlphaParameter.Value; }
      set { AlphaParameter.Value = value; }
    }
    public DoubleValue ErrorZero {
      get { return ErrorZeroParameter.Value; }
      set { ErrorZeroParameter.Value = value; }
    }
    public DoubleValue Power {
      get { return PowerParameter.Value; }
      set { PowerParameter.Value = value; }
    }
    public PercentValue Gamma {
      get { return GammaParameter.Value; }
      set { GammaParameter.Value = value; }
    }
    public PercentValue CrossoverProbability {
      get { return CrossoverProbabilityParameter.Value; }
      set { CrossoverProbabilityParameter.Value = value; }
    }
    public PercentValue MutationProbability {
      get { return MutationProbabilityParameter.Value; }
      set { MutationProbabilityParameter.Value = value; }
    }
    public IntValue ThetaGA {
      get { return ThetaGAParameter.Value; }
      set { ThetaGAParameter.Value = value; }
    }
    public IntValue ThetaDeletion {
      get { return ThetaDeletionParameter.Value; }
      set { ThetaDeletionParameter.Value = value; }
    }
    public IntValue ThetaSubsumption {
      get { return ThetaSubsumptionParameter.Value; }
      set { ThetaSubsumptionParameter.Value = value; }
    }
    public PercentValue Delta {
      get { return DeltaParameter.Value; }
      set { DeltaParameter.Value = value; }
    }
    public PercentValue ExplorationProbability {
      get { return ExplorationProbabilityParameter.Value; }
      set { ExplorationProbabilityParameter.Value = value; }
    }
    public BoolValue DoGASubsumption {
      get { return DoGASubsumptionParameter.Value; }
      set { DoGASubsumptionParameter.Value = value; }
    }
    public BoolValue DoActionSetSubsumption {
      get { return DoActionSetSubsumptionParameter.Value; }
      set { DoActionSetSubsumptionParameter.Value = value; }
    }
    public IntValue MaxIterations {
      get { return MaxIterationsParameter.Value; }
      set { MaxIterationsParameter.Value = value; }
    }
    public MultiAnalyzer Analyzer {
      get { return AnalyzerParameter.Value; }
      set { AnalyzerParameter.Value = value; }
    }
    public MultiAnalyzer FinalAnalyzer {
      get { return FinalAnalyzerParameter.Value; }
      set { FinalAnalyzerParameter.Value = value; }
    }
    public ICrossover Crossover {
      get { return CrossoverParameter.Value; }
      set { CrossoverParameter.Value = value; }
    }
    public IManipulator Mutator {
      get { return MutatorParameter.Value; }
      set { MutatorParameter.Value = value; }
    }
    private RandomCreator RandomCreator {
      get { return (RandomCreator)OperatorGraph.InitialOperator; }
    }
    public LearningClassifierSystemMainLoop MainLoop {
      get { return FindMainLoop(RandomCreator.Successor); }
    }
    #endregion

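    /// <summary>
    /// Registers all algorithm parameters and builds the operator graph
    /// RandomCreator -> ResultsCollector -> LearningClassifierSystemMainLoop.
    /// </summary>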
    public LearningClassifierSystem()
      : base() {
      #region Create parameters
      Parameters.Add(new ValueParameter<IntValue>("Seed", "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new ValueParameter<BoolValue>("SetSeedRandomly", "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<BoolValue>("CreateInitialPopulation", "Specifies if a population should be created at the beginning of the algorithm.", new BoolValue(false)));
      Parameters.Add(new ValueParameter<IntValue>("N", "Max size of the population of solutions.", new IntValue(100)));
      Parameters.Add(new ValueParameter<PercentValue>("Beta", "Learning rate", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<PercentValue>("Alpha", "Fall-off rate used in the accuracy calculation.", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<DoubleValue>("ErrorZero", "The error below which classifiers are considered to have equal accuracy.", new DoubleValue(10)));
      Parameters.Add(new ValueParameter<DoubleValue>("v", "Power parameter", new DoubleValue(5)));
      Parameters.Add(new ValueParameter<PercentValue>("Gamma", "Discount factor", new PercentValue(0.71)));
      Parameters.Add(new ValueParameter<PercentValue>("CrossoverProbability", "Probability of crossover", new PercentValue(0.9)));
      Parameters.Add(new ValueParameter<PercentValue>("MutationProbability", "Probability of mutation", new PercentValue(0.05)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaGA", "GA threshold. The GA is applied in a set when the average time since the last GA is greater than ThetaGA.", new IntValue(25)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaDeletion", "Deletion threshold. If the experience of a classifier is greater than ThetaDeletion, its fitness may be considered in its probability of deletion.", new IntValue(20)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaSubsumption", "Subsumption threshold. The experience of a classifier must be greater than ThetaSubsumption to be able to subsume another classifier.", new IntValue(20)));
      Parameters.Add(new ValueParameter<PercentValue>("Delta", "Delta specifies the fraction of mean fitness in [P] below which the fitness of a classifier may be considered in its probability of deletion.", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<PercentValue>("ExplorationProbability", "Probability of selecting the action uniformly at random.", new PercentValue(0.5)));
      Parameters.Add(new ValueParameter<BoolValue>("DoGASubsumption", "Specifies if offspring are tested for possible logical subsumption by their parents.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<BoolValue>("DoActionSetSubsumption", "Specifies if the action set is tested for subsuming classifiers.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<MultiAnalyzer>("Analyzer", "The operator used to analyze each generation.", new MultiAnalyzer()));
      Parameters.Add(new ValueParameter<MultiAnalyzer>("FinalAnalyzer", "The operator used to analyze the last generation.", new MultiAnalyzer()));
      Parameters.Add(new ValueParameter<IntValue>("MaxIterations", "The maximum number of iterations.", new IntValue(1000)));
      Parameters.Add(new ConstrainedValueParameter<ICrossover>("Crossover", "The operator used to cross solutions."));
      Parameters.Add(new ConstrainedValueParameter<IManipulator>("Mutator", "The operator used to mutate solutions."));
      #endregion

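      // The ActualName assignments below let the created operators look up this
      // algorithm's parameters (and the shared "Random" and "Results" variables) by name.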
      #region Create operators
      RandomCreator randomCreator = new RandomCreator();

      ResultsCollector resultsCollector = new ResultsCollector();
      LearningClassifierSystemMainLoop mainLoop = new LearningClassifierSystemMainLoop();

      randomCreator.RandomParameter.ActualName = "Random";
      randomCreator.SeedParameter.ActualName = SeedParameter.Name;
      randomCreator.SeedParameter.Value = null;
      randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameter.Name;
      randomCreator.SetSeedRandomlyParameter.Value = null;

      resultsCollector.ResultsParameter.ActualName = "Results";

      mainLoop.AnalyzerParameter.ActualName = AnalyzerParameter.Name;
      mainLoop.FinalAnalyzerParameter.ActualName = FinalAnalyzerParameter.Name;
      mainLoop.MaxIterationsParameter.ActualName = MaxIterationsParameter.Name;
      mainLoop.CrossoverParameter.ActualName = CrossoverParameter.Name;
      mainLoop.MutatorParameter.ActualName = MutatorParameter.Name;
      mainLoop.CrossoverProbabilityParameter.ActualName = CrossoverProbabilityParameter.Name;
      #endregion

      #region Create operator graph
      OperatorGraph.InitialOperator = randomCreator;
      randomCreator.Successor = resultsCollector;
      resultsCollector.Successor = mainLoop;
      #endregion

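      // No problem is assigned yet, so this call only clears the analyzer collections;
      // they are populated again in OnProblemChanged once a problem is set.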
      UpdateAnalyzers();
    }
    private LearningClassifierSystem(LearningClassifierSystem original, Cloner cloner)
      : base(original, cloner) {
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new LearningClassifierSystem(this, cloner);
    }
    [StorableConstructor]
    private LearningClassifierSystem(bool deserializing) : base(deserializing) { }

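    // When the problem or its evaluator/operators change, the operators it provides
    // are re-wired to this algorithm's parameters and the main loop is updated.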
    protected override void OnProblemChanged() {
      if (Problem != null) {
        ParameterizeEvaluator(Problem.Evaluator);
        MainLoop.SetCurrentProblem(Problem);
        UpdateCrossovers();
        UpdateMutators();
        UpdateAnalyzers();
        ParameterizeManipulator();
      }
      base.OnProblemChanged();
    }

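    // Routes the algorithm-level MutationProbability parameter to every
    // probability-based mutation operator offered by the problem.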
    private void ParameterizeManipulator() {
      foreach (var op in Problem.Operators.OfType<IProbabilityMutatorOperator>()) {
        op.ProbabilityParameter.ActualName = MutationProbabilityParameter.Name;
      }
    }
    protected override void Problem_EvaluatorChanged(object sender, EventArgs e) {
      ParameterizeEvaluator(Problem.Evaluator);
      MainLoop.SetCurrentProblem(Problem);
      base.Problem_EvaluatorChanged(sender, e);
    }
    protected override void Problem_SolutionCreatorChanged(object sender, EventArgs e) {
      MainLoop.SetCurrentProblem(Problem);
      base.Problem_SolutionCreatorChanged(sender, e);
    }
    protected override void Problem_OperatorsChanged(object sender, EventArgs e) {
      UpdateCrossovers();
      UpdateMutators();
      UpdateAnalyzers();
      ParameterizeManipulator();
      base.Problem_OperatorsChanged(sender, e);
    }

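    // Points the XCS evaluator's learning parameters (Beta, Alpha, v, ErrorZero)
    // at this algorithm's parameters and binds its time counter to "Iteration".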
    private void ParameterizeEvaluator(IXCSEvaluator evaluator) {
      evaluator.ActualTimeParameter.ActualName = "Iteration";
      evaluator.BetaParameter.ActualName = BetaParameter.Name;
      evaluator.AlphaParameter.ActualName = AlphaParameter.Name;
      evaluator.PowerParameter.ActualName = PowerParameter.Name;
      evaluator.ErrorZeroParameter.ActualName = ErrorZeroParameter.Name;
    }

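    // UpdateCrossovers and UpdateMutators rebuild the ValidValues collections from the
    // problem's operators, keeping the previously selected operator if one of the same
    // type is still available; otherwise they fall back to the first operator found.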
    private void UpdateCrossovers() {
      ICrossover oldCrossover = CrossoverParameter.Value;
      CrossoverParameter.ValidValues.Clear();
      ICrossover defaultCrossover = Problem.Operators.OfType<ICrossover>().FirstOrDefault();

      foreach (ICrossover crossover in Problem.Operators.OfType<ICrossover>().OrderBy(x => x.Name))
        CrossoverParameter.ValidValues.Add(crossover);

      if (oldCrossover != null) {
        ICrossover crossover = CrossoverParameter.ValidValues.FirstOrDefault(x => x.GetType() == oldCrossover.GetType());
        if (crossover != null) CrossoverParameter.Value = crossover;
        else oldCrossover = null;
      }
      if (oldCrossover == null && defaultCrossover != null)
        CrossoverParameter.Value = defaultCrossover;
    }
    private void UpdateMutators() {
      IManipulator oldMutator = MutatorParameter.Value;
      MutatorParameter.ValidValues.Clear();
      IManipulator defaultMutator = Problem.Operators.OfType<IManipulator>().FirstOrDefault();

      foreach (IManipulator mutator in Problem.Operators.OfType<IManipulator>().OrderBy(x => x.Name))
        MutatorParameter.ValidValues.Add(mutator);
      if (oldMutator != null) {
        IManipulator mutator = MutatorParameter.ValidValues.FirstOrDefault(x => x.GetType() == oldMutator.GetType());
        if (mutator != null) MutatorParameter.Value = mutator;
        else oldMutator = null;
      }
      if (oldMutator == null && defaultMutator != null)
        MutatorParameter.Value = defaultMutator;
    }
    private void UpdateAnalyzers() {
      Analyzer.Operators.Clear();
      FinalAnalyzer.Operators.Clear();
      if (Problem != null) {
        foreach (IAnalyzer analyzer in Problem.Operators.OfType<IAnalyzer>()) {
          Analyzer.Operators.Add(analyzer, analyzer.EnabledByDefault);
          FinalAnalyzer.Operators.Add(analyzer, analyzer.EnabledByDefault);
        }
      }
    }

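    /// <summary>
    /// Walks the successor chain starting at <paramref name="start"/> and returns the
    /// first LearningClassifierSystemMainLoop found, or null if the chain contains none.
    /// </summary>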
    private LearningClassifierSystemMainLoop FindMainLoop(IOperator start) {
      IOperator mainLoop = start;
      while (mainLoop != null && !(mainLoop is LearningClassifierSystemMainLoop))
        mainLoop = ((SingleSuccessorOperator)mainLoop).Successor;
      if (mainLoop == null) return null;
      else return (LearningClassifierSystemMainLoop)mainLoop;
    }
  }
}