Free cookie consent management tool by TermsFeed Policy Generator

source: branches/LearningClassifierSystems/HeuristicLab.Algorithms.LearningClassifierSystems/3.3/LearningClassifierSystem.cs @ 9154

Last change on this file since 9154 was 9154, checked in by sforsten, 11 years ago

#1980:

  • added XCSSolution, XCSModel, XCSClassifier to represent the xcs classifier
  • XCSSolution also shows the current accuracy (training and test partitions still have to be added)
  • added XCSSolutionAnalyzer to create a XCSSolution during the run of the algorithm
  • added XCSModelView to show the xcs model
  • fixed a bug in XCSDeletionOperator (sometimes it deleted less classifiers than it should)
  • moved some parameters from ConditionActionClassificationProblem to ConditionActionClassificationProblemData
File size: 14.5 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Linq;
24using HeuristicLab.Analysis;
25using HeuristicLab.Common;
26using HeuristicLab.Core;
27using HeuristicLab.Data;
28using HeuristicLab.Encodings.ConditionActionEncoding;
29using HeuristicLab.Operators;
30using HeuristicLab.Optimization;
31using HeuristicLab.Optimization.Operators;
32using HeuristicLab.Parameters;
33using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
34using HeuristicLab.Random;
35
namespace HeuristicLab.Algorithms.LearningClassifierSystems {
  /// <summary>
  /// A learning classifier system (XCS-style): evolves a population of condition-action
  /// classifiers. The algorithm is assembled as an operator graph consisting of a
  /// RandomCreator, a ResultsCollector and a <see cref="LearningClassifierSystemMainLoop"/>.
  /// </summary>
  [Item("Learning Classifier System", "A learning classifier system.")]
  [Creatable("Algorithms")]
  [StorableClass]
  public sealed class LearningClassifierSystem : HeuristicOptimizationEngineAlgorithm, IStorableContent {
    public string Filename { get; set; }

    #region Problem Properties
    public override Type ProblemType {
      get { return typeof(IConditionActionProblem); }
    }
    public new IConditionActionProblem Problem {
      get { return (IConditionActionProblem)base.Problem; }
      set { base.Problem = value; }
    }
    #endregion

    #region Parameter Properties
    private ValueParameter<IntValue> SeedParameter {
      get { return (ValueParameter<IntValue>)Parameters["Seed"]; }
    }
    private ValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (ValueParameter<BoolValue>)Parameters["SetSeedRandomly"]; }
    }
    private ValueParameter<BoolValue> CreateInitialPopulationParameter {
      get { return (ValueParameter<BoolValue>)Parameters["CreateInitialPopulation"]; }
    }
    private ValueParameter<IntValue> PopulationSizeParameter {
      get { return (ValueParameter<IntValue>)Parameters["N"]; }
    }
    private ValueParameter<PercentValue> BetaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Beta"]; }
    }
    private ValueParameter<PercentValue> AlphaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Alpha"]; }
    }
    private ValueParameter<DoubleValue> ErrorZeroParameter {
      get { return (ValueParameter<DoubleValue>)Parameters["ErrorZero"]; }
    }
    private ValueParameter<DoubleValue> PowerParameter {
      get { return (ValueParameter<DoubleValue>)Parameters["v"]; }
    }
    private ValueParameter<PercentValue> GammaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Gamma"]; }
    }
    private ValueParameter<PercentValue> CrossoverProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["CrossoverProbability"]; }
    }
    private ValueParameter<PercentValue> MutationProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["MutationProbability"]; }
    }
    private ValueParameter<IntValue> ThetaGAParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaGA"]; }
    }
    private ValueParameter<IntValue> ThetaDeletionParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaDeletion"]; }
    }
    private ValueParameter<IntValue> ThetaSubsumptionParameter {
      get { return (ValueParameter<IntValue>)Parameters["ThetaSubsumption"]; }
    }
    private ValueParameter<PercentValue> DeltaParameter {
      get { return (ValueParameter<PercentValue>)Parameters["Delta"]; }
    }
    private ValueParameter<PercentValue> ExplorationProbabilityParameter {
      get { return (ValueParameter<PercentValue>)Parameters["ExplorationProbability"]; }
    }
    private ValueParameter<BoolValue> DoGASubsumptionParameter {
      get { return (ValueParameter<BoolValue>)Parameters["DoGASubsumption"]; }
    }
    private ValueParameter<BoolValue> DoActionSetSubsumptionParameter {
      get { return (ValueParameter<BoolValue>)Parameters["DoActionSetSubsumption"]; }
    }
    private ValueParameter<MultiAnalyzer> AnalyzerParameter {
      get { return (ValueParameter<MultiAnalyzer>)Parameters["Analyzer"]; }
    }
    private ValueParameter<IntValue> MaxIterationsParameter {
      get { return (ValueParameter<IntValue>)Parameters["MaxIterations"]; }
    }
    #endregion

    #region Properties
    public IntValue Seed {
      get { return SeedParameter.Value; }
      set { SeedParameter.Value = value; }
    }
    public BoolValue SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value; }
      set { SetSeedRandomlyParameter.Value = value; }
    }
    public BoolValue CreateInitialPopulation {
      get { return CreateInitialPopulationParameter.Value; }
      set { CreateInitialPopulationParameter.Value = value; }
    }
    public IntValue PopulationSize {
      get { return PopulationSizeParameter.Value; }
      set { PopulationSizeParameter.Value = value; }
    }
    public PercentValue Beta {
      get { return BetaParameter.Value; }
      set { BetaParameter.Value = value; }
    }
    public PercentValue Alpha {
      get { return AlphaParameter.Value; }
      set { AlphaParameter.Value = value; }
    }
    public DoubleValue ErrorZero {
      get { return ErrorZeroParameter.Value; }
      set { ErrorZeroParameter.Value = value; }
    }
    public DoubleValue Power {
      get { return PowerParameter.Value; }
      set { PowerParameter.Value = value; }
    }
    public PercentValue Gamma {
      get { return GammaParameter.Value; }
      set { GammaParameter.Value = value; }
    }
    public PercentValue CrossoverProbability {
      get { return CrossoverProbabilityParameter.Value; }
      set { CrossoverProbabilityParameter.Value = value; }
    }
    public PercentValue MutationProbability {
      get { return MutationProbabilityParameter.Value; }
      set { MutationProbabilityParameter.Value = value; }
    }
    public IntValue ThetaGA {
      get { return ThetaGAParameter.Value; }
      set { ThetaGAParameter.Value = value; }
    }
    public IntValue ThetaDeletion {
      get { return ThetaDeletionParameter.Value; }
      set { ThetaDeletionParameter.Value = value; }
    }
    public IntValue ThetaSubsumption {
      get { return ThetaSubsumptionParameter.Value; }
      set { ThetaSubsumptionParameter.Value = value; }
    }
    public PercentValue Delta {
      get { return DeltaParameter.Value; }
      set { DeltaParameter.Value = value; }
    }
    public PercentValue ExplorationProbability {
      get { return ExplorationProbabilityParameter.Value; }
      set { ExplorationProbabilityParameter.Value = value; }
    }
    public BoolValue DoGASubsumption {
      get { return DoGASubsumptionParameter.Value; }
      set { DoGASubsumptionParameter.Value = value; }
    }
    public BoolValue DoActionSetSubsumption {
      get { return DoActionSetSubsumptionParameter.Value; }
      set { DoActionSetSubsumptionParameter.Value = value; }
    }
    public IntValue MaxIterations {
      get { return MaxIterationsParameter.Value; }
      set { MaxIterationsParameter.Value = value; }
    }
    public MultiAnalyzer Analyzer {
      get { return AnalyzerParameter.Value; }
      set { AnalyzerParameter.Value = value; }
    }
    private RandomCreator RandomCreator {
      get { return (RandomCreator)OperatorGraph.InitialOperator; }
    }
    public LearningClassifierSystemMainLoop MainLoop {
      // The main loop is located by walking the successor chain from the initial operator.
      get { return FindMainLoop(RandomCreator.Successor); }
    }
    #endregion

    /// <summary>
    /// Creates a new learning classifier system with default XCS parameter values and
    /// builds the operator graph (RandomCreator -> ResultsCollector -> main loop).
    /// </summary>
    public LearningClassifierSystem()
      : base() {
      #region Create parameters
      Parameters.Add(new ValueParameter<IntValue>("Seed", "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new ValueParameter<BoolValue>("SetSeedRandomly", "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<BoolValue>("CreateInitialPopulation", "Specifies if a population should be created at the beginning of the algorithm.", new BoolValue(false)));
      Parameters.Add(new ValueParameter<IntValue>("N", "Max size of the population of solutions.", new IntValue(100)));
      Parameters.Add(new ValueParameter<PercentValue>("Beta", "Learning rate", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<PercentValue>("Alpha", "", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<DoubleValue>("ErrorZero", "The error below which classifiers are considered to have equal accuracy", new DoubleValue(10)));
      Parameters.Add(new ValueParameter<DoubleValue>("v", "Power parameter", new DoubleValue(5)));
      Parameters.Add(new ValueParameter<PercentValue>("Gamma", "Discount factor", new PercentValue(0.71)));
      Parameters.Add(new ValueParameter<PercentValue>("CrossoverProbability", "Probability of crossover", new PercentValue(0.9)));
      Parameters.Add(new ValueParameter<PercentValue>("MutationProbability", "Probability of mutation", new PercentValue(0.05)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaGA", "GA threshold. GA is applied in a set when the average time since the last GA is greater than ThetaGA.", new IntValue(25)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaDeletion", "Deletion threshold. If the experience of a classifier is greater than ThetaDeletion, its fitness may be considered in its probability of deletion.", new IntValue(20)));
      Parameters.Add(new ValueParameter<IntValue>("ThetaSubsumption", "Subsumption threshold. The experience of a classifier must be greater than ThetaSubsumption to be able to subsume another classifier.", new IntValue(20)));
      Parameters.Add(new ValueParameter<PercentValue>("Delta", "Delta specifies the fraction of mean fitness in [P] below which the fitness of a classifier may be considered in its probability of deletion", new PercentValue(0.1)));
      Parameters.Add(new ValueParameter<PercentValue>("ExplorationProbability", "Probability of selecting the action uniform randomly", new PercentValue(0.5)));
      Parameters.Add(new ValueParameter<BoolValue>("DoGASubsumption", "Specifies if offsprings are tested for possible logical subsumption by parents.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<BoolValue>("DoActionSetSubsumption", "Specifies if action set is tested for subsuming classifiers.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<MultiAnalyzer>("Analyzer", "The operator used to analyze each generation.", new MultiAnalyzer()));
      Parameters.Add(new ValueParameter<IntValue>("MaxIterations", "The maximum number of iterations.", new IntValue(1000)));
      #endregion

      #region Create operators
      RandomCreator randomCreator = new RandomCreator();

      ResultsCollector resultsCollector = new ResultsCollector();
      LearningClassifierSystemMainLoop mainLoop = new LearningClassifierSystemMainLoop();

      // Wire the random creator to the algorithm's parameters by name; the local
      // parameter values are cleared so the values are looked up via ActualName.
      randomCreator.RandomParameter.ActualName = "Random";
      randomCreator.SeedParameter.ActualName = SeedParameter.Name;
      randomCreator.SeedParameter.Value = null;
      randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameter.Name;
      randomCreator.SetSeedRandomlyParameter.Value = null;

      resultsCollector.ResultsParameter.ActualName = "Results";

      mainLoop.AnalyzerParameter.ActualName = AnalyzerParameter.Name;
      mainLoop.MaxIterationsParameter.ActualName = MaxIterationsParameter.Name;
      #endregion

      #region Create operator graph
      OperatorGraph.InitialOperator = randomCreator;
      randomCreator.Successor = resultsCollector;
      resultsCollector.Successor = mainLoop;
      #endregion

      UpdateAnalyzers();
    }
    private LearningClassifierSystem(LearningClassifierSystem original, Cloner cloner)
      : base(original, cloner) {
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new LearningClassifierSystem(this, cloner);
    }
    [StorableConstructor]
    private LearningClassifierSystem(bool deserializing) : base(deserializing) { }

    protected override void OnProblemChanged() {
      if (Problem != null) {
        ParameterizeEvaluator(Problem.Evaluator);
        MainLoop.SetCurrentProblem(Problem);
        UpdateAnalyzers();
      }
      base.OnProblemChanged();
    }
    protected override void Problem_EvaluatorChanged(object sender, EventArgs e) {
      ParameterizeEvaluator(Problem.Evaluator);
      MainLoop.SetCurrentProblem(Problem);
      base.Problem_EvaluatorChanged(sender, e);
    }
    protected override void Problem_SolutionCreatorChanged(object sender, EventArgs e) {
      MainLoop.SetCurrentProblem(Problem);
      base.Problem_SolutionCreatorChanged(sender, e);
    }
    protected override void Problem_OperatorsChanged(object sender, EventArgs e) {
      UpdateAnalyzers();
      base.Problem_OperatorsChanged(sender, e);
    }

    /// <summary>
    /// Binds the evaluator's parameters to this algorithm's parameters by name.
    /// </summary>
    private void ParameterizeEvaluator(IXCSEvaluator evaluator) {
      evaluator.ActualTimeParameter.ActualName = "Iteration";
      evaluator.BetaParameter.ActualName = BetaParameter.Name;
      evaluator.AlphaParameter.ActualName = AlphaParameter.Name;
      evaluator.PowerParameter.ActualName = PowerParameter.Name;
      evaluator.ErrorZeroParameter.ActualName = ErrorZeroParameter.Name;
    }

    /// <summary>
    /// Rebuilds the multi-analyzer from the analyzers currently offered by the problem.
    /// </summary>
    private void UpdateAnalyzers() {
      Analyzer.Operators.Clear();
      if (Problem != null) {
        foreach (IAnalyzer analyzer in Problem.Operators.OfType<IAnalyzer>()) {
          Analyzer.Operators.Add(analyzer, analyzer.EnabledByDefault);
        }
      }
    }

    /// <summary>
    /// Walks the successor chain starting at <paramref name="start"/> and returns the first
    /// <see cref="LearningClassifierSystemMainLoop"/> encountered, or null if none exists.
    /// </summary>
    private LearningClassifierSystemMainLoop FindMainLoop(IOperator start) {
      IOperator current = start;
      while (current != null && !(current is LearningClassifierSystemMainLoop)) {
        // Stop safely if the chain contains an operator without a single successor;
        // the previous unconditional cast would have thrown InvalidCastException here.
        SingleSuccessorOperator single = current as SingleSuccessorOperator;
        if (single == null) return null;
        current = single.Successor;
      }
      return (LearningClassifierSystemMainLoop)current;
    }
  }
}
Note: See TracBrowser for help on using the repository browser.