
source: branches/PersistentDataStructures/HeuristicLab.Algorithms.DataAnalysis/3.4/BaselineClassifiers/OneR.cs @ 16138

Last change on this file since 16138 was 14186, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 7.3 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// 1R classification algorithm.
  /// </summary>
  [Item("OneR Classification", "A simple classification algorithm that searches for the best single-variable split (does not support categorical features correctly). See R.C. Holte (1993). Very simple classification rules perform well on most commonly used datasets. Machine Learning. 11:63-91.")]
  [StorableClass]
  public sealed class OneR : FixedDataAnalysisAlgorithm<IClassificationProblem> {

    public IValueParameter<IntValue> MinBucketSizeParameter {
      get { return (IValueParameter<IntValue>)Parameters["MinBucketSize"]; }
    }

    [StorableConstructor]
    private OneR(bool deserializing) : base(deserializing) { }

    private OneR(OneR original, Cloner cloner)
      : base(original, cloner) { }

    public OneR()
      : base() {
      Parameters.Add(new ValueParameter<IntValue>("MinBucketSize", "Minimum size of a bucket for numerical values (except for the rightmost bucket).", new IntValue(6)));
      Problem = new ClassificationProblem();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new OneR(this, cloner);
    }

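    // creates the OneR solution for the current problem data and adds it to the result collection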
    protected override void Run() {
      var solution = CreateOneRSolution(Problem.ProblemData, MinBucketSizeParameter.Value.Value);
      Results.Add(new Result("OneR solution", "The 1R classifier.", solution));
    }

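    // builds a OneR solution: each allowed input variable is discretized into buckets and the
    // variable whose single-variable rule classifies the most training samples correctly is selected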
    public static IClassificationSolution CreateOneRSolution(IClassificationProblemData problemData, int minBucketSize = 6) {
      var bestClassified = 0;
      List<Split> bestSplits = null;
      string bestVariable = string.Empty;
      double bestMissingValuesClass = double.NaN;
      var classValues = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices);

      foreach (var variable in problemData.AllowedInputVariables) {
        var inputValues = problemData.Dataset.GetDoubleValues(variable, problemData.TrainingIndices);
        var samples = inputValues.Zip(classValues, (i, v) => new Sample(i, v)).OrderBy(s => s.inputValue);

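        // determine the most frequent class among samples with a missing (NaN) input value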
        var missingValuesDistribution = samples.Where(s => double.IsNaN(s.inputValue)).GroupBy(s => s.classValue).ToDictionary(s => s.Key, s => s.Count()).MaxItems(s => s.Value).FirstOrDefault();

        //calculate class distributions for all distinct inputValues
        List<Dictionary<double, int>> classDistributions = new List<Dictionary<double, int>>();
        List<double> thresholds = new List<double>();
        double lastValue = double.NaN;
        foreach (var sample in samples.Where(s => !double.IsNaN(s.inputValue))) {
          if (sample.inputValue > lastValue || double.IsNaN(lastValue)) {
            if (!double.IsNaN(lastValue)) thresholds.Add((lastValue + sample.inputValue) / 2);
            lastValue = sample.inputValue;
            classDistributions.Add(new Dictionary<double, int>());
            foreach (var classValue in problemData.ClassValues)
              classDistributions[classDistributions.Count - 1][classValue] = 0;

          }
          classDistributions[classDistributions.Count - 1][sample.classValue]++;
        }
        thresholds.Add(double.PositiveInfinity);

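        // greedily merge adjacent buckets that are too small or that predict the same majority class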
        var distribution = classDistributions[0];
        var threshold = thresholds[0];
        var splits = new List<Split>();

        for (int i = 1; i < classDistributions.Count; i++) {
          var samplesInSplit = distribution.Max(d => d.Value);
          //join splits if there are too few samples in the split or the next distribution has the same maximum class value as the current split
          if (samplesInSplit < minBucketSize ||
            classDistributions[i].MaxItems(d => d.Value).Select(d => d.Key).Contains(
              distribution.MaxItems(d => d.Value).Select(d => d.Key).First())) {
            foreach (var classValue in classDistributions[i])
              distribution[classValue.Key] += classValue.Value;
            threshold = thresholds[i];
          } else {
            splits.Add(new Split(threshold, distribution.MaxItems(d => d.Value).Select(d => d.Key).First()));
            distribution = classDistributions[i];
            threshold = thresholds[i];
          }
        }
        splits.Add(new Split(double.PositiveInfinity, distribution.MaxItems(d => d.Value).Select(d => d.Key).First()));

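        // evaluate the rule on the training samples; samples with missing values are credited to the majority class of the missing-value group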
        int correctClassified = 0;
        int splitIndex = 0;
        foreach (var sample in samples.Where(s => !double.IsNaN(s.inputValue))) {
          while (sample.inputValue >= splits[splitIndex].thresholdValue)
            splitIndex++;
          correctClassified += sample.classValue == splits[splitIndex].classValue ? 1 : 0;
        }
        correctClassified += missingValuesDistribution.Value;

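        // keep the variable with the highest number of correctly classified training samples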
        if (correctClassified > bestClassified) {
          bestClassified = correctClassified;
          bestSplits = splits;
          bestVariable = variable;
          bestMissingValuesClass = missingValuesDistribution.Value == 0 ? double.NaN : missingValuesDistribution.Key;
        }
      }

      //remove neighboring splits with the same class value
      for (int i = 0; i < bestSplits.Count - 1; i++) {
        if (bestSplits[i].classValue == bestSplits[i + 1].classValue) {
          bestSplits.Remove(bestSplits[i]);
          i--;
        }
      }

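      // build the final model from the thresholds and class values of the best splits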
      var model = new OneRClassificationModel(problemData.TargetVariable, bestVariable, bestSplits.Select(s => s.thresholdValue).ToArray(), bestSplits.Select(s => s.classValue).ToArray(), bestMissingValuesClass);
      var solution = new OneRClassificationSolution(model, (IClassificationProblemData)problemData.Clone());

      return solution;
    }

    #region helper classes
    private class Split {
      public double thresholdValue;
      public double classValue;

      public Split(double thresholdValue, double classValue) {
        this.thresholdValue = thresholdValue;
        this.classValue = classValue;
      }
    }

    private class Sample {
      public double inputValue;
      public double classValue;

      public Sample(double inputValue, double classValue) {
        this.inputValue = inputValue;
        this.classValue = classValue;
      }
    }
    #endregion
  }
}