
source: branches/M5Regression/HeuristicLab.Algorithms.DataAnalysis/3.4/M5Regression/MetaModels/M5RuleModel.cs @ 15614

Last change on this file: r15614, checked in by bwerth, 6 years ago

#2847 made changes to M5 according to review comments

File size: 7.6 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2017 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
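  /// <summary>
  /// A single M5 rule: a conjunction of split conditions (split attribute, split value and
  /// comparison operator per condition) together with the regression model of the leaf the
  /// rule was extracted from. Rows that satisfy all conditions are covered by the rule and
  /// are predicted by that leaf model.
  /// </summary>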
  [StorableClass]
  internal class M5RuleModel : RegressionModel {
    #region Properties
    [Storable]
    internal string[] SplitAttributes { get; private set; }
    [Storable]
    private double[] splitValues;
    [Storable]
    private Comparison[] comparisons;
    [Storable]
    protected IRegressionModel RuleModel { get; set; }
    [Storable]
    private IReadOnlyList<string> variables;
    #endregion

    #region HLConstructors
    [StorableConstructor]
    protected M5RuleModel(bool deserializing) : base(deserializing) { }
    protected M5RuleModel(M5RuleModel original, Cloner cloner) : base(original, cloner) {
      if (original.SplitAttributes != null) SplitAttributes = original.SplitAttributes.ToArray();
      if (original.splitValues != null) splitValues = original.splitValues.ToArray();
      if (original.comparisons != null) comparisons = original.comparisons.ToArray();
      RuleModel = cloner.Clone(original.RuleModel);
      if (original.variables != null) variables = original.variables.ToList();
    }
    private M5RuleModel(string target) : base(target) { }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new M5RuleModel(this, cloner);
    }
    #endregion

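    // Factory: if the configured leaf model can provide confidence estimates, create a
    // ConfidenceM5RuleModel (which also implements IConfidenceRegressionModel), otherwise a plain M5RuleModel.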
    internal static M5RuleModel CreateRuleModel(string target, M5Parameters m5Params) {
      return m5Params.LeafModel.ProvidesConfidence ? new ConfidenceM5RuleModel(target) : new M5RuleModel(target);
    }

    #region IRegressionModel
    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return variables; }
    }

    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      if (RuleModel == null) throw new NotSupportedException("The model has not been built correctly");
      return RuleModel.GetEstimatedValues(dataset, rows);
    }

    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, problemData);
    }
    #endregion

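    // Builds the rule by growing (and pruning) a complete M5 tree and extracting the path to one of
    // its leaves; the split conditions along that path become the rule's conditions and the leaf's
    // model becomes RuleModel.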
    public void Build(IReadOnlyList<int> trainingRows, IReadOnlyList<int> pruningRows, M5Parameters m5Params, CancellationToken cancellationToken) {
      variables = m5Params.AllowedInputVariables.ToList();
      var tree = M5TreeModel.CreateTreeModel(m5Params.TargetVariable, m5Params);
      tree.Build(trainingRows, pruningRows, m5Params, cancellationToken);
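      // choose the leaf that covers the most training samples; its path to the root defines the rule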
      var nodeModel = tree.Root.EnumerateNodes().Where(x => x.IsLeaf).MaxItems(x => x.NumSamples).First();

      var satts = new List<string>();
      var svals = new List<double>();
      var reops = new List<Comparison>();

      //extract Splits
      for (var temp = nodeModel; temp.Parent != null; temp = temp.Parent) {
        satts.Add(temp.Parent.SplitAttribute);
        svals.Add(temp.Parent.SplitValue);
        reops.Add(temp.Parent.Left == temp ? Comparison.LessEqual : Comparison.Greater);
      }
      RuleModel = nodeModel.Model;
      comparisons = reops.ToArray();
      SplitAttributes = satts.ToArray();
      splitValues = svals.ToArray();
    }

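    // Refits the leaf model of this rule on the given rows (presumably the rows still covered by the
    // rule at this point of the rule-set construction); the split conditions themselves stay unchanged.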
    public void Update(IReadOnlyList<int> rows, M5Parameters m5Parameters, CancellationToken cancellationToken) {
      BuildModel(rows, m5Parameters.Random, m5Parameters.Data, m5Parameters.LeafModel, cancellationToken);
    }

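    // A row is covered by this rule iff all stored split conditions hold, i.e. every attribute value
    // satisfies its comparison against the corresponding split value.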
    public bool Covers(IDataset dataset, int row) {
      return !SplitAttributes.Where((t, i) => !comparisons[i].Compare(dataset.GetDoubleValue(t, row), splitValues[i])).Any();
    }

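    // Renders the rule as one interval per split attribute (tightest lower/upper bound over all of its
    // conditions), joined with " && "; returns an empty string if the rule has no conditions.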
    public string ToCompactString() {
      var mins = new Dictionary<string, double>();
      var maxs = new Dictionary<string, double>();
      for (var i = 0; i < SplitAttributes.Length; i++) {
        var n = SplitAttributes[i];
        var v = splitValues[i];
        if (!mins.ContainsKey(n)) mins.Add(n, double.NegativeInfinity);
        if (!maxs.ContainsKey(n)) maxs.Add(n, double.PositiveInfinity);
        if (comparisons[i] == Comparison.LessEqual) maxs[n] = Math.Min(maxs[n], v);
        else mins[n] = Math.Max(mins[n], v);
      }
      if (maxs.Count == 0) return "";
      var s = new StringBuilder();
      foreach (var key in maxs.Keys)
        s.Append(string.Format("{0} ∈ [{1:e2}; {2:e2}] && ", key, mins[key], maxs[key]));
      s.Remove(s.Length - 4, 4);
      return s.ToString();
    }

    #region Helpers
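    // Creates a reduced dataset that contains only the given rows and the prediction variables plus
    // the target, declares all of those rows as training partition (empty test partition), and refits
    // the leaf model on it; the resulting model replaces RuleModel.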
    private void BuildModel(IReadOnlyList<int> rows, IRandom random, IDataset data, ILeafModel leafModel, CancellationToken cancellationToken) {
      var reducedData = new Dataset(VariablesUsedForPrediction.Concat(new[] {TargetVariable}), VariablesUsedForPrediction.Concat(new[] {TargetVariable}).Select(x => data.GetDoubleValues(x, rows).ToList()));
      var pd = new RegressionProblemData(reducedData, VariablesUsedForPrediction, TargetVariable);
      pd.TrainingPartition.Start = 0;
      pd.TrainingPartition.End = pd.TestPartition.Start = pd.TestPartition.End = reducedData.Rows;

      int noparams;
      RuleModel = leafModel.Build(pd, random, cancellationToken, out noparams);
      cancellationToken.ThrowIfCancellationRequested();
    }
    #endregion

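    // Variant used when the leaf model provides confidence estimates; forwards
    // IConfidenceRegressionModel calls to the underlying RuleModel.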
    [StorableClass]
    private sealed class ConfidenceM5RuleModel : M5RuleModel, IConfidenceRegressionModel {
      #region HLConstructors
      [StorableConstructor]
      private ConfidenceM5RuleModel(bool deserializing) : base(deserializing) { }
      private ConfidenceM5RuleModel(ConfidenceM5RuleModel original, Cloner cloner) : base(original, cloner) { }
      public ConfidenceM5RuleModel(string targetAttr) : base(targetAttr) { }
      public override IDeepCloneable Clone(Cloner cloner) {
        return new ConfidenceM5RuleModel(this, cloner);
      }
      #endregion

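      // The cast is safe because this class is only instantiated when the leaf model
      // provides confidence estimates (see CreateRuleModel).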
      public IEnumerable<double> GetEstimatedVariances(IDataset dataset, IEnumerable<int> rows) {
        return ((IConfidenceRegressionModel)RuleModel).GetEstimatedVariances(dataset, rows);
      }

      public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
        return new ConfidenceRegressionSolution(this, problemData);
      }
    }
  }

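  // Comparison operators for split conditions: rows with a value less than or equal to the split value
  // follow the left branch of a split, all others the right branch.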
  internal enum Comparison {
    LessEqual,
    Greater
  }

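  // Evaluates a split condition, e.g. Comparison.LessEqual.Compare(x, split) is true when x <= split.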
  internal static class ComparisonExtensions {
    public static bool Compare(this Comparison op, double x, double y) {
      switch (op) {
        case Comparison.Greater:
          return x > y;
        case Comparison.LessEqual:
          return x <= y;
        default:
          throw new ArgumentOutOfRangeException(nameof(op), op, null);
      }
    }
  }
}