Free cookie consent management tool by TermsFeed Policy Generator

source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs @ 13895

Last change on this file since 13895 was 13895, checked in by gkronber, 8 years ago

#2612: extended GBT to support calculation of partial dependence (as described in the greedy function approximation paper), changed persistence of regression tree models and added two unit tests.

File size: 8.9 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 * and the BEACON Center for the Study of Evolution in Action.
5 *
6 * This file is part of HeuristicLab.
7 *
8 * HeuristicLab is free software: you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation, either version 3 of the License, or
11 * (at your option) any later version.
12 *
13 * HeuristicLab is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 * GNU General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License
19 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
20 */
21#endregion
22
23using System;
24using System.Collections.Generic;
25using System.Collections.ObjectModel;
26using System.Globalization;
27using System.Linq;
28using HeuristicLab.Common;
29using HeuristicLab.Core;
30using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
31using HeuristicLab.Problems.DataAnalysis;
32
33namespace HeuristicLab.Algorithms.DataAnalysis {
34  [StorableClass]
35  [Item("RegressionTreeModel", "Represents a decision tree for regression.")]
36  public sealed class RegressionTreeModel : NamedItem, IRegressionModel {
37
    // A regression tree stored as a flat array of TreeNode structs.
    // Node 0 is the root; internal nodes reference their children by array index
    // (LeftIdx / RightIdx). Leaf nodes are marked by VarName == NO_VARIABLE and
    // carry the predicted value in Val. Used by gradient boosted trees (GBT).
38    // trees are represented as a flat array   
39    internal struct TreeNode {
      // Sentinel for VarName that marks a leaf (terminal) node.
40      public readonly static string NO_VARIABLE = null;
41
      // Creates a node. The defaults (leftIdx/rightIdx = -1) produce a leaf whose
      // prediction is 'val'. weightLeft = -1.0 marks nodes for which partial
      // dependence cannot be computed (trees loaded from the old storable format,
      // see SerializedTree below); GetPredictionForRow checks for this sentinel.
42      public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1, double weightLeft = -1.0)
43        : this() {
44        VarName = varName;
45        Val = val;
46        LeftIdx = leftIdx;
47        RightIdx = rightIdx;
48        WeightLeft = weightLeft;
49      }
50
51      public string VarName { get; internal set; } // name of the variable for splitting or NO_VARIABLE if terminal node
52      public double Val { get; internal set; } // threshold
53      public int LeftIdx { get; internal set; }
54      public int RightIdx { get; internal set; }
55      public double WeightLeft { get; internal set; } // for partial dependence plots (value in range [0..1] describes the fraction of training samples for the left sub-tree
56
57
58      // necessary because the default implementation of GetHashCode for structs in .NET would only return the hashcode of val here
      // NOTE: VarName and WeightLeft are deliberately excluded from the hash;
      // this is valid because nodes that are Equals-equal necessarily agree on
      // Val/LeftIdx/RightIdx and therefore hash identically.
59      public override int GetHashCode() {
60        return LeftIdx ^ RightIdx ^ Val.GetHashCode();
61      }
62      // necessary because of GetHashCode override
      // Value equality over all five fields (unlike GetHashCode, which uses a subset).
63      public override bool Equals(object obj) {
64        if (obj is TreeNode) {
65          var other = (TreeNode)obj;
66          return Val.Equals(other.Val) &&
67            LeftIdx.Equals(other.LeftIdx) &&
68            RightIdx.Equals(other.RightIdx) &&
69            WeightLeft.Equals(other.WeightLeft) &&
70            EqualStrings(VarName, other.VarName);
71        } else {
72          return false;
73        }
74      }
75
      // Null-safe string comparison (VarName may be the NO_VARIABLE sentinel, i.e. null).
76      private bool EqualStrings(string a, string b) {
77        return (a == null && b == null) ||
78               (a != null && b != null && a.Equals(b));
79      }
80    }
81
    // The tree itself is reconstructed from the serialized properties below;
    // the field carries no [Storable] attribute on purpose.
82    // not storable!
83    private TreeNode[] tree;
84
85    #region old storable format
86    // remove with HL 3.4
    // Load-only (AllowOneWay) compatibility shim: old files persisted the tree as
    // tuples without WeightLeft. Loading sets WeightLeft = -1.0, which later makes
    // GetPredictionForRow throw if partial dependence is requested for such a model.
87    [Storable(AllowOneWay = true)]
88    // to prevent storing the references to data caches in nodes
89    // seemingly, it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary)
90    private Tuple<string, double, int, int>[] SerializedTree {
91      // get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
92      set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4, -1.0)).ToArray(); } // use a weight of -1.0 to indicate that partial dependence cannot be calculated for old models
93    }
94    #endregion
95    #region new storable format
    // Current format: five parallel arrays, one per TreeNode field. Each setter
    // lazily allocates 'tree' (the deserializer may invoke the setters in any
    // order, so each one must be prepared to be first). The element assignments
    // (tree[i].VarName = ...) mutate the struct in place, which works because
    // indexing into an array of structs yields the element itself, not a copy.
    // CAUTION: the property names below are the persisted keys — renaming them
    // breaks loading of existing files.
96    [Storable]
97    private string[] SerializedTreeVarNames {
98      get { return tree.Select(t => t.VarName).ToArray(); }
99      set {
100        if (tree == null) tree = new TreeNode[value.Length];
101        for (int i = 0; i < value.Length; i++) {
102          tree[i].VarName = value[i];
103        }
104      }
105    }
106    [Storable]
107    private double[] SerializedTreeValues {
108      get { return tree.Select(t => t.Val).ToArray(); }
109      set {
110        if (tree == null) tree = new TreeNode[value.Length];
111        for (int i = 0; i < value.Length; i++) {
112          tree[i].Val = value[i];
113        }
114      }
115    }
116    [Storable]
117    private int[] SerializedTreeLeftIdx {
118      get { return tree.Select(t => t.LeftIdx).ToArray(); }
119      set {
120        if (tree == null) tree = new TreeNode[value.Length];
121        for (int i = 0; i < value.Length; i++) {
122          tree[i].LeftIdx = value[i];
123        }
124      }
125    }
126    [Storable]
127    private int[] SerializedTreeRightIdx {
128      get { return tree.Select(t => t.RightIdx).ToArray(); }
129      set {
130        if (tree == null) tree = new TreeNode[value.Length];
131        for (int i = 0; i < value.Length; i++) {
132          tree[i].RightIdx = value[i];
133        }
134      }
135    }
136    [Storable]
137    private double[] SerializedTreeWeightLeft {
138      get { return tree.Select(t => t.WeightLeft).ToArray(); }
139      set {
140        if (tree == null) tree = new TreeNode[value.Length];
141        for (int i = 0; i < value.Length; i++) {
142          tree[i].WeightLeft = value[i];
143        }
144      }
145    }
146    #endregion
147
148
149
150
    // Deserialization constructor used by the persistence framework.
151    [StorableConstructor]
152    private RegressionTreeModel(bool serializing) : base(serializing) { }
153    // cloning ctor
    // TreeNode is a struct, so Array.Copy produces a fully independent deep copy.
154    private RegressionTreeModel(RegressionTreeModel original, Cloner cloner)
155      : base(original, cloner) {
156      if (original.tree != null) {
157        this.tree = new TreeNode[original.tree.Length];
158        Array.Copy(original.tree, this.tree, this.tree.Length);
159      }
160    }
161
    // Takes ownership of the given flat node array (no defensive copy);
    // callers must not mutate it afterwards.
162    internal RegressionTreeModel(TreeNode[] tree)
163      : base("RegressionTreeModel", "Represents a decision tree for regression.") {
164      this.tree = tree;
165    }
166
    // Evaluates the tree for one dataset row starting at nodeIdx.
    // Normal prediction walks down iteratively, comparing the cached column value
    // against the split threshold. When a split variable's column is missing
    // (columnCache[nodeIdx] == null) the result is the WeightLeft-weighted average
    // of both sub-trees — the marginalization used for partial dependence plots
    // (greedy function approximation); this path is recursive because both
    // children must be evaluated. Throws InvalidOperationException for old models
    // (WeightLeft == -1.0 sentinel) on that path, or if the walk escapes the tree.
167    private static double GetPredictionForRow(TreeNode[] t, ReadOnlyCollection<double>[] columnCache, int nodeIdx, int row) {
168      while (nodeIdx != -1) {
169        var node = t[nodeIdx];
170        if (node.VarName == TreeNode.NO_VARIABLE)
171          return node.Val;
172        if (columnCache[nodeIdx] == null) {
173          if (node.WeightLeft.IsAlmost(-1.0)) throw new InvalidOperationException("Cannot calculate partial dependence for trees loaded from older versions of HeuristicLab.");
174          // weighted average for partial dependence plot (recursive here because we need to calculate both sub-trees)
175          return node.WeightLeft * GetPredictionForRow(t, columnCache, node.LeftIdx, row) +
176                 (1.0 - node.WeightLeft) * GetPredictionForRow(t, columnCache, node.RightIdx, row);
177        } else if (columnCache[nodeIdx][row] <= node.Val)
178          nodeIdx = node.LeftIdx;
179        else
180          nodeIdx = node.RightIdx;
181      }
182      throw new InvalidOperationException("Invalid tree in RegressionTreeModel");
183    }
184
185    public override IDeepCloneable Clone(Cloner cloner) {
186      return new RegressionTreeModel(this, cloner);
187    }
188
    // Returns lazily-evaluated predictions for the given rows (deferred LINQ —
    // the tree is walked only when the result is enumerated).
189    public IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
190      // lookup columns for variableNames in one pass over the tree to speed up evaluation later on
      // columnCache is indexed by node index; a null entry means the node's split
      // variable is absent from ds, which triggers the partial-dependence path above.
191      ReadOnlyCollection<double>[] columnCache = new ReadOnlyCollection<double>[tree.Length];
192
193      for (int i = 0; i < tree.Length; i++) {
194        if (tree[i].VarName != TreeNode.NO_VARIABLE) {
195          // tree models also support calculating estimations if not all variables used for training are available in the dataset
196          if (ds.ColumnNames.Contains(tree[i].VarName))
197            columnCache[i] = ds.GetReadOnlyDoubleValues(tree[i].VarName);
198        }
199      }
200      return rows.Select(r => GetPredictionForRow(tree, columnCache, 0, r));
201    }
202
203    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
204      return new RegressionSolution(this, new RegressionProblemData(problemData));
205    }
206
207    // mainly for debugging
208    public override string ToString() {
209      return TreeToString(0, "");
210    }
211
    // Renders one line per leaf: the conjunction of split conditions on the path
    // (each with its branch weight) followed by "-> leafValue". Recursion starts
    // at the root with an empty prefix.
    // NOTE(review): the right-branch format string ends in "({4:N3}))" — the extra
    // ')' looks like a typo (left branch has "({4:N3})"); confirm before changing,
    // as this is debug output only.
212    private string TreeToString(int idx, string part) {
213      var n = tree[idx];
214      if (n.VarName == TreeNode.NO_VARIABLE) {
215        return string.Format(CultureInfo.InvariantCulture, "{0} -> {1:F}{2}", part, n.Val, Environment.NewLine);
216      } else {
217        return
218          TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, n.WeightLeft))
219        + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2}  >  {3:F} ({4:N3}))", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, 1.0 - n.WeightLeft));
220      }
221    }
222  }
223}
Note: See TracBrowser for help on using the repository browser.