
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs @ 14316

Last change on this file since 14316 was 14185, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 9.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  [Item("RegressionTreeModel", "Represents a decision tree for regression.")]
  public sealed class RegressionTreeModel : RegressionModel {
    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return tree.Select(t => t.VarName).Where(v => v != TreeNode.NO_VARIABLE); }
    }

    // trees are represented as a flat array
    internal struct TreeNode {
      public readonly static string NO_VARIABLE = null;

      public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1, double weightLeft = -1.0)
        : this() {
        VarName = varName;
        Val = val;
        LeftIdx = leftIdx;
        RightIdx = rightIdx;
        WeightLeft = weightLeft;
      }

      public string VarName { get; internal set; } // name of the variable for splitting or NO_VARIABLE if terminal node
      public double Val { get; internal set; } // threshold
      public int LeftIdx { get; internal set; }
      public int RightIdx { get; internal set; }
      public double WeightLeft { get; internal set; } // for partial dependence plots (value in range [0..1] describes the fraction of training samples for the left sub-tree)


      // necessary because the default implementation of GetHashCode for structs in .NET would only return the hashcode of val here
      public override int GetHashCode() {
        return LeftIdx ^ RightIdx ^ Val.GetHashCode();
      }
      // necessary because of GetHashCode override
      public override bool Equals(object obj) {
        if (obj is TreeNode) {
          var other = (TreeNode)obj;
          return Val.Equals(other.Val) &&
            LeftIdx.Equals(other.LeftIdx) &&
            RightIdx.Equals(other.RightIdx) &&
            WeightLeft.Equals(other.WeightLeft) &&
            EqualStrings(VarName, other.VarName);
        } else {
          return false;
        }
      }

      private bool EqualStrings(string a, string b) {
        return (a == null && b == null) ||
               (a != null && b != null && a.Equals(b));
      }
    }
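
    // Example layout (hypothetical values): a single split on "x1" with two leaf nodes
    // could be encoded as
    //   tree[0] = new TreeNode("x1", 2.5, leftIdx: 1, rightIdx: 2, weightLeft: 0.6); // inner node, Val is the split threshold
    //   tree[1] = new TreeNode(TreeNode.NO_VARIABLE, 1.0);                           // left leaf, Val is the predicted value
    //   tree[2] = new TreeNode(TreeNode.NO_VARIABLE, 2.0);                           // right leaf
    // where weightLeft = 0.6 would mean that 60% of the training rows were routed into the left sub-tree.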

    // not storable!
    private TreeNode[] tree;

    #region old storable format
    // remove with HL 3.4
    [Storable(AllowOneWay = true)]
    // to prevent storing the references to data caches in nodes
    // seemingly, it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary)
    private Tuple<string, double, int, int>[] SerializedTree {
      // get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
      set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4, -1.0)).ToArray(); } // use a weight of -1.0 to indicate that partial dependence cannot be calculated for old models
    }
    #endregion
    #region new storable format
    [Storable]
    private string[] SerializedTreeVarNames {
      get { return tree.Select(t => t.VarName).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].VarName = value[i];
        }
      }
    }
    [Storable]
    private double[] SerializedTreeValues {
      get { return tree.Select(t => t.Val).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].Val = value[i];
        }
      }
    }
    [Storable]
    private int[] SerializedTreeLeftIdx {
      get { return tree.Select(t => t.LeftIdx).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].LeftIdx = value[i];
        }
      }
    }
    [Storable]
    private int[] SerializedTreeRightIdx {
      get { return tree.Select(t => t.RightIdx).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].RightIdx = value[i];
        }
      }
    }
    [Storable]
    private double[] SerializedTreeWeightLeft {
      get { return tree.Select(t => t.WeightLeft).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].WeightLeft = value[i];
        }
      }
    }
    #endregion
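
    // For the hypothetical three-node tree sketched above, the new format would store five parallel arrays:
    //   SerializedTreeVarNames   = { "x1", null, null }
    //   SerializedTreeValues     = { 2.5, 1.0, 2.0 }
    //   SerializedTreeLeftIdx    = { 1, -1, -1 }
    //   SerializedTreeRightIdx   = { 2, -1, -1 }
    //   SerializedTreeWeightLeft = { 0.6, -1.0, -1.0 }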

    [StorableConstructor]
    private RegressionTreeModel(bool serializing) : base(serializing) { }
    // cloning ctor
    private RegressionTreeModel(RegressionTreeModel original, Cloner cloner)
      : base(original, cloner) {
      if (original.tree != null) {
        this.tree = new TreeNode[original.tree.Length];
        Array.Copy(original.tree, this.tree, this.tree.Length);
      }
    }

    internal RegressionTreeModel(TreeNode[] tree, string targetVariable)
      : base(targetVariable, "RegressionTreeModel", "Represents a decision tree for regression.") {
      this.tree = tree;
    }

    private static double GetPredictionForRow(TreeNode[] t, ReadOnlyCollection<double>[] columnCache, int nodeIdx, int row) {
      while (nodeIdx != -1) {
        var node = t[nodeIdx];
        if (node.VarName == TreeNode.NO_VARIABLE)
          return node.Val;
        if (columnCache[nodeIdx] == null || double.IsNaN(columnCache[nodeIdx][row])) {
          if (node.WeightLeft.IsAlmost(-1.0)) throw new InvalidOperationException("Cannot calculate partial dependence for trees loaded from older versions of HeuristicLab.");
          // weighted average for partial dependence plot (recursive here because we need to calculate both sub-trees)
          return node.WeightLeft * GetPredictionForRow(t, columnCache, node.LeftIdx, row) +
                 (1.0 - node.WeightLeft) * GetPredictionForRow(t, columnCache, node.RightIdx, row);
        } else if (columnCache[nodeIdx][row] <= node.Val)
          nodeIdx = node.LeftIdx;
        else
          nodeIdx = node.RightIdx;
      }
      throw new InvalidOperationException("Invalid tree in RegressionTreeModel");
    }
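
    // Example (using the hypothetical tree sketched above): if the "x1" column is missing from the
    // dataset, or its value in the current row is NaN, the prediction is the weighted average over
    // both sub-trees, i.e. 0.6 * 1.0 + (1.0 - 0.6) * 2.0 = 1.4.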

    public override IDeepCloneable Clone(Cloner cloner) {
      return new RegressionTreeModel(this, cloner);
    }

    public override IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
      // lookup columns for variableNames in one pass over the tree to speed up evaluation later on
      ReadOnlyCollection<double>[] columnCache = new ReadOnlyCollection<double>[tree.Length];

      for (int i = 0; i < tree.Length; i++) {
        if (tree[i].VarName != TreeNode.NO_VARIABLE) {
          // tree models also support calculating estimations if not all variables used for training are available in the dataset
          if (ds.ColumnNames.Contains(tree[i].VarName))
            columnCache[i] = ds.GetReadOnlyDoubleValues(tree[i].VarName);
        }
      }
      return rows.Select(r => GetPredictionForRow(tree, columnCache, 0, r));
    }
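
    // Usage sketch from within this assembly (treeNodes and ds are hypothetical; assumes ds is an
    // IDataset that provides the tree's input columns and a Rows count):
    //   var model = new RegressionTreeModel(treeNodes, "y");
    //   var predictions = model.GetEstimatedValues(ds, Enumerable.Range(0, ds.Rows)).ToArray();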

    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, new RegressionProblemData(problemData));
    }

    // mainly for debugging
    public override string ToString() {
      return TreeToString(0, "");
    }

    private string TreeToString(int idx, string part) {
      var n = tree[idx];
      if (n.VarName == TreeNode.NO_VARIABLE) {
        return string.Format(CultureInfo.InvariantCulture, "{0} -> {1:F}{2}", part, n.Val, Environment.NewLine);
      } else {
        return
          TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, n.WeightLeft))
        + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2}  >  {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, 1.0 - n.WeightLeft));
      }
    }
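
    // For the hypothetical tree sketched above, ToString() would produce two lines:
    //   x1 <= 2.50 (0.600) -> 1.00
    //   x1  >  2.50 (0.400) -> 2.00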

  }
}