source: branches/HeuristicLab.RegressionSolutionGradientView/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs @ 13948

Last change on this file since 13948 was 13948, checked in by pfleck, 5 years ago

#2597

  • Merged recent trunk changes.
  • Adapted VariablesUsedForPrediction property for RegressionSolutionTargetResponseGradientView.
  • Fixed a reference (.dll to project ref).
File size: 9.1 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 * and the BEACON Center for the Study of Evolution in Action.
5 *
6 * This file is part of HeuristicLab.
7 *
8 * HeuristicLab is free software: you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation, either version 3 of the License, or
11 * (at your option) any later version.
12 *
13 * HeuristicLab is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 * GNU General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License
19 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
20 */
21#endregion
22
23using System;
24using System.Collections.Generic;
25using System.Collections.ObjectModel;
26using System.Globalization;
27using System.Linq;
28using HeuristicLab.Common;
29using HeuristicLab.Core;
30using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
31using HeuristicLab.Problems.DataAnalysis;
32
namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  [Item("RegressionTreeModel", "Represents a decision tree for regression.")]
  public sealed class RegressionTreeModel : RegressionModel {
    // Names of all variables that occur in split nodes; terminal nodes carry NO_VARIABLE and are skipped.
    // Distinct() because the same variable is typically used in several split nodes of the tree.
    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return tree.Select(t => t.VarName).Where(v => v != TreeNode.NO_VARIABLE).Distinct(); }
    }

    // trees are represented as a flat array; child links are indexes into that array (-1 = no child)
    internal struct TreeNode {
      public readonly static string NO_VARIABLE = null;

      public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1, double weightLeft = -1.0)
        : this() {
        VarName = varName;
        Val = val;
        LeftIdx = leftIdx;
        RightIdx = rightIdx;
        WeightLeft = weightLeft;
      }

      public string VarName { get; internal set; } // name of the variable for splitting or NO_VARIABLE if terminal node
      public double Val { get; internal set; } // threshold for split nodes, predicted value for terminal nodes
      public int LeftIdx { get; internal set; }
      public int RightIdx { get; internal set; }
      public double WeightLeft { get; internal set; } // for partial dependence plots (value in range [0..1] describes the fraction of training samples for the left sub-tree); -1.0 marks models loaded from the old storable format

      // necessary because the default implementation of GetHashCode for structs in .NET would only return the hashcode of val here
      // (equal nodes still produce equal hash codes because the hash uses a subset of the fields compared in Equals)
      public override int GetHashCode() {
        return LeftIdx ^ RightIdx ^ Val.GetHashCode();
      }
      // necessary because of GetHashCode override
      public override bool Equals(object obj) {
        if (obj is TreeNode) {
          var other = (TreeNode)obj;
          return Val.Equals(other.Val) &&
            LeftIdx.Equals(other.LeftIdx) &&
            RightIdx.Equals(other.RightIdx) &&
            WeightLeft.Equals(other.WeightLeft) &&
            string.Equals(VarName, other.VarName); // static Equals is null-safe (both VarNames may be NO_VARIABLE)
        } else {
          return false;
        }
      }
    }

    // not storable! persisted through the Serialized* properties below
    private TreeNode[] tree;

    #region old storable format
    // remove with HL 3.4
    [Storable(AllowOneWay = true)]
    // to prevent storing the references to data caches in nodes
    // seemingly, it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary)
    private Tuple<string, double, int, int>[] SerializedTree {
      // get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
      set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4, -1.0)).ToArray(); } // use a weight of -1.0 to indicate that partial dependence cannot be calculated for old models
    }
    #endregion
    #region new storable format
    // each TreeNode field is persisted as a separate parallel array; the setters cooperate:
    // whichever setter runs first allocates the tree array, the others fill in their field
    [Storable]
    private string[] SerializedTreeVarNames {
      get { return tree.Select(t => t.VarName).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].VarName = value[i];
        }
      }
    }
    [Storable]
    private double[] SerializedTreeValues {
      get { return tree.Select(t => t.Val).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].Val = value[i];
        }
      }
    }
    [Storable]
    private int[] SerializedTreeLeftIdx {
      get { return tree.Select(t => t.LeftIdx).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].LeftIdx = value[i];
        }
      }
    }
    [Storable]
    private int[] SerializedTreeRightIdx {
      get { return tree.Select(t => t.RightIdx).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].RightIdx = value[i];
        }
      }
    }
    [Storable]
    private double[] SerializedTreeWeightLeft {
      get { return tree.Select(t => t.WeightLeft).ToArray(); }
      set {
        if (tree == null) tree = new TreeNode[value.Length];
        for (int i = 0; i < value.Length; i++) {
          tree[i].WeightLeft = value[i];
        }
      }
    }
    #endregion

    [StorableConstructor]
    private RegressionTreeModel(bool serializing) : base(serializing) { }
    // cloning ctor
    private RegressionTreeModel(RegressionTreeModel original, Cloner cloner)
      : base(original, cloner) {
      if (original.tree != null) {
        // TreeNode is a value type, so Array.Copy produces a deep copy of the tree
        this.tree = new TreeNode[original.tree.Length];
        Array.Copy(original.tree, this.tree, this.tree.Length);
      }
    }

    internal RegressionTreeModel(TreeNode[] tree, string target = "Target")
      : base(target, "RegressionTreeModel", "Represents a decision tree for regression.") {
      this.tree = tree;
    }

    // Walks the tree iteratively starting at nodeIdx and returns the prediction for the given row.
    // columnCache[i] holds the data column for the split variable of node i, or null if that
    // variable is missing from the dataset; missing variables are handled by returning the
    // weighted average of both sub-trees (partial dependence), which requires recursion.
    private static double GetPredictionForRow(TreeNode[] t, ReadOnlyCollection<double>[] columnCache, int nodeIdx, int row) {
      while (nodeIdx != -1) {
        var node = t[nodeIdx];
        if (node.VarName == TreeNode.NO_VARIABLE)
          return node.Val;
        if (columnCache[nodeIdx] == null) {
          if (node.WeightLeft.IsAlmost(-1.0)) throw new InvalidOperationException("Cannot calculate partial dependence for trees loaded from older versions of HeuristicLab.");
          // weighted average for partial dependence plot (recursive here because we need to calculate both sub-trees)
          return node.WeightLeft * GetPredictionForRow(t, columnCache, node.LeftIdx, row) +
                 (1.0 - node.WeightLeft) * GetPredictionForRow(t, columnCache, node.RightIdx, row);
        } else if (columnCache[nodeIdx][row] <= node.Val)
          nodeIdx = node.LeftIdx;
        else
          nodeIdx = node.RightIdx;
      }
      throw new InvalidOperationException("Invalid tree in RegressionTreeModel");
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new RegressionTreeModel(this, cloner);
    }

    public override IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
      // lookup columns for variableNames in one pass over the tree to speed up evaluation later on
      ReadOnlyCollection<double>[] columnCache = new ReadOnlyCollection<double>[tree.Length];

      for (int i = 0; i < tree.Length; i++) {
        if (tree[i].VarName != TreeNode.NO_VARIABLE) {
          // tree models also support calculating estimations if not all variables used for training are available in the dataset
          if (ds.ColumnNames.Contains(tree[i].VarName))
            columnCache[i] = ds.GetReadOnlyDoubleValues(tree[i].VarName);
        }
      }
      return rows.Select(r => GetPredictionForRow(tree, columnCache, 0, r));
    }

    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, new RegressionProblemData(problemData));
    }

    // mainly for debugging
    public override string ToString() {
      return TreeToString(0, "");
    }

    // Recursively renders one "path condition -> predicted value" line per leaf;
    // part accumulates the conjunction of split conditions on the path so far.
    private string TreeToString(int idx, string part) {
      var n = tree[idx];
      if (n.VarName == TreeNode.NO_VARIABLE) {
        return string.Format(CultureInfo.InvariantCulture, "{0} -> {1:F}{2}", part, n.Val, Environment.NewLine);
      } else {
        return
          TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, n.WeightLeft))
        + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2}  >  {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, 1.0 - n.WeightLeft));
      }
    }

  }
}
Note: See TracBrowser for help on using the repository browser.