
source: branches/2883_GBTModelStorage/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModel.cs @ 16220

Last change on this file since 16220 was 16220, checked in by fholzing, 6 years ago

#2883: Changed formatting to adhere to the coding guidelines

File size: 4.5 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  [Item("Gradient boosted trees model", "")]
  // this is essentially a collection of weighted regression models
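  // the overall prediction for a row is the weighted sum of the individual model predictions
  // (see GetEstimatedValues below: res[r] += weights[i] * estimate of models[i])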
  public sealed class GradientBoostedTreesModel : RegressionModel, IGradientBoostedTreesModel {
    // BackwardsCompatibility3.4 for allowing deserialization & serialization of old models
    #region Backwards compatible code, remove with 3.5

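    // the models and weights fields below are readonly and are filled in the constructors,
    // so the persistence framework cannot restore them directly; these wrapper properties
    // copy the persisted values into the existing list instances instead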
    [Storable(Name = "models")]
    private IList<IRegressionModel> __persistedModels {
      set {
        this.models.Clear();
        foreach (var m in value) this.models.Add(m);
      }
      get { return models; }
    }
    [Storable(Name = "weights")]
    private IList<double> __persistedWeights {
      set {
        this.weights.Clear();
        foreach (var w in value) this.weights.Add(w);
      }
      get { return weights; }
    }
    #endregion

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x); }
    }

    private readonly IList<IRegressionModel> models;
    public IEnumerable<IRegressionModel> Models { get { return models; } }

    private readonly IList<double> weights;
    public IEnumerable<double> Weights { get { return weights; } }

    [StorableConstructor]
    private GradientBoostedTreesModel(bool deserializing)
      : base(deserializing) {
      models = new List<IRegressionModel>();
      weights = new List<double>();
    }
    private GradientBoostedTreesModel(GradientBoostedTreesModel original, Cloner cloner)
      : base(original, cloner) {
      this.weights = new List<double>(original.weights);
      this.models = new List<IRegressionModel>(original.models.Select(m => cloner.Clone(m)));
    }
    [Obsolete("The constructor of GBTModel should not be used directly anymore (use GBTModelSurrogate instead)")]
    internal GradientBoostedTreesModel(IEnumerable<IRegressionModel> models, IEnumerable<double> weights)
      : base(string.Empty, "Gradient boosted tree model", string.Empty) {
      this.models = new List<IRegressionModel>(models);
      this.weights = new List<double>(weights);

      if (this.models.Count != this.weights.Count) throw new ArgumentException();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new GradientBoostedTreesModel(this, cloner);
    }

    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      // return immediately if rows is empty; this prevents multiple iterations over the lazy rows
      // enumerable (which essentially looks up indexes in a dictionary)
      if (!rows.Any()) return Enumerable.Empty<double>();
      // allocate the target array, then go over all models and add up the weighted estimate for each row
      var res = new double[rows.Count()];
      for (int i = 0; i < models.Count; i++) {
        var w = weights[i];
        var m = models[i];
        int r = 0;
        foreach (var est in m.GetEstimatedValues(dataset, rows)) {
          res[r++] += w * est;
        }
      }
      return res;
    }

    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    }

  }
}
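
For reference, the accumulation in GetEstimatedValues above boils down to a weighted sum of the per-model estimates. A minimal standalone sketch of the same scheme is shown below; it is not part of the file, and the toy models (plain Func<int, double> delegates) are purely illustrative assumptions.

using System;
using System.Collections.Generic;

class WeightedEnsembleSketch {
  static void Main() {
    // two toy "models": a constant and one that depends on the row index
    var models = new List<Func<int, double>> { row => 1.0, row => 0.5 * row };
    var weights = new List<double> { 0.9, 0.1 };
    int[] rows = { 0, 1, 2 };

    // same accumulation scheme as GradientBoostedTreesModel.GetEstimatedValues:
    // res[r] += weights[i] * (estimate of model i for rows[r])
    var res = new double[rows.Length];
    for (int i = 0; i < models.Count; i++) {
      int r = 0;
      foreach (var row in rows) res[r++] += weights[i] * models[i](row);
    }

    Console.WriteLine(string.Join(", ", res)); // prints: 0.9, 0.95, 1
  }
}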