
source: branches/RemoveBackwardsCompatibility/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs @ 17317

Last change on this file since 17317 was 13157, checked in by gkronber, 9 years ago

#2450 made the changes suggested by mkommend in the review. This is definitely a big improvement, thx!

File size: 4.9 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System.Collections.Generic;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  // This class is used as a surrogate for persistence of an actual GBT model.
  // Since the actual GBT model would be very large when persisted, we only store the
  // information necessary to recalculate the actual GBT model on demand.
  [Item("Gradient boosted tree model", "")]
  public sealed class GradientBoostedTreesModelSurrogate : NamedItem, IGradientBoostedTreesModel {
    // don't store the actual model!
    private IGradientBoostedTreesModel actualModel; // the actual model is only recalculated when necessary

    [Storable]
    private readonly IRegressionProblemData trainingProblemData;
    [Storable]
    private readonly uint seed;
    [Storable]
    private ILossFunction lossFunction;
    [Storable]
    private double r;
    [Storable]
    private double m;
    [Storable]
    private double nu;
    [Storable]
    private int iterations;
    [Storable]
    private int maxSize;


    [StorableConstructor]
    private GradientBoostedTreesModelSurrogate(bool deserializing) : base(deserializing) { }

    private GradientBoostedTreesModelSurrogate(GradientBoostedTreesModelSurrogate original, Cloner cloner)
      : base(original, cloner) {
      if (original.actualModel != null) this.actualModel = cloner.Clone(original.actualModel);

      this.trainingProblemData = cloner.Clone(original.trainingProblemData);
      this.lossFunction = cloner.Clone(original.lossFunction);
      this.seed = original.seed;
      this.iterations = original.iterations;
      this.maxSize = original.maxSize;
      this.r = original.r;
      this.m = original.m;
      this.nu = original.nu;
    }

    // create only the surrogate model without an actual model
    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed, ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
      : base("Gradient boosted tree model", string.Empty) {
      this.trainingProblemData = trainingProblemData;
      this.seed = seed;
      this.lossFunction = lossFunction;
      this.iterations = iterations;
      this.maxSize = maxSize;
      this.r = r;
      this.m = m;
      this.nu = nu;
    }

    // wrap an actual model in a surrogate
    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed, ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu, IGradientBoostedTreesModel model)
      : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
      this.actualModel = model;
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new GradientBoostedTreesModelSurrogate(this, cloner);
    }

    // forward message to actual model (recalculate model first if necessary)
    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      if (actualModel == null) actualModel = RecalculateModel();
      return actualModel.GetEstimatedValues(dataset, rows);
    }

    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    }


    private IGradientBoostedTreesModel RecalculateModel() {
      return GradientBoostedTreesAlgorithmStatic.TrainGbm(trainingProblemData, lossFunction, maxSize, nu, r, m, iterations, seed).Model;
    }

    public IEnumerable<IRegressionModel> Models {
      get {
        if (actualModel == null) actualModel = RecalculateModel();
        return actualModel.Models;
      }
    }

    public IEnumerable<double> Weights {
      get {
        if (actualModel == null) actualModel = RecalculateModel();
        return actualModel.Weights;
      }
    }
  }
}
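
Usage sketch (not part of the repository file): the class comment above describes lazy recalculation, i.e. the surrogate persists only the training parameters and re-trains the GBT model the first time predictions are requested. The snippet below illustrates this against the constructors and TrainGbm call shown in the file; the helper class, the problemData argument, the choice of SquaredErrorLoss, and all parameter values are assumptions for illustration only.

using System.Collections.Generic;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  // Hypothetical helper, not part of HeuristicLab; shows the intended surrogate workflow.
  public static class GbtSurrogateUsageSketch {
    public static IEnumerable<double> Demo(IRegressionProblemData problemData) {
      // Placeholder parameter values; SquaredErrorLoss is assumed as the loss function.
      ILossFunction lossFunction = new SquaredErrorLoss();
      uint seed = 31415;
      int iterations = 1000, maxSize = 10;
      double r = 0.5, m = 0.5, nu = 0.002;

      // Train once and wrap the (large) trained model in a small surrogate;
      // only the constructor arguments are marked [Storable] in the surrogate.
      var result = GradientBoostedTreesAlgorithmStatic.TrainGbm(problemData, lossFunction, maxSize, nu, r, m, iterations, seed);
      var surrogate = new GradientBoostedTreesModelSurrogate(problemData, seed, lossFunction, iterations, maxSize, r, m, nu, result.Model);

      // After deserialization actualModel is null; the first call to GetEstimatedValues
      // (or to Models/Weights) triggers RecalculateModel, which re-runs TrainGbm with the
      // stored parameters and the same seed to reproduce the model on demand.
      return surrogate.GetEstimatedValues(problemData.Dataset, problemData.TrainingIndices);
    }
  }
}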