
source: branches/2522_RefactorPluginInfrastructure/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs

Last change on this file was 15973, checked in by gkronber, 6 years ago

#2522: merged trunk changes from r13402:15972 to branch resolving conflicts where necessary

File size: 5.7 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  // This class is used as a surrogate for persistence of an actual GBT model.
  // Since the actual GBT model would be very large when persisted, we store only the information
  // necessary to recalculate the actual GBT model on demand.
  [Item("Gradient boosted tree model", "")]
  public sealed class GradientBoostedTreesModelSurrogate : RegressionModel, IGradientBoostedTreesModel {
    // don't store the actual model!
    // the actual model is only recalculated when necessary
    private readonly Lazy<IGradientBoostedTreesModel> actualModel;
    private IGradientBoostedTreesModel ActualModel {
      get { return actualModel.Value; }
    }

    [Storable]
    private readonly IRegressionProblemData trainingProblemData;
    [Storable]
    private readonly uint seed;
    [Storable]
    private ILossFunction lossFunction;
    [Storable]
    private double r;
    [Storable]
    private double m;
    [Storable]
    private double nu;
    [Storable]
    private int iterations;
    [Storable]
    private int maxSize;


    public override IEnumerable<string> VariablesUsedForPrediction {
      get {
        return ActualModel.Models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x);
      }
    }

    [StorableConstructor]
    private GradientBoostedTreesModelSurrogate(bool deserializing)
      : base(deserializing) {
      actualModel = new Lazy<IGradientBoostedTreesModel>(() => RecalculateModel());
    }

    private GradientBoostedTreesModelSurrogate(GradientBoostedTreesModelSurrogate original, Cloner cloner)
      : base(original, cloner) {
      IGradientBoostedTreesModel clonedModel = null;
      if (original.ActualModel != null) clonedModel = cloner.Clone(original.ActualModel);
      actualModel = new Lazy<IGradientBoostedTreesModel>(CreateLazyInitFunc(clonedModel)); // only capture clonedModel in the closure

      this.trainingProblemData = cloner.Clone(original.trainingProblemData);
      this.lossFunction = cloner.Clone(original.lossFunction);
      this.seed = original.seed;
      this.iterations = original.iterations;
      this.maxSize = original.maxSize;
      this.r = original.r;
      this.m = original.m;
      this.nu = original.nu;
    }

    private Func<IGradientBoostedTreesModel> CreateLazyInitFunc(IGradientBoostedTreesModel clonedModel) {
      return () => {
        return clonedModel == null ? RecalculateModel() : clonedModel;
      };
    }

    // create only the surrogate model without an actual model;
    // the actual model is recalculated lazily from the stored parameters when it is first needed
    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
      : base(trainingProblemData.TargetVariable, "Gradient boosted tree model", string.Empty) {
      // initialize the lazy model here so that a surrogate created without an actual model
      // can still recalculate it on demand (otherwise actualModel would remain null)
      actualModel = new Lazy<IGradientBoostedTreesModel>(() => RecalculateModel());

      this.trainingProblemData = trainingProblemData;
      this.seed = seed;
      this.lossFunction = lossFunction;
      this.iterations = iterations;
      this.maxSize = maxSize;
      this.r = r;
      this.m = m;
      this.nu = nu;
    }

    // wrap an actual model in a surrogate
    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu,
      IGradientBoostedTreesModel model)
      : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
      actualModel = new Lazy<IGradientBoostedTreesModel>(() => model);
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new GradientBoostedTreesModelSurrogate(this, cloner);
    }

    // forward message to actual model (recalculate model first if necessary)
    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      return ActualModel.GetEstimatedValues(dataset, rows);
    }

    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    }

    private IGradientBoostedTreesModel RecalculateModel() {
      return GradientBoostedTreesAlgorithmStatic.TrainGbm(trainingProblemData, lossFunction, maxSize, nu, r, m, iterations, seed).Model;
    }

    public IEnumerable<IRegressionModel> Models {
      get {
        return ActualModel.Models;
      }
    }

    public IEnumerable<double> Weights {
      get {
        return ActualModel.Weights;
      }
    }
  }
}
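
For context, a minimal usage sketch (not part of the file): the surrogate either wraps an already trained GBT model or recalculates one on demand from the stored training parameters via GradientBoostedTreesAlgorithmStatic.TrainGbm, exactly as RecalculateModel does above. The variable problemData, the concrete loss function (SquaredErrorLoss) and all parameter values below are illustrative assumptions, not taken from the file.

// assumed setup: problemData is an IRegressionProblemData obtained elsewhere
uint seed = 31415;
ILossFunction lossFunction = new SquaredErrorLoss();   // one of the GBT loss functions in this plugin
int iterations = 100, maxSize = 10;
double r = 0.5, m = 0.5, nu = 0.002;

// train the full model once (same positional parameter order as in RecalculateModel) ...
IGradientBoostedTreesModel model = GradientBoostedTreesAlgorithmStatic
  .TrainGbm(problemData, lossFunction, maxSize, nu, r, m, iterations, seed).Model;

// ... and wrap it in the surrogate; only the parameters above are persisted,
// the full model is recalculated lazily after deserialization
var surrogate = new GradientBoostedTreesModelSurrogate(
  problemData, seed, lossFunction, iterations, maxSize, r, m, nu, model);

// predictions are forwarded to the (possibly recalculated) actual model
IEnumerable<double> estimates =
  surrogate.GetEstimatedValues(problemData.Dataset, problemData.TrainingIndices);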