Changeset 17045 for branches/2952_RF-ModelStorage/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees
- Timestamp: 06/27/19 15:46:20
- Location: branches/2952_RF-ModelStorage/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees
- Files: 3 edited
branches/2952_RF-ModelStorage/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesAlgorithm.cs
(r17030 → r17045)

```diff
       if (ModelCreation == ModelCreation.SurrogateModel) {
-        model = new GradientBoostedTreesModelSurrogate(problemData, (uint)Seed, lossFunction, Iterations, MaxSize, R, M, Nu, (GradientBoostedTreesModel)model);
+        model = new GradientBoostedTreesModelSurrogate((GradientBoostedTreesModel)model, problemData, (uint)Seed, lossFunction, Iterations, MaxSize, R, M, Nu);
       }
```
branches/2952_RF-ModelStorage/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModel.cs
(r17030 → r17045)

```diff
     // this is essentially a collection of weighted regression models
     public sealed class GradientBoostedTreesModel : RegressionModel, IGradientBoostedTreesModel {
-      // BackwardsCompatibility3.4 for allowing deserialization & serialization of old models
-      #region Backwards compatible code, remove with 3.5
-
       [Storable(Name = "models")]
       private IList<IRegressionModel> __persistedModels {
       …
         get { return weights; }
       }
-      #endregion
 
       public override IEnumerable<string> VariablesUsedForPrediction {
```
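The members visible in this hunk follow a common backwards-compatible persistence pattern: privately named members carry the storable names used by previously saved files (e.g. "models"), while the public surface exposes only read-only views. Below is a minimal, self-contained sketch of that pattern. It is an illustration only: the StorableAttribute defined here is a hypothetical stand-in, not HeuristicLab's actual persistence attribute, and WeightedModelCollection and its member bodies are invented for the example.

```csharp
using System;
using System.Collections.Generic;

// Hypothetical stand-in for a persistence attribute; the real [Storable]
// attribute comes from HeuristicLab's persistence infrastructure.
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)]
public sealed class StorableAttribute : Attribute {
    public string Name { get; set; }
}

public interface IRegressionModel { }

// Pattern visible in the diff context: serialized members keep their
// historical storable names (e.g. "models") so previously saved files still
// load, while callers only ever see read-only views.
public class WeightedModelCollection {
    private readonly List<IRegressionModel> models = new List<IRegressionModel>();
    private readonly List<double> weights = new List<double>();

    [Storable(Name = "models")]
    private IList<IRegressionModel> __persistedModels {
        get { return models; }
        set { models.Clear(); models.AddRange(value); }
    }

    [Storable(Name = "weights")]
    private IList<double> __persistedWeights {
        get { return weights; }
        set { weights.Clear(); weights.AddRange(value); }
    }

    public IEnumerable<IRegressionModel> Models { get { return models; } }
    public IEnumerable<double> Weights { get { return weights; } }
}
```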
branches/2952_RF-ModelStorage/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs
(r16565 → r17045)

```diff
     private Func<IGradientBoostedTreesModel> CreateLazyInitFunc(IGradientBoostedTreesModel clonedModel) {
       return () => {
-        return clonedModel == null ? RecalculateModel() : clonedModel;
+        return clonedModel ?? RecalculateModel();
       };
     }
 
     // create only the surrogate model without an actual model
-    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
+    private GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
       ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
       : base(trainingProblemData.TargetVariable, "Gradient boosted tree model", string.Empty) {
       …
       this.m = m;
       this.nu = nu;
+
+      actualModel = new Lazy<IGradientBoostedTreesModel>(() => RecalculateModel());
     }
 
     // wrap an actual model in a surrograte
-    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
-      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu,
-      IGradientBoostedTreesModel model)
+    public GradientBoostedTreesModelSurrogate(IGradientBoostedTreesModel model, IRegressionProblemData trainingProblemData, uint seed,
+      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
       : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
       actualModel = new Lazy<IGradientBoostedTreesModel>(() => model);
```
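Taken together, the surrogate changes do three things: CreateLazyInitFunc now uses the null-coalescing operator, the metadata-only constructor is private and initializes its own lazy recalculation, and the public constructor takes the wrapped model as its first parameter (matching the updated call site in GradientBoostedTreesAlgorithm.cs). The sketch below illustrates the underlying lazy-surrogate idea with hypothetical types; IModel, ConstantModel, and ModelSurrogate are stand-ins for the HeuristicLab interfaces, and the single seed field stands in for the full parameter set (loss function, iterations, max size, r, m, nu).

```csharp
using System;

// Hypothetical stand-ins; the real code uses IGradientBoostedTreesModel,
// IRegressionProblemData, etc. from HeuristicLab.
public interface IModel { double Predict(double x); }

public sealed class ConstantModel : IModel {
    private readonly double value;
    public ConstantModel(double value) { this.value = value; }
    public double Predict(double x) { return value; }
}

// Sketch of the surrogate pattern: store only what is needed to rebuild the
// model and materialize the actual model lazily on first use.
public sealed class ModelSurrogate {
    private readonly uint seed;       // stands in for seed, loss function, iterations, ...
    private Lazy<IModel> actualModel;

    // "Create only the surrogate model without an actual model":
    // the lazy handle points at a recalculation.
    private ModelSurrogate(uint seed) {
        this.seed = seed;
        actualModel = new Lazy<IModel>(RecalculateModel);
    }

    // "Wrap an actual model in a surrogate": reuse the metadata-only
    // constructor, then let the lazy handle return the given model, falling
    // back to recalculation when none was provided
    // (cf. `clonedModel ?? RecalculateModel()` above).
    public ModelSurrogate(IModel model, uint seed) : this(seed) {
        actualModel = new Lazy<IModel>(() => model ?? RecalculateModel());
    }

    public static ModelSurrogate FromParametersOnly(uint seed) {
        return new ModelSurrogate(seed);
    }

    public double Predict(double x) { return actualModel.Value.Predict(x); }

    private IModel RecalculateModel() {
        // The real surrogate re-runs GBT training with the stored parameters;
        // here a placeholder model is fabricated instead.
        return new ConstantModel(seed);
    }
}

public static class SurrogateDemo {
    public static void Main() {
        var wrapped = new ModelSurrogate(new ConstantModel(1.5), seed: 42);
        var rebuilt = ModelSurrogate.FromParametersOnly(seed: 42); // built on first Predict
        Console.WriteLine(wrapped.Predict(0.0)); // 1.5
        Console.WriteLine(rebuilt.Predict(0.0)); // 42
    }
}
```

The design choice mirrored here is that a surrogate never needs both the wrapped model and the recalculation path up front: whichever is available becomes the Lazy value on first access.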