Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
07/10/19 17:39:38 (5 years ago)
Author:
gkronber
Message:

#2994: merged r17007:17118 from trunk to branch

Location:
branches/2994-AutoDiffForIntervals
Files:
7 edited
1 copied

Legend:

Unmodified
Added
Removed
  • branches/2994-AutoDiffForIntervals

  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis

  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis/3.4

  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesAlgorithm.cs

    r16565 r17120  
    2121#endregion
    2222
     23using System;
    2324using System.Linq;
    2425using System.Threading;
     26using HeuristicLab.Algorithms.DataAnalysis.GradientBoostedTrees;
    2527using HeuristicLab.Analysis;
    2628using HeuristicLab.Common;
     
    4850    private const string LossFunctionParameterName = "LossFunction";
    4951    private const string UpdateIntervalParameterName = "UpdateInterval";
    50     private const string CreateSolutionParameterName = "CreateSolution";
     52    private const string ModelCreationParameterName = "ModelCreation";
    5153    #endregion
    5254
     
    7981      get { return (IFixedValueParameter<IntValue>)Parameters[UpdateIntervalParameterName]; }
    8082    }
    81     public IFixedValueParameter<BoolValue> CreateSolutionParameter {
    82       get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
     83    private IFixedValueParameter<EnumValue<ModelCreation>> ModelCreationParameter {
     84      get { return (IFixedValueParameter<EnumValue<ModelCreation>>)Parameters[ModelCreationParameterName]; }
    8385    }
    8486    #endregion
     
    113115      set { MParameter.Value.Value = value; }
    114116    }
    115     public bool CreateSolution {
    116       get { return CreateSolutionParameter.Value.Value; }
    117       set { CreateSolutionParameter.Value.Value = value; }
     117    public ModelCreation ModelCreation {
     118      get { return ModelCreationParameter.Value.Value; }
     119      set { ModelCreationParameter.Value.Value = value; }
    118120    }
    119121    #endregion
     
    146148      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    147149      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
    148       Parameters.Add(new FixedValueParameter<IntValue>(MaxSizeParameterName, "Maximal size of the tree learned in each step (prefer smaller sizes if possible)", new IntValue(10)));
     150      Parameters.Add(new FixedValueParameter<IntValue>(MaxSizeParameterName, "Maximal size of the tree learned in each step (prefer smaller sizes (3 to 10) if possible)", new IntValue(10)));
    149151      Parameters.Add(new FixedValueParameter<DoubleValue>(RParameterName, "Ratio of training rows selected randomly in each step (0 < R <= 1)", new DoubleValue(0.5)));
    150152      Parameters.Add(new FixedValueParameter<DoubleValue>(MParameterName, "Ratio of variables selected randomly in each step (0 < M <= 1)", new DoubleValue(0.5)));
     
    152154      Parameters.Add(new FixedValueParameter<IntValue>(UpdateIntervalParameterName, "", new IntValue(100)));
    153155      Parameters[UpdateIntervalParameterName].Hidden = true;
    154       Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
    155       Parameters[CreateSolutionParameterName].Hidden = true;
     156      Parameters.Add(new FixedValueParameter<EnumValue<ModelCreation>>(ModelCreationParameterName, "Defines the results produced at the end of the run (Surrogate => Less disk space, lazy recalculation of model)", new EnumValue<ModelCreation>(ModelCreation.Model)));
     157      Parameters[ModelCreationParameterName].Hidden = true;
    156158
    157159      var lossFunctions = ApplicationManager.Manager.GetInstances<ILossFunction>();
     
    164166      // BackwardsCompatibility3.4
    165167      #region Backwards compatible code, remove with 3.5
     168
     169      #region LossFunction
    166170      // parameter type has been changed
    167171      var lossFunctionParam = Parameters[LossFunctionParameterName] as ConstrainedValueParameter<StringValue>;
     
    182186      }
    183187      #endregion
     188
     189      #region CreateSolution
     190      // parameter type has been changed
     191      if (Parameters.ContainsKey("CreateSolution")) {
     192        var createSolutionParam = Parameters["CreateSolution"] as FixedValueParameter<BoolValue>;
     193        Parameters.Remove(createSolutionParam);
     194
     195        ModelCreation value = createSolutionParam.Value.Value ? ModelCreation.Model : ModelCreation.QualityOnly;
     196        Parameters.Add(new FixedValueParameter<EnumValue<ModelCreation>>(ModelCreationParameterName, "Defines the results produced at the end of the run (Surrogate => Less disk space, lazy recalculation of model)", new EnumValue<ModelCreation>(value)));
     197        Parameters[ModelCreationParameterName].Hidden = true;
     198      }
     199      #endregion
     200      #endregion
    184201    }
    185202
     
    248265
    249266      // produce solution
    250       if (CreateSolution) {
    251         var model = state.GetModel();
     267      if (ModelCreation == ModelCreation.SurrogateModel || ModelCreation == ModelCreation.Model) {
     268        IRegressionModel model = state.GetModel();
     269
     270        if (ModelCreation == ModelCreation.SurrogateModel) {
     271          model = new GradientBoostedTreesModelSurrogate((GradientBoostedTreesModel)model, problemData, (uint)Seed, lossFunction, Iterations, MaxSize, R, M, Nu);
     272        }
    252273
    253274        // for logistic regression we produce a classification solution
     
    271292          Results.Add(new Result("Solution", new GradientBoostedTreesSolution(model, problemData)));
    272293        }
     294      } else if (ModelCreation == ModelCreation.QualityOnly) {
     295        //Do nothing
     296      } else {
     297        throw new NotImplementedException("Selected parameter for CreateSolution isn't implemented yet");
    273298      }
    274299    }
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesAlgorithmStatic.cs

    r16565 r17120  
    101101
    102102      public IRegressionModel GetModel() {
    103 #pragma warning disable 618
    104         var model = new GradientBoostedTreesModel(models, weights);
    105 #pragma warning restore 618
    106         // we don't know the number of iterations here but the number of weights is equal
    107         // to the number of iterations + 1 (for the constant model)
    108         // wrap the actual model in a surrogate that enables persistence and lazy recalculation of the model if necessary
    109         return new GradientBoostedTreesModelSurrogate(problemData, randSeed, lossFunction, weights.Count - 1, maxSize, r, m, nu, model);
     103        return new GradientBoostedTreesModel(models, weights);
    110104      }
    111105      public IEnumerable<KeyValuePair<string, double>> GetVariableRelevance() {
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModel.cs

    r16565 r17120  
    3434  // this is essentially a collection of weighted regression models
    3535  public sealed class GradientBoostedTreesModel : RegressionModel, IGradientBoostedTreesModel {
    36     // BackwardsCompatibility3.4 for allowing deserialization & serialization of old models
    37     #region Backwards compatible code, remove with 3.5
    38     private bool isCompatibilityLoaded = false; // only set to true if the model is deserialized from the old format, needed to make sure that information is serialized again if it was loaded from the old format
    39 
    4036    [Storable(Name = "models")]
    4137    private IList<IRegressionModel> __persistedModels {
    4238      set {
    43         this.isCompatibilityLoaded = true;
    4439        this.models.Clear();
    4540        foreach (var m in value) this.models.Add(m);
    4641      }
    47       get { if (this.isCompatibilityLoaded) return models; else return null; }
     42      get { return models; }
    4843    }
    4944    [Storable(Name = "weights")]
    5045    private IList<double> __persistedWeights {
    5146      set {
    52         this.isCompatibilityLoaded = true;
    5347        this.weights.Clear();
    5448        foreach (var w in value) this.weights.Add(w);
    5549      }
    56       get { if (this.isCompatibilityLoaded) return weights; else return null; }
     50      get { return weights; }
    5751    }
    58     #endregion
    5952
    6053    public override IEnumerable<string> VariablesUsedForPrediction {
     
    7770      this.weights = new List<double>(original.weights);
    7871      this.models = new List<IRegressionModel>(original.models.Select(m => cloner.Clone(m)));
    79       this.isCompatibilityLoaded = original.isCompatibilityLoaded;
    8072    }
    81     [Obsolete("The constructor of GBTModel should not be used directly anymore (use GBTModelSurrogate instead)")]
     73
    8274    internal GradientBoostedTreesModel(IEnumerable<IRegressionModel> models, IEnumerable<double> weights)
    8375      : base(string.Empty, "Gradient boosted tree model", string.Empty) {
  • branches/2994-AutoDiffForIntervals/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs

    r16565 r17120  
    9090    private Func<IGradientBoostedTreesModel> CreateLazyInitFunc(IGradientBoostedTreesModel clonedModel) {
    9191      return () => {
    92         return clonedModel == null ? RecalculateModel() : clonedModel;
     92        return clonedModel ?? RecalculateModel();
    9393      };
    9494    }
    9595
    9696    // create only the surrogate model without an actual model
    97     public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
     97    private GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
    9898      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
    9999      : base(trainingProblemData.TargetVariable, "Gradient boosted tree model", string.Empty) {
     
    106106      this.m = m;
    107107      this.nu = nu;
     108
     109      actualModel = new Lazy<IGradientBoostedTreesModel>(() => RecalculateModel());
    108110    }
    109111
    110112    // wrap an actual model in a surrogate
    111     public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
    112       ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu,
    113       IGradientBoostedTreesModel model)
     113    public GradientBoostedTreesModelSurrogate(IGradientBoostedTreesModel model, IRegressionProblemData trainingProblemData, uint seed,
     114      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
    114115      : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
    115116      actualModel = new Lazy<IGradientBoostedTreesModel>(() => model);
Note: See TracChangeset for help on using the changeset viewer.