Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
10/13/16 19:47:41 (7 years ago)
Author:
gkronber
Message:

#2650 Merged r14282:14322 from trunk to branch (fixing conflicts)

Location:
branches/symbreg-factors-2650
Files:
5 edited

Legend:

Unmodified
Added
Removed
  • branches/symbreg-factors-2650

  • branches/symbreg-factors-2650/HeuristicLab.Algorithms.DataAnalysis

  • branches/symbreg-factors-2650/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs

    r14239 r14330  
    2121#endregion
    2222
     23using System;
    2324using System.Collections.Generic;
     25using System.Diagnostics.Eventing.Reader;
    2426using System.Linq;
    2527using HeuristicLab.Common;
     
    3638  public sealed class GradientBoostedTreesModelSurrogate : RegressionModel, IGradientBoostedTreesModel {
    3739    // don't store the actual model!
    38     private IGradientBoostedTreesModel actualModel; // the actual model is only recalculated when necessary
     40    // the actual model is only recalculated when necessary
     41    private readonly Lazy<IGradientBoostedTreesModel> actualModel;
     42    private IGradientBoostedTreesModel ActualModel {
     43      get { return actualModel.Value; }
     44    }
    3945
    4046    [Storable]
     
    5763
    5864    public override IEnumerable<string> VariablesUsedForPrediction {
    59       get
    60       {
    61         lock (actualModel) { if (actualModel == null) actualModel = RecalculateModel(); }
    62         return actualModel.Models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x);
     65      get {
     66        return ActualModel.Models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x);
    6367      }
    6468    }
    6569
    6670    [StorableConstructor]
    67     private GradientBoostedTreesModelSurrogate(bool deserializing) : base(deserializing) { }
     71    private GradientBoostedTreesModelSurrogate(bool deserializing)
     72      : base(deserializing) {
     73      actualModel = new Lazy<IGradientBoostedTreesModel>(() => RecalculateModel());
     74    }
    6875
    6976    private GradientBoostedTreesModelSurrogate(GradientBoostedTreesModelSurrogate original, Cloner cloner)
    7077      : base(original, cloner) {
    71       if (original.actualModel != null) this.actualModel = cloner.Clone(original.actualModel);
     78      IGradientBoostedTreesModel clonedModel = null;
     79      if (original.ActualModel != null) clonedModel = cloner.Clone(original.ActualModel);
     80      actualModel = new Lazy<IGradientBoostedTreesModel>(CreateLazyInitFunc(clonedModel)); // only capture clonedModel in the closure
    7281
    7382      this.trainingProblemData = cloner.Clone(original.trainingProblemData);
     
    7988      this.m = original.m;
    8089      this.nu = original.nu;
     90    }
     91
     92    private Func<IGradientBoostedTreesModel> CreateLazyInitFunc(IGradientBoostedTreesModel clonedModel) {
     93      return () => {
     94        return clonedModel == null ? RecalculateModel() : clonedModel;
     95      };
    8196    }
    8297
     
    100115      IGradientBoostedTreesModel model)
    101116      : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
    102       this.actualModel = model;
     117      actualModel = new Lazy<IGradientBoostedTreesModel>(() => model);
    103118    }
    104119
     
    109124    // forward message to actual model (recalculate model first if necessary)
    110125    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    111       lock (actualModel) { if (actualModel == null) actualModel = RecalculateModel(); }
    112       return actualModel.GetEstimatedValues(dataset, rows);
     126      return ActualModel.GetEstimatedValues(dataset, rows);
    113127    }
    114128
     
    123137    public IEnumerable<IRegressionModel> Models {
    124138      get {
    125         lock(actualModel) { if (actualModel == null) actualModel = RecalculateModel();}
    126         return actualModel.Models;
     139        return ActualModel.Models;
    127140      }
    128141    }
     
    130143    public IEnumerable<double> Weights {
    131144      get {
    132         lock(actualModel) { if (actualModel == null) actualModel = RecalculateModel();}
    133         return actualModel.Weights;
     145        return ActualModel.Weights;
    134146      }
    135147    }
  • branches/symbreg-factors-2650/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs

    r14239 r14330  
    3636  public sealed class NearestNeighbourModel : ClassificationModel, INearestNeighbourModel {
    3737
     38    private readonly object kdTreeLockObject = new object();
    3839    private alglib.nearestneighbor.kdtree kdTree;
    3940    public alglib.nearestneighbor.kdtree KDTree {
     
    4748      }
    4849    }
     50
    4951
    5052    public override IEnumerable<string> VariablesUsedForPrediction {
     
    201203        }
    202204        int numNeighbours;
    203         lock (kdTree) { // gkronber: the following calls change the kdTree data structure
     205        lock (kdTreeLockObject) { // gkronber: the following calls change the kdTree data structure
    204206          numNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
    205207          alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
     
    237239        }
    238240        int numNeighbours;
    239         lock (kdTree) {
     241        lock (kdTreeLockObject) {
    240242          // gkronber: the following calls change the kdTree data structure
    241243          numNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
  • branches/symbreg-factors-2650/HeuristicLab.Algorithms.DataAnalysis/3.4/NonlinearRegression/NonlinearRegression.cs

    r14277 r14330  
    2121
    2222using System;
    23 using System.Collections.Generic;
    2423using System.Linq;
     24using HeuristicLab.Analysis;
    2525using HeuristicLab.Common;
    2626using HeuristicLab.Core;
    2727using HeuristicLab.Data;
     28using HeuristicLab.Optimization;
    2829using HeuristicLab.Parameters;
    2930using HeuristicLab.Optimization;
     
    4849    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    4950    private const string SeedParameterName = "Seed";
     51    private const string InitParamsRandomlyParameterName = "InitializeParametersRandomly";
    5052
    5153    public IFixedValueParameter<StringValue> ModelStructureParameter {
     
    6668    public IFixedValueParameter<IntValue> RestartsParameter {
    6769      get { return (IFixedValueParameter<IntValue>)Parameters[RestartsParameterName]; }
     70    }
     71
     72    public IFixedValueParameter<BoolValue> InitParametersRandomlyParameter {
     73      get { return (IFixedValueParameter<BoolValue>)Parameters[InitParamsRandomlyParameterName]; }
    6874    }
    6975
     
    9197      get { return SetSeedRandomlyParameter.Value.Value; }
    9298      set { SetSeedRandomlyParameter.Value.Value = value; }
     99    }
     100
     101    public bool InitializeParametersRandomly {
     102      get { return InitParametersRandomlyParameter.Value.Value; }
     103      set { InitParametersRandomlyParameter.Value.Value = value; }
    93104    }
    94105
     
    103114      Parameters.Add(new FixedValueParameter<StringValue>(ModelStructureParameterName, "The function for which the parameters must be fit (only numeric constants are tuned).", new StringValue("1.0 * x*x + 0.0")));
    104115      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName, "The maximum number of iterations for constants optimization.", new IntValue(200)));
    105       Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of independent random restarts", new IntValue(10)));
     116      Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of independent random restarts (>0)", new IntValue(10)));
    106117      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The PRNG seed value.", new IntValue()));
    107118      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "Switch to determine if the random number seed should be initialized randomly.", new BoolValue(true)));
     119      Parameters.Add(new FixedValueParameter<BoolValue>(InitParamsRandomlyParameterName, "Switch to determine if the real-valued model parameters should be initialized randomly in each restart.", new BoolValue(false)));
     120
     121      SetParameterHiddenState();
     122
     123      InitParametersRandomlyParameter.Value.ValueChanged += (sender, args) => {
     124        SetParameterHiddenState();
     125      };
     126    }
     127
     128    private void SetParameterHiddenState() {
     129      var hide = !InitializeParametersRandomly;
     130      RestartsParameter.Hidden = hide;
     131      SeedParameter.Hidden = hide;
     132      SetSeedRandomlyParameter.Hidden = hide;
    108133    }
    109134
     
    118143      if (!Parameters.ContainsKey(SetSeedRandomlyParameterName))
    119144        Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "Switch to determine if the random number seed should be initialized randomly.", new BoolValue(true)));
     145      if (!Parameters.ContainsKey(InitParamsRandomlyParameterName))
     146        Parameters.Add(new FixedValueParameter<BoolValue>(InitParamsRandomlyParameterName, "Switch to determine if the numeric parameters of the model should be initialized randomly.", new BoolValue(false)));
     147
     148      SetParameterHiddenState();
     149      InitParametersRandomlyParameter.Value.ValueChanged += (sender, args) => {
     150        SetParameterHiddenState();
     151      };
    120152      #endregion
    121153    }
     
    127159    #region nonlinear regression
    128160    protected override void Run() {
    129       if (SetSeedRandomly) Seed = (new System.Random()).Next();
    130       var rand = new MersenneTwister((uint)Seed);
    131161      IRegressionSolution bestSolution = null;
    132       for (int r = 0; r < Restarts; r++) {
    133         var solution = CreateRegressionSolution(Problem.ProblemData, ModelStructure, Iterations, rand);
    134         if (bestSolution == null || solution.TrainingRootMeanSquaredError < bestSolution.TrainingRootMeanSquaredError) {
    135           bestSolution = solution;
     162      if (InitializeParametersRandomly) {
     163        var qualityTable = new DataTable("RMSE table");
     164        qualityTable.VisualProperties.YAxisLogScale = true;
     165        var trainRMSERow = new DataRow("RMSE (train)");
     166        trainRMSERow.VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;
     167        var testRMSERow = new DataRow("RMSE test");
     168        testRMSERow.VisualProperties.ChartType = DataRowVisualProperties.DataRowChartType.Points;
     169
     170        qualityTable.Rows.Add(trainRMSERow);
     171        qualityTable.Rows.Add(testRMSERow);
     172        Results.Add(new Result(qualityTable.Name, qualityTable.Name + " for all restarts", qualityTable));
     173        if (SetSeedRandomly) Seed = (new System.Random()).Next();
     174        var rand = new MersenneTwister((uint)Seed);
     175        bestSolution = CreateRegressionSolution(Problem.ProblemData, ModelStructure, Iterations, rand);
     176        trainRMSERow.Values.Add(bestSolution.TrainingRootMeanSquaredError);
     177        testRMSERow.Values.Add(bestSolution.TestRootMeanSquaredError);
     178        for (int r = 0; r < Restarts; r++) {
     179          var solution = CreateRegressionSolution(Problem.ProblemData, ModelStructure, Iterations, rand);
     180          trainRMSERow.Values.Add(solution.TrainingRootMeanSquaredError);
     181          testRMSERow.Values.Add(solution.TestRootMeanSquaredError);
     182          if (solution.TrainingRootMeanSquaredError < bestSolution.TrainingRootMeanSquaredError) {
     183            bestSolution = solution;
     184          }
    136185        }
     186      } else {
     187        bestSolution = CreateRegressionSolution(Problem.ProblemData, ModelStructure, Iterations);
    137188      }
    138189
     
    148199    /// The starting point for the numeric constants is initialized randomly if a random number generator is specified (~N(0,1)). Otherwise the user specified constants are
    149200    /// used as a starting point.
    150     /// </summary>
     201    /// </summary>
    151202    /// <param name="problemData">Training and test data</param>
    152203    /// <param name="modelStructure">The function as infix expression</param>
     
    154205    /// <param name="random">Optional random number generator for random initialization of numeric constants.</param>
    155206    /// <returns></returns>
    156     public static ISymbolicRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData, string modelStructure, int maxIterations, IRandom random = null) {
     207    public static ISymbolicRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData, string modelStructure, int maxIterations, IRandom rand = null) {
    157208      var parser = new InfixExpressionParser();
    158209      var tree = parser.Parse(modelStructure);
     
    183234
    184235      // initialize constants randomly
    185       if (random != null) {
     236      if (rand != null) {
    186237        foreach (var node in tree.IterateNodesPrefix().OfType<ConstantTreeNode>()) {
    187           node.Value = NormalDistributedRandom.NextDouble(random, 0, 1);
     238          double f = Math.Exp(NormalDistributedRandom.NextDouble(rand, 0, 1));
     239          double s = rand.NextDouble() < 0.5 ? -1 : 1;
     240          node.Value = s * node.Value * f;
    188241        }
    189242      }
Note: See TracChangeset for help on using the changeset viewer.