
Changeset 13147 for stable


Timestamp: 11/13/15 20:51:23 (9 years ago)
Author: gkronber
Message: #2497: merged r13118:13119 from trunk to stable
Location: stable
Files: 7 edited

Legend: unmodified lines carry no marker, added lines are prefixed with "+", removed lines with "-".
  • stable

  • stable/HeuristicLab.Algorithms.DataAnalysis

  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessBase.cs

    r13053 → r13147

      #endregion

    - using System;
    - using System.Linq;
      using HeuristicLab.Algorithms.GradientDescent;
      using HeuristicLab.Common;
    …
      using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    - using HeuristicLab.PluginInfrastructure;
      using HeuristicLab.Problems.DataAnalysis;

    …
          protected const string HyperparameterGradientsParameterName = "HyperparameterGradients";
          protected const string SolutionCreatorParameterName = "GaussianProcessSolutionCreator";
    +     protected const string ScaleInputValuesParameterName = "ScaleInputValues";

          public new IDataAnalysisProblem Problem {
    …
          public IValueParameter<BoolValue> SetSeedRandomlyParameter {
            get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    +     }
    +     public IFixedValueParameter<BoolValue> ScaleInputValuesParameter {
    +       get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleInputValuesParameterName]; }
          }
          #endregion
    …
          public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }
          public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }
    +
    +     public bool ScaleInputValues {
    +       get { return ScaleInputValuesParameter.Value.Value; }
    +       set { ScaleInputValuesParameter.Value.Value = value; }
    +     }
          #endregion

    …
            Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
            Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
    +
    +       Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName,
    +         "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
    +       Parameters[ScaleInputValuesParameterName].Hidden = true;

            // necessary for BFGS
    …
              Parameters["Maximization"].Hidden = true;
            }
    +
    +       if (!Parameters.ContainsKey(ScaleInputValuesParameterName)) {
    +         Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName,
    +           "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
    +         Parameters[ScaleInputValuesParameterName].Hidden = true;
    +       }
            #endregion
          }
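    The new ScaleInputValues parameter defaults to true, and the ContainsKey check above adds it when older files are loaded, so existing runs keep the previous behaviour. A minimal usage sketch; the GaussianProcessRegression algorithm used here is not part of this changeset and is assumed to derive from GaussianProcessBase:

    using HeuristicLab.Algorithms.DataAnalysis;

    public static class ScaleInputValuesUsageSketch {
      public static GaussianProcessRegression CreateUnscaledGp() {
        var gp = new GaussianProcessRegression();
        // New with this changeset: turn off the [0..1] input scaling that was previously always applied.
        gp.ScaleInputValues = false;
        return gp;
      }
    }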
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationModelCreator.cs

    r12009 → r13147

        public override IOperation Apply() {
          try {
    -       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
    +       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction, ScaleInputValues);
            ModelParameter.ActualValue = model;
            NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
            HyperparameterGradientsParameter.ActualValue = new RealVector(model.HyperparameterGradients);
            return base.Apply();
    -     }
    -     catch (ArgumentException) { }
    -     catch (alglib.alglibexception) { }
    +     } catch (ArgumentException) { } catch (alglib.alglibexception) { }
          NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(1E300);
          HyperparameterGradientsParameter.ActualValue = new RealVector(Hyperparameter.Count());
    …
        }

    -   public static IGaussianProcessModel Create(IClassificationProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction) {
    -     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction);
    +   public static IGaussianProcessModel Create(IClassificationProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction, bool scaleInputs = true) {
    +     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction, scaleInputs);
        }
      }
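    Because the extra scaleInputs argument has a default value of true, existing callers of Create compile unchanged while new callers can opt out of scaling. A hedged sketch of both call forms; the namespaces and wrapper class are assumptions, only the Create signature comes from the diff above:

    using HeuristicLab.Algorithms.DataAnalysis;
    using HeuristicLab.Problems.DataAnalysis;

    public static class CreateOverloadSketch {
      // Pre-existing call form: scaleInputs falls back to its default (true).
      public static IGaussianProcessModel Scaled(IClassificationProblemData data,
          double[] hyp, IMeanFunction mean, ICovarianceFunction cov) {
        return GaussianProcessClassificationModelCreator.Create(data, hyp, mean, cov);
      }

      // New call form: explicitly disable the [0..1] input scaling.
      public static IGaussianProcessModel Unscaled(IClassificationProblemData data,
          double[] hyp, IMeanFunction mean, ICovarianceFunction cov) {
        return GaussianProcessClassificationModelCreator.Create(data, hyp, mean, cov, scaleInputs: false);
      }
    }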
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r13145 → r13147

          this.meanFunction = cloner.Clone(original.meanFunction);
          this.covarianceFunction = cloner.Clone(original.covarianceFunction);
    -     this.inputScaling = cloner.Clone(original.inputScaling);
    +     if (original.inputScaling != null)
    +       this.inputScaling = cloner.Clone(original.inputScaling);
          this.trainingDataset = cloner.Clone(original.trainingDataset);
          this.negativeLogLikelihood = original.negativeLogLikelihood;
    …
        }
        public GaussianProcessModel(IDataset ds, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows,
    -     IEnumerable<double> hyp, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction)
    +     IEnumerable<double> hyp, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction,
    +     bool scaleInputs = true)
          : base() {
          this.name = ItemName;
    …
                                                 .ToArray();
          sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
    -     CalculateModel(ds, rows);
    -   }
    -
    -   private void CalculateModel(IDataset ds, IEnumerable<int> rows) {
    +     CalculateModel(ds, rows, scaleInputs);
    +   }
    +
    +   private void CalculateModel(IDataset ds, IEnumerable<int> rows, bool scaleInputs = true) {
          this.trainingDataset = (IDataset)ds.Clone();
          this.trainingRows = rows.ToArray();
    -     this.inputScaling = new Scaling(trainingDataset, allowedInputVariables, rows);
    -     this.x = CalculateX(trainingDataset, allowedInputVariables, rows, inputScaling);
    -     var y = ds.GetDoubleValues(targetVariable, rows);
    +     this.inputScaling = scaleInputs ? new Scaling(ds, allowedInputVariables, rows) : null;
    +
    +     x = GetData(ds, this.allowedInputVariables, this.trainingRows, this.inputScaling);
    +
    +     IEnumerable<double> y;
    +     y = ds.GetDoubleValues(targetVariable, rows);

          int n = x.GetLength(0);
    …
            .Select(r => mean.Mean(x, r))
            .ToArray();
    -
    -

          // calculate sum of diagonal elements for likelihood
    …
        }

    -   private static double[,] CalculateX(IDataset ds, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows, Scaling inputScaling) {
    -     return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
    +   private static double[,] GetData(IDataset ds, IEnumerable<string> allowedInputs, IEnumerable<int> rows, Scaling scaling) {
    +     if (scaling != null) {
    +       return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputs, rows, scaling);
    +     } else {
    +       return AlglibUtil.PrepareInputMatrix(ds, allowedInputs, rows);
    +     }
        }

    …
        private IEnumerable<double> GetEstimatedValuesHelper(IDataset dataset, IEnumerable<int> rows) {
          if (x == null) {
    -       this.x = CalculateX(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
    +       x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
          }
          int n = x.GetLength(0);

    -     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
    +     double[,] newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
          int newN = newX.GetLength(0);

    …
        public IEnumerable<double> GetEstimatedVariance(IDataset dataset, IEnumerable<int> rows) {
          if (x == null) {
    -       this.x = CalculateX(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
    +       x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
          }
          int n = x.GetLength(0);

    -     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
    +     var newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
          int newN = newX.GetLength(0);
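    The Scaling class itself is not touched by this diff; per the parameter description it maps each input variable linearly onto [0..1] over the training rows, and GetData above skips that step when scaling is null. A self-contained sketch of that kind of min-max mapping, as an illustration only and not the actual HeuristicLab implementation:

    using System;
    using System.Linq;

    public static class MinMaxScalingSketch {
      // Maps the values of one input column linearly onto [0..1]; a constant column maps to 0.
      public static double[] ScaleToUnitInterval(double[] column) {
        double min = column.Min();
        double max = column.Max();
        double range = max - min;
        if (range == 0.0) return column.Select(_ => 0.0).ToArray();
        return column.Select(v => (v - min) / range).ToArray();
      }

      public static void Main() {
        // 10 -> 0, 15 -> 0.5, 20 -> 1
        Console.WriteLine(string.Join(", ", ScaleToUnitInterval(new[] { 10.0, 15.0, 20.0 })));
      }
    }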
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModelCreator.cs

    r12009 → r13147

        private const string NegativeLogLikelihoodParameterName = "NegativeLogLikelihood";
        private const string HyperparameterGradientsParameterName = "HyperparameterGradients";
    +   protected const string ScaleInputValuesParameterName = "ScaleInputValues";

        #region Parameter Properties
    …
          get { return (ILookupParameter<DoubleValue>)Parameters[NegativeLogLikelihoodParameterName]; }
        }
    -
    +   public ILookupParameter<BoolValue> ScaleInputValuesParameter {
    +     get { return (ILookupParameter<BoolValue>)Parameters[ScaleInputValuesParameterName]; }
    +   }
        #endregion

    …
        protected IMeanFunction MeanFunction { get { return MeanFunctionParameter.ActualValue; } }
        protected ICovarianceFunction CovarianceFunction { get { return CovarianceFunctionParameter.ActualValue; } }
    +   public bool ScaleInputValues { get { return ScaleInputValuesParameter.ActualValue.Value; } }
        #endregion

    …
          Parameters.Add(new LookupParameter<RealVector>(HyperparameterGradientsParameterName, "The gradients of the hyperparameters for the produced Gaussian process model (necessary for hyperparameter optimization)"));
          Parameters.Add(new LookupParameter<DoubleValue>(NegativeLogLikelihoodParameterName, "The negative log-likelihood of the produced Gaussian process model given the data."));
    +
    +     Parameters.Add(new LookupParameter<BoolValue>(ScaleInputValuesParameterName,
    +       "Determines if the input variable values are scaled to the range [0..1] for training."));
    +     Parameters[ScaleInputValuesParameterName].Hidden = true;
    +   }
    +
    +   [StorableHook(HookType.AfterDeserialization)]
    +   private void AfterDeserialization() {
    +     if (!Parameters.ContainsKey(ScaleInputValuesParameterName)) {
    +       Parameters.Add(new LookupParameter<BoolValue>(ScaleInputValuesParameterName,
    +         "Determines if the input variable values are scaled to the range [0..1] for training."));
    +       Parameters[ScaleInputValuesParameterName].Hidden = true;
    +     }
        }
      }
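    The same backwards-compatibility idiom appears here as in GaussianProcessBase: a storable hook that runs after deserialization adds the parameter when an item saved before this change is loaded. A stripped-down sketch of the idiom with a plain dictionary standing in for the real parameter collection; everything in it is illustrative, not the HeuristicLab API:

    using System.Collections.Generic;

    public class OperatorWithNewParameterSketch {
      private const string ScaleInputValuesParameterName = "ScaleInputValues";
      // Stand-in for the real parameter collection.
      public Dictionary<string, object> Parameters { get; } = new Dictionary<string, object>();

      // In HeuristicLab this method would carry [StorableHook(HookType.AfterDeserialization)]
      // and run after an old file has been loaded.
      public void AfterDeserialization() {
        if (!Parameters.ContainsKey(ScaleInputValuesParameterName)) {
          // Old files predate the parameter: add it with the previous behaviour (scaling enabled).
          Parameters.Add(ScaleInputValuesParameterName, true);
        }
      }
    }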
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs

    r12009 → r13147

        public override IOperation Apply() {
          try {
    -       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
    +       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction, ScaleInputValues);
            ModelParameter.ActualValue = model;
            NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
    …
        }

    -   public static IGaussianProcessModel Create(IRegressionProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction) {
    -     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction);
    +   public static IGaussianProcessModel Create(IRegressionProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction, bool scaleInputs = true) {
    +     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction, scaleInputs);
        }
      }