Changeset 13147
- Timestamp: 11/13/15 20:51:23
- Location: stable
- Files: 7 edited
Legend:
- Unmodified: no prefix
- Added: lines prefixed with +
- Removed: lines prefixed with -
stable
- Property svn:mergeinfo changed: /trunk/sources merged: 13118-13119
stable/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Algorithms.DataAnalysis merged: 13118-13119
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessBase.cs
r13053 → r13147

      #endregion

  -   using System;
  -   using System.Linq;
      using HeuristicLab.Algorithms.GradientDescent;
      using HeuristicLab.Common;
      …
      using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
  -   using HeuristicLab.PluginInfrastructure;
      using HeuristicLab.Problems.DataAnalysis;

      …
      protected const string HyperparameterGradientsParameterName = "HyperparameterGradients";
      protected const string SolutionCreatorParameterName = "GaussianProcessSolutionCreator";
  +   protected const string ScaleInputValuesParameterName = "ScaleInputValues";

      public new IDataAnalysisProblem Problem {
      …
      public IValueParameter<BoolValue> SetSeedRandomlyParameter {
        get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
  +   }
  +   public IFixedValueParameter<BoolValue> ScaleInputValuesParameter {
  +     get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleInputValuesParameterName]; }
      }
      #endregion
      …
      public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }
      public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }
  +
  +   public bool ScaleInputValues {
  +     get { return ScaleInputValuesParameter.Value.Value; }
  +     set { ScaleInputValuesParameter.Value.Value = value; }
  +   }
      #endregion
      …
      Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
      Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
  +
  +   Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName,
  +     "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
  +   Parameters[ScaleInputValuesParameterName].Hidden = true;

      // necessary for BFGS
      …
        Parameters["Maximization"].Hidden = true;
      }
  +
  +   if (!Parameters.ContainsKey(ScaleInputValuesParameterName)) {
  +     Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName,
  +       "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
  +     Parameters[ScaleInputValuesParameterName].Hidden = true;
  +   }
      #endregion
    }
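The new ScaleInputValues option is added as a hidden FixedValueParameter defaulting to true, so existing experiments keep their behavior, and the guard in the backwards-compatibility region adds the parameter to items stored with older versions. As a rough usage sketch (not part of the changeset, and assuming that GaussianProcessRegression derives from GaussianProcessBase and therefore inherits the new property), the option could be switched off from code:

  using HeuristicLab.Algorithms.DataAnalysis;

  // Hypothetical sketch only: relies on the assumption that GaussianProcessRegression
  // inherits the ScaleInputValues property introduced in this revision.
  public static class ScaleInputValuesSketch {
    public static GaussianProcessRegression CreateUnscaledGp() {
      var gp = new GaussianProcessRegression();
      gp.ScaleInputValues = false;  // train on raw input values instead of [0..1]-scaled ones
      return gp;
    }
  }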
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationModelCreator.cs
r12009 → r13147

      public override IOperation Apply() {
        try {
  -       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
  +       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction, ScaleInputValues);
          ModelParameter.ActualValue = model;
          NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
          HyperparameterGradientsParameter.ActualValue = new RealVector(model.HyperparameterGradients);
          return base.Apply();
  -     }
  -     catch (ArgumentException) { }
  -     catch (alglib.alglibexception) { }
  +     } catch (ArgumentException) { } catch (alglib.alglibexception) { }
        NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(1E300);
        HyperparameterGradientsParameter.ActualValue = new RealVector(Hyperparameter.Count());
        …
      }

  -   public static IGaussianProcessModel Create(IClassificationProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction) {
  -     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction);
  +   public static IGaussianProcessModel Create(IClassificationProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction, bool scaleInputs = true) {
  +     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction, scaleInputs);
      }
    }
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r13145 → r13147

      this.meanFunction = cloner.Clone(original.meanFunction);
      this.covarianceFunction = cloner.Clone(original.covarianceFunction);
  -   this.inputScaling = cloner.Clone(original.inputScaling);
  +   if (original.inputScaling != null)
  +     this.inputScaling = cloner.Clone(original.inputScaling);
      this.trainingDataset = cloner.Clone(original.trainingDataset);
      this.negativeLogLikelihood = original.negativeLogLikelihood;
      …
    }
    public GaussianProcessModel(IDataset ds, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows,
  -   IEnumerable<double> hyp, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction)
  +   IEnumerable<double> hyp, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction,
  +   bool scaleInputs = true)
      : base() {
      this.name = ItemName;
      …
        .ToArray();
      sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
  -   CalculateModel(ds, rows);
  -   }
  -
  -   private void CalculateModel(IDataset ds, IEnumerable<int> rows) {
  +   CalculateModel(ds, rows, scaleInputs);
  +   }
  +
  +   private void CalculateModel(IDataset ds, IEnumerable<int> rows, bool scaleInputs = true) {
      this.trainingDataset = (IDataset)ds.Clone();
      this.trainingRows = rows.ToArray();
  -   this.inputScaling = new Scaling(trainingDataset, allowedInputVariables, rows);
  -   this.x = CalculateX(trainingDataset, allowedInputVariables, rows, inputScaling);
  -   var y = ds.GetDoubleValues(targetVariable, rows);
  +   this.inputScaling = scaleInputs ? new Scaling(ds, allowedInputVariables, rows) : null;
  +
  +   x = GetData(ds, this.allowedInputVariables, this.trainingRows, this.inputScaling);
  +
  +   IEnumerable<double> y;
  +   y = ds.GetDoubleValues(targetVariable, rows);

      int n = x.GetLength(0);
      …
        .Select(r => mean.Mean(x, r))
        .ToArray();
  -
  -

      // calculate sum of diagonal elements for likelihood
      …
    }

  -   private static double[,] CalculateX(IDataset ds, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows, Scaling inputScaling) {
  -     return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
  +   private static double[,] GetData(IDataset ds, IEnumerable<string> allowedInputs, IEnumerable<int> rows, Scaling scaling) {
  +     if (scaling != null) {
  +       return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputs, rows, scaling);
  +     } else {
  +       return AlglibUtil.PrepareInputMatrix(ds, allowedInputs, rows);
  +     }
      }
      …
      private IEnumerable<double> GetEstimatedValuesHelper(IDataset dataset, IEnumerable<int> rows) {
        if (x == null) {
  -       this.x = CalculateX(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
  +       x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
        }
        int n = x.GetLength(0);

  -     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
  +     double[,] newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
        int newN = newX.GetLength(0);
      …
      public IEnumerable<double> GetEstimatedVariance(IDataset dataset, IEnumerable<int> rows) {
        if (x == null) {
  -       this.x = CalculateX(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
  +       x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
        }
        int n = x.GetLength(0);

  -     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
  +     var newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
        int newN = newX.GetLength(0);
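For context, the parameter description introduced in this changeset states that input variable values are scaled to the range [0..1] for training; with scaling disabled, GetData now falls back to AlglibUtil.PrepareInputMatrix and inputScaling stays null. The following standalone sketch illustrates the kind of column-wise min-max scaling this option controls; it is an illustration of the concept only, not HeuristicLab's Scaling or AlglibUtil implementation, whose handling of edge cases (e.g. constant columns) may differ.

  using System;

  // Illustration only: scale each column of a data matrix to [0..1].
  public static class InputScalingSketch {
    public static double[,] ScaleToUnitInterval(double[,] x) {
      int rows = x.GetLength(0), cols = x.GetLength(1);
      var scaled = new double[rows, cols];
      for (int j = 0; j < cols; j++) {
        double min = double.MaxValue, max = double.MinValue;
        for (int i = 0; i < rows; i++) {
          if (x[i, j] < min) min = x[i, j];
          if (x[i, j] > max) max = x[i, j];
        }
        double range = max - min;
        for (int i = 0; i < rows; i++)
          scaled[i, j] = range > 0 ? (x[i, j] - min) / range : 0.0; // constant column -> 0
      }
      return scaled;
    }
  }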
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModelCreator.cs
r12009 → r13147

      private const string NegativeLogLikelihoodParameterName = "NegativeLogLikelihood";
      private const string HyperparameterGradientsParameterName = "HyperparameterGradients";
  +   protected const string ScaleInputValuesParameterName = "ScaleInputValues";

      #region Parameter Properties
      …
        get { return (ILookupParameter<DoubleValue>)Parameters[NegativeLogLikelihoodParameterName]; }
      }
  +   public ILookupParameter<BoolValue> ScaleInputValuesParameter {
  +     get { return (ILookupParameter<BoolValue>)Parameters[ScaleInputValuesParameterName]; }
  +   }
      #endregion
      …
      protected IMeanFunction MeanFunction { get { return MeanFunctionParameter.ActualValue; } }
      protected ICovarianceFunction CovarianceFunction { get { return CovarianceFunctionParameter.ActualValue; } }
  +   public bool ScaleInputValues { get { return ScaleInputValuesParameter.ActualValue.Value; } }
      #endregion
      …
      Parameters.Add(new LookupParameter<RealVector>(HyperparameterGradientsParameterName, "The gradients of the hyperparameters for the produced Gaussian process model (necessary for hyperparameter optimization)"));
      Parameters.Add(new LookupParameter<DoubleValue>(NegativeLogLikelihoodParameterName, "The negative log-likelihood of the produced Gaussian process model given the data."));
  +
  +   Parameters.Add(new LookupParameter<BoolValue>(ScaleInputValuesParameterName,
  +     "Determines if the input variable values are scaled to the range [0..1] for training."));
  +   Parameters[ScaleInputValuesParameterName].Hidden = true;
  +  }
  +
  +  [StorableHook(HookType.AfterDeserialization)]
  +  private void AfterDeserialization() {
  +    if (!Parameters.ContainsKey(ScaleInputValuesParameterName)) {
  +      Parameters.Add(new LookupParameter<BoolValue>(ScaleInputValuesParameterName,
  +        "Determines if the input variable values are scaled to the range [0..1] for training."));
  +      Parameters[ScaleInputValuesParameterName].Hidden = true;
  +    }
     }
   }
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs
r12009 → r13147

      public override IOperation Apply() {
        try {
  -       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
  +       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction, ScaleInputValues);
          ModelParameter.ActualValue = model;
          NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
          …
      }

  -   public static IGaussianProcessModel Create(IRegressionProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction) {
  -     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction);
  +   public static IGaussianProcessModel Create(IRegressionProblemData problemData, double[] hyperparameter, IMeanFunction meanFunction, ICovarianceFunction covarianceFunction, bool scaleInputs = true) {
  +     return new GaussianProcessModel(problemData.Dataset, problemData.TargetVariable, problemData.AllowedInputVariables, problemData.TrainingIndices, hyperparameter, meanFunction, covarianceFunction, scaleInputs);
      }
    }
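Both the classification and the regression creator now forward the flag to their static Create factory, which defaults scaleInputs to true so existing callers are unaffected. A hypothetical caller that wants an unscaled model could pass the new optional argument directly; the namespaces and interface locations below are assumptions based on the using directives visible in this changeset, not verified against the full sources.

  using HeuristicLab.Algorithms.DataAnalysis;
  using HeuristicLab.Problems.DataAnalysis;

  // Hypothetical helper, not part of the changeset: wraps the static factory shown above
  // to request a Gaussian process model that is trained on unscaled inputs.
  public static class UnscaledGpModelSketch {
    public static IGaussianProcessModel Create(IRegressionProblemData problemData,
                                               double[] hyperparameters,
                                               IMeanFunction meanFunction,
                                               ICovarianceFunction covarianceFunction) {
      // scaleInputs: false uses the new optional parameter added in r13147
      return GaussianProcessRegressionModelCreator.Create(
        problemData, hyperparameters, meanFunction, covarianceFunction, scaleInputs: false);
    }
  }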