Changeset 13200 for trunk/sources/HeuristicLab.Algorithms.DataAnalysis
Timestamp: 11/16/15 23:00:32
File: 1 edited
Legend: in the unified diff below, lines prefixed with + were added in r13200, lines prefixed with - were removed, and unprefixed lines are unmodified context.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessCovarianceOptimizationProblem.cs
--- GaussianProcessCovarianceOptimizationProblem.cs (r13160)
+++ GaussianProcessCovarianceOptimizationProblem.cs (r13200)
@@ -138,4 +138,15 @@
       get { return true; } // return log likelihood (instead of negative log likelihood as in GPR
     }
+
+    // problem stores a few variables for information exchange from Evaluate() to Analyze()
+    private object problemStateLocker = new object();
+    [Storable]
+    private double bestQ;
+    [Storable]
+    private double[] bestHyperParameters;
+    [Storable]
+    private IMeanFunction meanFunc;
+    [Storable]
+    private ICovarianceFunction covFunc;
 
     public GaussianProcessCovarianceOptimizationProblem()
@@ -173,4 +184,11 @@
     }
 
+    protected override void OnReset() {
+      base.OnReset();
+      meanFunc = null;
+      covFunc = null;
+      bestQ = double.NegativeInfinity;
+      bestHyperParameters = null;
+    }
 
     public override double Evaluate(ISymbolicExpressionTree tree, IRandom random) {
@@ -231,5 +249,19 @@
       }
 
+      UpdateBestSoFar(bestObjValue[0], bestHyperParameters, meanFunction, covarianceFunction);
+
       return bestObjValue[0];
+    }
+
+    // updates the overall best quality and overall best model for Analyze()
+    private void UpdateBestSoFar(double bestQ, double[] bestHyperParameters, IMeanFunction meanFunc, ICovarianceFunction covFunc) {
+      lock (problemStateLocker) {
+        if (bestQ > this.bestQ) {
+          this.bestQ = bestQ;
+          this.bestHyperParameters = bestHyperParameters;
+          this.meanFunc = meanFunc;
+          this.covFunc = covFunc;
+        }
+      }
     }
 
@@ -252,57 +284,20 @@
         results["Best Tree"].Value = bestClone;
         results["Best Solution Quality"].Value = new DoubleValue(bestQuality);
-        results["Best Solution"].Value = CreateSolution(bestClone, random);
-      }
-    }
-
-    private IItem CreateSolution(ISymbolicExpressionTree tree, IRandom random) {
-      // again tune the hyper-parameters.
-      // this is suboptimal because 1) more effort and 2) we cannot be sure to find the same local optimum
-      var meanFunction = new MeanConst();
+        results["Best Solution"].Value = CreateSolution();
+      }
+    }
+
+    private IItem CreateSolution() {
       var problemData = ProblemData;
       var ds = problemData.Dataset;
       var targetVariable = problemData.TargetVariable;
       var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
-      var nVars = allowedInputVariables.Length;
       var trainingRows = problemData.TrainingIndices.ToArray();
-      var bestObjValue = new double[1] { double.MinValue };
-
-      // use the same covariance function for each restart
-      var covarianceFunction = TreeToCovarianceFunction(tree);
-      // data that is necessary for the objective function
-      var data = Tuple.Create(ds, targetVariable, allowedInputVariables, trainingRows, (IMeanFunction)meanFunction, covarianceFunction, bestObjValue);
-
-      // allocate hyperparameters
-      var hyperParameters = new double[meanFunction.GetNumberOfParameters(nVars) + covarianceFunction.GetNumberOfParameters(nVars) + 1]; // mean + cov + noise
-
-      // initialize hyperparameters
-      hyperParameters[0] = ds.GetDoubleValues(targetVariable).Average(); // mean const
-
-      for (int i = 0; i < covarianceFunction.GetNumberOfParameters(nVars); i++) {
-        hyperParameters[1 + i] = random.NextDouble() * 2.0 - 1.0;
-      }
-      hyperParameters[hyperParameters.Length - 1] = 1.0; // s² = exp(2), TODO: other inits better?
-
-      // use alglib.bfgs for hyper-parameter optimization ...
-      double epsg = 0;
-      double epsf = 0.00001;
-      double epsx = 0;
-      double stpmax = 1;
-      int maxits = ConstantOptIterations;
-      alglib.mincgstate state;
-      alglib.mincgreport rep;
-
-      alglib.mincgcreate(hyperParameters, out state);
-      alglib.mincgsetcond(state, epsg, epsf, epsx, maxits);
-      alglib.mincgsetstpmax(state, stpmax);
-      alglib.mincgoptimize(state, ObjectiveFunction, null, data);
-
-      alglib.mincgresults(state, out hyperParameters, out rep);
-
-      if (rep.terminationtype >= 0) {
-
-        var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, hyperParameters, meanFunction, covarianceFunction);
-        return model.CreateRegressionSolution(ProblemData);
-      } else return null;
+
+      lock (problemStateLocker) {
+        var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, bestHyperParameters, (IMeanFunction)meanFunc.Clone(), (ICovarianceFunction)covFunc.Clone());
+        model.FixParameters();
+        return model.CreateRegressionSolution((IRegressionProblemData)ProblemData.Clone());
+      }
     }
 
@@ -387,4 +382,11 @@
     private GaussianProcessCovarianceOptimizationProblem(GaussianProcessCovarianceOptimizationProblem original, Cloner cloner)
      : base(original, cloner) {
+      bestQ = original.bestQ;
+      meanFunc = cloner.Clone(original.meanFunc);
+      covFunc = cloner.Clone(original.covFunc);
+      if (bestHyperParameters != null) {
+        bestHyperParameters = new double[original.bestHyperParameters.Length];
+        Array.Copy(original.bestHyperParameters, bestHyperParameters, bestHyperParameters.Length);
+      }
     }
     public override IDeepCloneable Clone(Cloner cloner) {
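The main pattern in this changeset is lock-protected exchange of best-so-far state between Evaluate(), which may run on several threads, and Analyze()/CreateSolution(), which builds the final solution from a consistent snapshot instead of re-tuning the hyperparameters. Below is a minimal self-contained sketch of that pattern; BestSoFarTracker and all of its members are illustrative names, not part of the HeuristicLab API.

    using System;
    using System.Threading.Tasks;

    // Minimal sketch of the Evaluate()/Analyze() state-exchange pattern above.
    // BestSoFarTracker and its members are illustrative, not HeuristicLab types.
    public class BestSoFarTracker {
      private readonly object locker = new object();
      private double bestQ = double.NegativeInfinity;
      private double[] bestHyperParameters;

      // Called concurrently from evaluation threads: keep only the best quality.
      public void UpdateBestSoFar(double q, double[] hyperParameters) {
        lock (locker) {
          if (q > bestQ) {
            bestQ = q;
            bestHyperParameters = hyperParameters;
          }
        }
      }

      // Called from the analyzer: read both values as one consistent snapshot.
      public Tuple<double, double[]> GetBestSnapshot() {
        lock (locker) {
          return Tuple.Create(bestQ, (double[])bestHyperParameters?.Clone());
        }
      }
    }

    public static class Demo {
      public static void Main() {
        var tracker = new BestSoFarTracker();
        // Simulate parallel Evaluate() calls racing to report their results.
        Parallel.For(0, 1000, i => {
          double q = 1.0 / (1 + i % 37); // deterministic fake quality
          tracker.UpdateBestSoFar(q, new[] { q, 2 * q });
        });
        var best = tracker.GetBestSnapshot();
        Console.WriteLine("best quality: " + best.Item1);
      }
    }

Taking the snapshot under the same lock that guards the updates is what keeps the paired reads (quality plus parameters) consistent, mirroring how CreateSolution() reads bestHyperParameters, meanFunc, and covFunc inside lock (problemStateLocker).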
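One detail of the new clone constructor worth flagging: the guard if (bestHyperParameters != null) tests the clone's own field, which still holds its default value of null when the constructor body runs, so the array copy is never executed. A sketch of the presumably intended deep copy follows; this is an assumption about intent, not the committed code.

    // Presumably intended guard: test the source object's field, since the
    // clone's own bestHyperParameters is still null at this point (assumption).
    if (original.bestHyperParameters != null) {
      bestHyperParameters = new double[original.bestHyperParameters.Length];
      Array.Copy(original.bestHyperParameters, bestHyperParameters, bestHyperParameters.Length);
    }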