
Changeset 13290 for stable


Timestamp: 11/19/15 12:06:10 (9 years ago)
Author: gkronber
Message: #1967: merged r13200, r13201, r13209, r13234, r13242 from trunk to stable
Location: stable
Files: 3 edited

Legend:

  +  Added
  -  Removed
  (unprefixed lines are unchanged context)
  • stable

  • stable/HeuristicLab.Algorithms.DataAnalysis

  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessCovarianceOptimizationProblem.cs

r13287 → r13290:

   [Creatable(CreatableAttribute.Categories.GeneticProgrammingProblems, Priority = 300)]
   [StorableClass]
-  public sealed class GaussianProcessCovarianceOptimizationProblem : SymbolicExpressionTreeProblem, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
+  public sealed class GaussianProcessCovarianceOptimizationProblem : SymbolicExpressionTreeProblem, IStatefulItem, IRegressionProblem, IProblemInstanceConsumer<IRegressionProblemData>, IProblemInstanceExporter<IRegressionProblemData> {
     #region static variables and ctor
     private static readonly CovarianceMaternIso maternIso1;
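The only change in this hunk is that the problem class now also implements IStatefulItem, whose InitializeState()/ClearState() methods (added further down in this changeset) let the engine reset the problem's run-specific best-so-far state between runs. A minimal sketch of that lifecycle, using stand-in types rather than HeuristicLab's actual definitions:

using System;

// Hedged sketch with stand-in types (not HeuristicLab's actual definitions):
// IStatefulItem contributes two parameterless methods, InitializeState() and
// ClearState(), which the engine calls around a run so run-specific state
// cannot leak into the next run.
public interface IStatefulItemSketch {
  void InitializeState();
  void ClearState();
}

public sealed class StatefulProblemSketch : IStatefulItemSketch {
  private double bestQ;
  public StatefulProblemSketch() { ClearState(); }
  public void InitializeState() { ClearState(); }   // reset before a run starts
  public void ClearState() { bestQ = double.NegativeInfinity; }
  public void Report(double quality) { if (quality > bestQ) bestQ = quality; }
  public double BestQ { get { return bestQ; } }
}

public static class LifecycleDemo {
  public static void Main() {
    var problem = new StatefulProblemSketch();
    problem.InitializeState();
    problem.Report(0.5);
    problem.Report(0.7);
    Console.WriteLine(problem.BestQ);  // 0.7
    problem.ClearState();              // run-specific state dropped afterwards
  }
}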
     
       get { return true; } // return log likelihood (instead of negative log likelihood as in GPR)
     }
+
+    // problem stores a few variables for information exchange from Evaluate() to Analyze()
+    private readonly object problemStateLocker = new object();
+    [Storable]
+    private double bestQ;
+    [Storable]
+    private double[] bestHyperParameters;
+    [Storable]
+    private IMeanFunction meanFunc;
+    [Storable]
+    private ICovarianceFunction covFunc;

     public GaussianProcessCovarianceOptimizationProblem()
     
     }

-
+    public void InitializeState() { ClearState(); }
+    public void ClearState() {
+      meanFunc = null;
+      covFunc = null;
+      bestQ = double.NegativeInfinity;
+      bestHyperParameters = null;
+    }
+
+    private readonly object syncRoot = new object();
+    // Does not produce the same result for the same seed when using the parallel engine (see below)!
     public override double Evaluate(ISymbolicExpressionTree tree, IRandom random) {
-
       var meanFunction = new MeanConst();
       var problemData = ProblemData;
     
         hyperParameters[0] = ds.GetDoubleValues(targetVariable).Average(); // mean const

-        for (int i = 0; i < covarianceFunction.GetNumberOfParameters(nVars); i++) {
-          hyperParameters[1 + i] = random.NextDouble() * 2.0 - 1.0;
+        // Evaluate might be called concurrently, therefore access to random has to be synchronized.
+        // However, results of multiple runs with the same seed will be different when using the parallel engine.
+        lock (syncRoot) {
+          for (int i = 0; i < covarianceFunction.GetNumberOfParameters(nVars); i++) {
+            hyperParameters[1 + i] = random.NextDouble() * 2.0 - 1.0;
+          }
         }
         hyperParameters[hyperParameters.Length - 1] = 1.0; // s² = exp(2), TODO: other inits better?
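The lock introduced here serializes access to the shared IRandom instance, whose underlying generator is not thread-safe; the order in which threads reach the lock is still scheduler-dependent, which is why equal seeds no longer guarantee equal results under the parallel engine. A self-contained sketch of the same pattern, with System.Random standing in for HeuristicLab's IRandom:

using System;
using System.Threading.Tasks;

// Hedged sketch of the synchronization added above: System.Random stands in
// for HeuristicLab's IRandom, which is likewise not safe for concurrent use.
public static class SharedRngDemo {
  private static readonly object syncRoot = new object();
  private static readonly Random random = new Random(1234);

  public static double[] SampleParameters(int n) {
    var p = new double[n];
    // Without the lock, concurrent NextDouble() calls can corrupt the
    // generator's internal state. With it, calls are serialized, but the
    // interleaving of threads at the lock still depends on scheduling, so
    // runs with the same seed need not produce identical results.
    lock (syncRoot) {
      for (int i = 0; i < n; i++) p[i] = random.NextDouble() * 2.0 - 1.0;
    }
    return p;
  }

  public static void Main() {
    Parallel.For(0, 8, _ => SampleParameters(5));
    Console.WriteLine("done");
  }
}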
     
       }

+      UpdateBestSoFar(bestObjValue[0], bestHyperParameters, meanFunction, covarianceFunction);
+
       return bestObjValue[0];
+    }
+
+    // updates the overall best quality and overall best model for Analyze()
+    private void UpdateBestSoFar(double bestQ, double[] bestHyperParameters, IMeanFunction meanFunc, ICovarianceFunction covFunc) {
+      lock (problemStateLocker) {
+        if (bestQ > this.bestQ) {
+          this.bestQ = bestQ;
+          this.bestHyperParameters = new double[bestHyperParameters.Length];
+          Array.Copy(bestHyperParameters, this.bestHyperParameters, this.bestHyperParameters.Length);
+          this.meanFunc = meanFunc;
+          this.covFunc = covFunc;
+        }
+      }
     }

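UpdateBestSoFar() is the classic lock-protected best-so-far update: the shared fields are read and written only inside the lock, and the hyperparameter vector is copied defensively so the caller's buffer can be reused without corrupting the stored best. A minimal stand-alone sketch of the pattern (names are illustrative, not from the changeset):

using System;
using System.Threading.Tasks;

// Hedged sketch of the pattern behind UpdateBestSoFar(): several threads
// report candidate qualities, all shared fields are touched only inside the
// lock, and the parameter vector is copied defensively so later mutation by
// the caller cannot corrupt the stored best.
public sealed class BestSoFarTracker {
  private readonly object locker = new object();
  private double bestQ = double.NegativeInfinity;
  private double[] bestParameters;

  public void Update(double q, double[] parameters) {
    lock (locker) {
      if (q > bestQ) {
        bestQ = q;
        bestParameters = new double[parameters.Length];
        Array.Copy(parameters, bestParameters, parameters.Length);
      }
    }
  }

  public double BestQ { get { lock (locker) { return bestQ; } } }
}

public static class TrackerDemo {
  public static void Main() {
    var tracker = new BestSoFarTracker();
    Parallel.For(0, 100, i => tracker.Update(i * 0.01, new[] { (double)i }));
    Console.WriteLine(tracker.BestQ); // 0.99
  }
}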
     
         results["Best Tree"].Value = bestClone;
         results["Best Solution Quality"].Value = new DoubleValue(bestQuality);
-        results["Best Solution"].Value = CreateSolution(bestClone, random);
-      }
-    }
-
-    private IItem CreateSolution(ISymbolicExpressionTree tree, IRandom random) {
-      // again tune the hyper-parameters.
-      // this is suboptimal because 1) more effort and 2) we cannot be sure to find the same local optimum
-      var meanFunction = new MeanConst();
+        results["Best Solution"].Value = CreateSolution();
+      }
+    }
+
+    private IItem CreateSolution() {
       var problemData = ProblemData;
       var ds = problemData.Dataset;
       var targetVariable = problemData.TargetVariable;
       var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
-      var nVars = allowedInputVariables.Length;
       var trainingRows = problemData.TrainingIndices.ToArray();
-      var bestObjValue = new double[1] { double.MinValue };
-
-      // use the same covariance function for each restart
-      var covarianceFunction = TreeToCovarianceFunction(tree);
-      // data that is necessary for the objective function
-      var data = Tuple.Create(ds, targetVariable, allowedInputVariables, trainingRows, (IMeanFunction)meanFunction, covarianceFunction, bestObjValue);
-
-      // allocate hyperparameters
-      var hyperParameters = new double[meanFunction.GetNumberOfParameters(nVars) + covarianceFunction.GetNumberOfParameters(nVars) + 1]; // mean + cov + noise
-
-      // initialize hyperparameters
-      hyperParameters[0] = ds.GetDoubleValues(targetVariable).Average(); // mean const
-
-      for (int i = 0; i < covarianceFunction.GetNumberOfParameters(nVars); i++) {
-        hyperParameters[1 + i] = random.NextDouble() * 2.0 - 1.0;
-      }
-      hyperParameters[hyperParameters.Length - 1] = 1.0; // s² = exp(2), TODO: other inits better?
-
-      // use alglib.bfgs for hyper-parameter optimization ...
-      double epsg = 0;
-      double epsf = 0.00001;
-      double epsx = 0;
-      double stpmax = 1;
-      int maxits = ConstantOptIterations;
-      alglib.mincgstate state;
-      alglib.mincgreport rep;
-
-      alglib.mincgcreate(hyperParameters, out state);
-      alglib.mincgsetcond(state, epsg, epsf, epsx, maxits);
-      alglib.mincgsetstpmax(state, stpmax);
-      alglib.mincgoptimize(state, ObjectiveFunction, null, data);
-
-      alglib.mincgresults(state, out hyperParameters, out rep);
-
-      if (rep.terminationtype >= 0) {
-
-        var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, hyperParameters, meanFunction, covarianceFunction);
-        return model.CreateRegressionSolution(ProblemData);
-      } else return null;
+
+      lock (problemStateLocker) {
+        var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, bestHyperParameters, (IMeanFunction)meanFunc.Clone(), (ICovarianceFunction)covFunc.Clone());
+        model.FixParameters();
+        return model.CreateRegressionSolution((IRegressionProblemData)ProblemData.Clone());
+      }
     }

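The rewritten CreateSolution() no longer re-runs the randomized hyperparameter search that the removed comments call suboptimal ("more effort" and "we cannot be sure to find the same local optimum"); instead it rebuilds the model from the parameters captured under problemStateLocker when the best quality was observed. A toy sketch of why rebuilding from stored parameters is the deterministic choice (QuadraticModel is a hypothetical stand-in for GaussianProcessModel):

using System;

// Hedged sketch of the design change in CreateSolution(): re-running a
// randomized hyperparameter search would cost extra effort and could land in
// a different local optimum, so the model is instead rebuilt from the
// parameters stored when the best quality was observed.
public sealed class QuadraticModel {
  private readonly double[] p;
  public QuadraticModel(double[] parameters) { p = (double[])parameters.Clone(); }
  public double Predict(double x) { return p[0] + p[1] * x + p[2] * x * x; }
}

public static class RebuildDemo {
  public static void Main() {
    // parameters captured at evaluation time (under the problem's lock)
    double[] storedBest = { 0.1, -0.3, 2.0 };

    // deterministic: the reconstructed model is exactly the evaluated one
    var model = new QuadraticModel(storedBest);
    Console.WriteLine(model.Predict(1.5)); // 4.15

    // a fresh randomized search here could return different parameters, so
    // the reported solution might not match the reported quality
  }
}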
     
         var gradients = model.HyperparameterGradients;
         Array.Copy(gradients, grad, gradients.Length);
-      } catch (ArgumentException) {
+      }
+      catch (ArgumentException) {
         // building the GaussianProcessModel might fail, in this case we return the worst possible objective value
         func = 1.0E+300;
     
     private GaussianProcessCovarianceOptimizationProblem(GaussianProcessCovarianceOptimizationProblem original, Cloner cloner)
       : base(original, cloner) {
+      bestQ = original.bestQ;
+      meanFunc = cloner.Clone(original.meanFunc);
+      covFunc = cloner.Clone(original.covFunc);
+      if (original.bestHyperParameters != null) {
+        bestHyperParameters = new double[original.bestHyperParameters.Length];
+        Array.Copy(original.bestHyperParameters, bestHyperParameters, bestHyperParameters.Length);
+      }
     }
     public override IDeepCloneable Clone(Cloner cloner) {
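The extended cloning constructor copies the value field directly, routes the function objects through the Cloner, and duplicates the array so the clone never aliases the original's mutable state (note the null check must test original.bestHyperParameters, since the clone's own field is still unset at that point). A simplified, self-contained sketch of the same deep-copy pattern (stand-in types, not HeuristicLab's Cloner):

using System;

// Hedged sketch of the deep-copy pattern in the extended cloning constructor:
// value fields are copied directly and arrays are duplicated so the clone
// never aliases the original's mutable state. TrackerState is a simplified
// stand-in; HeuristicLab's Cloner for the function objects is omitted here.
public sealed class TrackerState {
  public double BestQ = double.NegativeInfinity;
  public double[] BestHyperParameters;

  public TrackerState() { }

  private TrackerState(TrackerState original) {
    BestQ = original.BestQ;
    if (original.BestHyperParameters != null) {
      BestHyperParameters = new double[original.BestHyperParameters.Length];
      Array.Copy(original.BestHyperParameters, BestHyperParameters,
                 BestHyperParameters.Length);
    }
  }

  public TrackerState Clone() { return new TrackerState(this); }
}

public static class CloneDemo {
  public static void Main() {
    var a = new TrackerState { BestQ = 1.0, BestHyperParameters = new[] { 0.5, -0.2 } };
    var b = a.Clone();
    b.BestHyperParameters[0] = 99.0;             // does not affect a
    Console.WriteLine(a.BestHyperParameters[0]); // 0.5
  }
}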