
Timestamp: 12/01/12 19:02:47 (12 years ago)
Author: gkronber
Message:

#1902: removed the HyperParameter class and changed the implementations of the covariance and mean functions to drop parameter-value caching and the associated event handlers. Instead, the actual covariance and mean functions can now be created as Func instances from a template and the specified parameter values. The mean and covariance function instances configured in the GUI are therefore templates in which the structure and the fixed parameters are specified.
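
To make the new scheme concrete, here is a minimal, self-contained sketch of the idea, not the actual HeuristicLab API: a template plus a concrete parameter vector yields plain Func delegates that evaluate the mean or covariance on a data matrix. The template names and parameterizations below (ConstantMean, SquaredExponential, log length-scale and log signal standard deviation) are illustrative assumptions only.

using System;

// Illustrative templates only (assumed names and parameterizations, not taken from this changeset).
public static class TemplateSketch {
  // A constant-mean template: the single parameter is the constant value.
  public static Func<double[,], int, double> ConstantMean(double[] p) {
    double c = p[0];
    return (x, row) => c;                       // mean for row 'row' of data matrix x
  }

  // An isotropic squared-exponential covariance template; parameters are assumed to be
  // the log length-scale and the log signal standard deviation.
  public static Func<double[,], int, int, double> SquaredExponential(double[] p) {
    double l2 = Math.Exp(2.0 * p[0]);           // squared length-scale
    double sf2 = Math.Exp(2.0 * p[1]);          // signal variance
    return (x, i, j) => {
      double d2 = 0.0;
      for (int c = 0; c < x.GetLength(1); c++) {
        double d = x[i, c] - x[j, c];
        d2 += d * d;
      }
      return sf2 * Math.Exp(-0.5 * d2 / l2);    // covariance of rows i and j
    };
  }
}

In the diff below the same idea surfaces as GetParameterizedMeanFunction / GetParameterizedCovarianceFunction, whose results expose Mean, Gradient, Covariance, CrossCovariance and CovarianceGradient members.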

File: 1 edited

  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

Legend:

    Unmodified (no marker)
  + Added
  - Removed

r8623 → r8982

     [Storable]
+    private double[] meanParameter;
+    [Storable]
+    private double[] covarianceParameter;
+
+    [Storable]
     private double[,] l;
     
       this.targetVariable = original.targetVariable;
       this.sqrSigmaNoise = original.sqrSigmaNoise;
+      if (original.meanParameter != null) {
+        this.meanParameter = (double[])original.meanParameter.Clone();
+      }
+      if (original.covarianceParameter != null) {
+        this.covarianceParameter = (double[])original.covarianceParameter.Clone();
+      }

       // shallow copies of arrays because they cannot be modified
     
       int nVariables = this.allowedInputVariables.Length;
-      this.meanFunction.SetParameter(hyp
+      meanParameter = hyp
         .Take(this.meanFunction.GetNumberOfParameters(nVariables))
-        .ToArray());
-      this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
-        .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
-        .ToArray());
+        .ToArray();
+
+      covarianceParameter = hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
+                                             .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
+                                             .ToArray();
       sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
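
For readability, a small stand-alone sketch of how this hunk partitions the flat hyperparameter vector. The helper below is illustrative and not part of the changeset; the parameter counts would come from GetNumberOfParameters, as shown above.

using System;
using System.Linq;

public static class HyperparameterSplitSketch {
  // hyp = [mean parameters | covariance parameters | log(sigma)]
  public static (double[] meanP, double[] covP, double sqrSigmaNoise) Split(double[] hyp, int nMeanParams, int nCovParams) {
    if (hyp.Length != nMeanParams + nCovParams + 1)
      throw new ArgumentException("unexpected length of the hyperparameter vector");
    var meanP = hyp.Take(nMeanParams).ToArray();
    var covP = hyp.Skip(nMeanParams).Take(nCovParams).ToArray();
    double sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());  // same noise transformation as above
    return (meanP, covP, sqrSigmaNoise);
  }
}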
     
       // calculate means and covariances
-      double[] m = meanFunction.GetMean(x);
+      var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, x.GetLength(1)));
+      double[] m = Enumerable.Range(0, x.GetLength(0))
+        .Select(r => mean.Mean(x, r))
+        .ToArray();
+
+      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
       for (int i = 0; i < n; i++) {
         for (int j = i; j < n; j++) {
-          l[j, i] = covarianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;
+          l[j, i] = cov.Covariance(x, i, j) / sqrSigmaNoise;
           if (j == i) l[j, i] += 1.0;
         }
       }
+

       // cholesky decomposition
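
As a stand-alone restatement of the loop above (illustrative only, using a plain Func in place of the parameterized covariance object): the lower triangle of K / sigma^2 + I is assembled and then handed to the Cholesky decomposition.

using System;

public static class KernelMatrixSketch {
  // Builds the lower triangle of K / sqrSigmaNoise + I, mirroring the loop above.
  public static double[,] BuildScaledKernel(Func<double[,], int, int, double> cov, double[,] x, double sqrSigmaNoise) {
    int n = x.GetLength(0);
    var l = new double[n, n];
    for (int i = 0; i < n; i++) {
      for (int j = i; j < n; j++) {
        l[j, i] = cov(x, i, j) / sqrSigmaNoise;   // scaled covariance, lower triangle
        if (j == i) l[j, i] += 1.0;               // + identity on the diagonal
      }
    }
    return l;
  }
}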
     
       double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
-      for (int i = 0; i < meanGradients.Length; i++) {
-        var meanGrad = meanFunction.GetGradients(i, x);
-        meanGradients[i] = -Util.ScalarProd(meanGrad, alpha);
+      for (int k = 0; k < meanGradients.Length; k++) {
+        var meanGrad = Enumerable.Range(0, alpha.Length)
+        .Select(r => mean.Gradient(x, r, k));
+        meanGradients[k] = -Util.ScalarProd(meanGrad, alpha);
       }
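
The rewritten loop above is the usual gradient of the negative log likelihood with respect to the k-th mean parameter, -sum_r alpha[r] * d m(x_r) / d theta_k. A stand-alone sketch with an assumed Func-based gradient (illustrative, not part of the changeset):

using System;
using System.Linq;

public static class MeanGradientSketch {
  // meanGradient(x, r, k) is assumed to return d mean(x_r) / d theta_k.
  public static double NegLogLikelihoodGradient(Func<double[,], int, int, double> meanGradient,
                                                double[,] x, double[] alpha, int k) {
    return -Enumerable.Range(0, alpha.Length).Sum(r => meanGradient(x, r, k) * alpha[r]);
  }
}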
     
         for (int i = 0; i < n; i++) {
           for (int j = 0; j < i; j++) {
-            var g = covarianceFunction.GetGradient(x, i, j).ToArray();
+            var g = cov.CovarianceGradient(x, i, j).ToArray();
             for (int k = 0; k < covGradients.Length; k++) {
               covGradients[k] += lCopy[i, j] * g[k];
     
           }

-          var gDiag = covarianceFunction.GetGradient(x, i, i).ToArray();
+          var gDiag = cov.CovarianceGradient(x, i, i).ToArray();
           for (int k = 0; k < covGradients.Length; k++) {
             // diag
     
     }

+    // is called by the solution creator to set all parameter values of the covariance and mean function
+    // to the optimized values (necessary to make the values visible in the GUI)
+    public void FixParameters() {
+      covarianceFunction.SetParameter(covarianceParameter);
+      meanFunction.SetParameter(meanParameter);
+      covarianceParameter = new double[0];
+      meanParameter = new double[0];
+    }
+
     #region IRegressionModel Members
     public IEnumerable<double> GetEstimatedValues(Dataset dataset, IEnumerable<int> rows) {
     
       int n = x.GetLength(0);
       var Ks = new double[newN, n];
-      var ms = meanFunction.GetMean(newX);
+      var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, newX.GetLength(1)));
+      var ms = Enumerable.Range(0, newX.GetLength(0))
+      .Select(r => mean.Mean(newX, r))
+      .ToArray();
+      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));
       for (int i = 0; i < newN; i++) {
         for (int j = 0; j < n; j++) {
-          Ks[i, j] = covarianceFunction.GetCrossCovariance(x, newX, j, i);
+          Ks[i, j] = cov.CrossCovariance(x, newX, j, i);
         }
       }
     
       var kss = new double[newN];
       double[,] sWKs = new double[n, newN];
+      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));

       // for stddev
       for (int i = 0; i < newN; i++)
-        kss[i] = covarianceFunction.GetCovariance(newX, i, i);
+        kss[i] = cov.Covariance(newX, i, i);

       for (int i = 0; i < newN; i++) {
         for (int j = 0; j < n; j++) {
-          sWKs[j, i] = covarianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
+          sWKs[j, i] = cov.CrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
        }
      }
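
For context, the quantities computed in the two hunks above feed the standard Gaussian process predictive equations; the following is a general reminder under that assumption, not code from this file. With alpha obtained during training from (K + sigma^2 I) * alpha = y - m, the predicted mean at new row i is ms[i] + Ks[i, :] . alpha, and with v = L^-1 * sWKs (a triangular solve against the Cholesky factor) the predicted variance is kss[i] - sum_j v[j, i]^2.

using System;

public static class GpPredictionSketch {
  // Predicted mean for new row i, given cross-covariances Ks and the training alpha (illustrative).
  public static double PredictedMean(double[] ms, double[,] Ks, double[] alpha, int i) {
    double s = ms[i];
    for (int j = 0; j < alpha.Length; j++) s += Ks[i, j] * alpha[j];
    return s;
  }

  // Predicted variance for new row i, given kss[i] = cov(x*_i, x*_i) and
  // v = L^-1 * sWKs, where column i of v corresponds to new row i (illustrative).
  public static double PredictedVariance(double[] kss, double[,] v, int i) {
    double s = kss[i];
    for (int j = 0; j < v.GetLength(0); j++) s -= v[j, i] * v[j, i];
    return s;
  }
}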