
Timestamp:
08/14/12 13:25:17
Author:
gkronber
Message:

#1902 changed interface for covariance functions to improve readability, fixed several bugs in the covariance functions and in the line chart for Gaussian process models.

File:
1 edited

  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs
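
As the diff below shows, the interface change replaces the stateful SetData(double[,] x) / SetData(double[,] x, double[,] xt) calls with methods that receive the data matrix directly, so a covariance function no longer carries mutable state between evaluations. A minimal sketch of the contract the factors are expected to satisfy after this change, inferred from the call sites in CovarianceProd (the interface name ICovarianceFunction and the comments are assumptions, not part of the changeset):

    using System.Collections.Generic;

    // Hypothetical sketch only, inferred from how CovarianceProd uses its factors after r8484.
    public interface ICovarianceFunction {
      // number of hyperparameters for a data set with the given number of input variables
      int GetNumberOfParameters(int numberOfVariables);

      // set the hyperparameter vector (length must match GetNumberOfParameters)
      void SetParameter(double[] hyp);

      // covariance between rows i and j of the data matrix x
      double GetCovariance(double[,] x, int i, int j);

      // partial derivative of the covariance w.r.t. each hyperparameter, in order
      IEnumerable<double> GetGradient(double[,] x, int i, int j);

      // covariance between row i of x and row j of xt (e.g. training vs. test rows)
      double GetCrossCovariance(double[,] x, double[,] xt, int i, int j);
    }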

r8463 → r8484 (removed lines are prefixed with -, added lines with +)

     public int GetNumberOfParameters(int numberOfVariables) {
       this.numberOfVariables = numberOfVariables;
-      return factors.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
+      return factors.Select(f => f.GetNumberOfParameters(numberOfVariables)).Sum();
     }

     public void SetParameter(double[] hyp) {
+      if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
       int offset = 0;
       foreach (var t in factors) {
       … (unchanged lines not shown)
       }
     }
-    public void SetData(double[,] x) {
-      SetData(x, x);
+
+    public double GetCovariance(double[,] x, int i, int j) {
+      return factors.Select(f => f.GetCovariance(x, i, j)).Aggregate((a, b) => a * b);
     }

-    public void SetData(double[,] x, double[,] xt) {
-      foreach (var t in factors) {
-        t.SetData(x, xt);
+    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+      //if (cachedParameterMap == null) {
+      //  CalculateParameterMap();
+      //}
+      //int ti = cachedParameterMap[k].Item1;
+      //k = cachedParameterMap[k].Item2;
+      //double gradient = 1.0;
+      //for (int ii = 0; ii < factors.Count; ii++) {
+      //  var f = factors[ii];
+      //  if (ii == ti) {
+      //    gradient *= f.GetGradient(x, i, j, k);
+      //  } else {
+      //    gradient *= f.GetCovariance(x, i, j);
+      //  }
+      //}
+      //return gradient;
+      var covariances = factors.Select(f => f.GetCovariance(x, i, j)).ToArray();
+      for (int ii = 0; ii < factors.Count; ii++) {
+        foreach (var g in factors[ii].GetGradient(x, i, j)) {
+          double res = g;
+          for (int jj = 0; jj < covariances.Length; jj++)
+            if (ii != jj) res *= covariances[jj];
+          yield return res;
+        }
+      }
     }

-    public double GetCovariance(int i, int j) {
-      return factors.Select(t => t.GetCovariance(i, j)).Aggregate((a, b) => a * b);
+    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+      return factors.Select(f => f.GetCrossCovariance(x, xt, i, j)).Aggregate((a, b) => a * b);
     }

     private Dictionary<int, Tuple<int, int>> cachedParameterMap;
-    public double GetGradient(int i, int j, int k) {
-      if (cachedParameterMap == null) {
-        CalculateParameterMap();
-      }
-      int ti = cachedParameterMap[k].Item1;
-      k = cachedParameterMap[k].Item2;
-      double res = 1.0;
-      for (int ii = 0; ii < factors.Count; ii++) {
-        var f = factors[ii];
-        if (ii == ti) {
-          res *= f.GetGradient(i, j, k);
-        } else {
-          res *= f.GetCovariance(i, j);
-        }
-      }
-      return res;
-    }
-
     private void ClearCache() {
       cachedParameterMap = null;
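
The new GetGradient applies the product rule: for a hyperparameter that belongs to factor m, the derivative of the product covariance is that factor's gradient times the covariances of all other factors, and the per-factor gradients are concatenated in factor order (matching the offset-based parameter layout in SetParameter). A small, self-contained numeric sketch of that computation; the values and the class name ProductRuleDemo are made up for illustration:

    using System;
    using System.Collections.Generic;

    class ProductRuleDemo {
      static void Main() {
        // Two factors with covariances k1 = 2.0 and k2 = 5.0 at some fixed (x, i, j),
        // and per-factor gradients dk1/dtheta1 = 0.5 and dk2/dtheta2 = 3.0.
        // Product covariance: k1 * k2 = 10.0.
        // Concatenated gradient: [ dk1/dtheta1 * k2, k1 * dk2/dtheta2 ] = [ 2.5, 6.0 ].
        double[] covariances = { 2.0, 5.0 };
        double[][] factorGradients = { new[] { 0.5 }, new[] { 3.0 } };

        var gradient = new List<double>();
        for (int m = 0; m < covariances.Length; m++) {
          foreach (var g in factorGradients[m]) {
            double res = g;
            for (int l = 0; l < covariances.Length; l++)
              if (l != m) res *= covariances[l];
            gradient.Add(res);
          }
        }

        Console.WriteLine(string.Join(", ", gradient));  // prints 2.5, 6
      }
    }

Because each factor reports its own gradient entries, the flat parameter index no longer has to be mapped back to a (factor, local index) pair here, which is presumably why the cachedParameterMap lookup from the removed GetGradient(i, j, k) is no longer needed in this method.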