Changeset 8484 for trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs
- Timestamp: 08/14/12 13:25:17 (12 years ago)
- Files: 1 edited
Legend:
- Unmodified
- Added
- Removed
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs
r8463 r8484
  73  73      public int GetNumberOfParameters(int numberOfVariables) {
  74  74        this.numberOfVariables = numberOfVariables;
  75      -      return factors.Select( t => t.GetNumberOfParameters(numberOfVariables)).Sum();
      75  +      return factors.Select(f => f.GetNumberOfParameters(numberOfVariables)).Sum();
  76  76      }
  77  77
  78  78      public void SetParameter(double[] hyp) {
      79  +      if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
  79  80        int offset = 0;
  80  81        foreach (var t in factors) {
   …   …
  84  85        }
  85  86      }
  86      -    public void SetData(double[,] x) {
  87      -      SetData(x, x);
      87  +
      88  +    public double GetCovariance(double[,] x, int i, int j) {
      89  +      return factors.Select(f => f.GetCovariance(x, i, j)).Aggregate((a, b) => a * b);
  88  90      }
  89  91
  90      -    public void SetData(double[,] x, double[,] xt) {
  91      -      foreach (var t in factors) {
  92      -        t.SetData(x, xt);
      92  +    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
      93  +      //if (cachedParameterMap == null) {
      94  +      //  CalculateParameterMap();
      95  +      //}
      96  +      //int ti = cachedParameterMap[k].Item1;
      97  +      //k = cachedParameterMap[k].Item2;
      98  +      //double gradient = 1.0;
      99  +      //for (int ii = 0; ii < factors.Count; ii++) {
     100  +      //  var f = factors[ii];
     101  +      //  if (ii == ti) {
     102  +      //    gradient *= f.GetGradient(x, i, j, k);
     103  +      //  } else {
     104  +      //    gradient *= f.GetCovariance(x, i, j);
     105  +      //  }
     106  +      //}
     107  +      //return gradient;
     108  +      var covariances = factors.Select(f => f.GetCovariance(x, i, j)).ToArray();
     109  +      for (int ii = 0; ii < factors.Count; ii++) {
     110  +        foreach (var g in factors[ii].GetGradient(x, i, j)) {
     111  +          double res = g;
     112  +          for (int jj = 0; jj < covariances.Length; jj++)
     113  +            if (ii != jj) res *= covariances[jj];
     114  +          yield return res;
     115  +        }
  93 116        }
  94 117      }
  95 118
  96      -    public double GetCovariance(int i, int j) {
  97      -      return factors.Select( t => t.GetCovariance(i, j)).Aggregate((a, b) => a * b);
     119  +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
     120  +      return factors.Select(f => f.GetCrossCovariance(x, xt, i, j)).Aggregate((a, b) => a * b);
  98 121      }
  99 122
 100 123      private Dictionary<int, Tuple<int, int>> cachedParameterMap;
 101      -    public double GetGradient(int i, int j, int k) {
 102      -      if (cachedParameterMap == null) {
 103      -        CalculateParameterMap();
 104      -      }
 105      -      int ti = cachedParameterMap[k].Item1;
 106      -      k = cachedParameterMap[k].Item2;
 107      -      double res = 1.0;
 108      -      for (int ii = 0; ii < factors.Count; ii++) {
 109      -        var f = factors[ii];
 110      -        if (ii == ti) {
 111      -          res *= f.GetGradient(i, j, k);
 112      -        } else {
 113      -          res *= f.GetCovariance(i, j);
 114      -        }
 115      -      }
 116      -      return res;
 117      -    }
 118      -
 119 124      private void ClearCache() {
 120 125        cachedParameterMap = null;
Note: See TracChangeset
for help on using the changeset viewer.