Changeset 8484 for trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
Timestamp: 08/14/12 13:25:17
Location: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
Files: 1 added, 13 edited
Legend: lines prefixed with + were added, lines prefixed with - were removed, unprefixed lines are unmodified context, and … marks omitted lines.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceConst.cs
r8473 → r8484

  using System;
+ using System.Collections.Generic;
  using HeuristicLab.Common;
  using HeuristicLab.Core;
  …
    public void SetParameter(double[] hyp) {
+     if (hyp.Length != 1) throw new ArgumentException("CovarianceConst has only one hyperparameter", "k");
      this.sf2 = Math.Exp(2 * hyp[0]);
    }
-   public void SetData(double[,] x) {
-     // nothing to do
-   }
-
-   public void SetData(double[,] x, double[,] xt) {
-     // nothing to do
-   }
-
-   public double GetCovariance(int i, int j) {
+   public double GetCovariance(double[,] x, int i, int j) {
      return sf2;
    }

-   public double GetGradient(int i, int j, int k) {
-     if (k != 0) throw new ArgumentException("CovarianceConst has only one hyperparameters", "k");
-     return 2 * sf2;
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     yield return 2 * sf2;
+   }
+
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     return sf2;
    }
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs
r8455 → r8484

  using System;
+ using System.Collections.Generic;
  using HeuristicLab.Common;
  using HeuristicLab.Core;
  …
  [Item(Name = "CovarianceLinear", Description = "Linear covariance function for Gaussian processes.")]
  public class CovarianceLinear : Item, ICovarianceFunction {
-   [Storable]
-   private double[,] x;
-   [Storable]
-   private double[,] xt;
-
-   private double[,] k;
-   private bool symmetric;
-
    public int GetNumberOfParameters(int numberOfVariables) {
      return 0;
  …
    protected CovarianceLinear(CovarianceLinear original, Cloner cloner)
      : base(original, cloner) {
-     if (original.x != null) {
-       this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-       Array.Copy(original.x, this.x, x.Length);
-
-       this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-       Array.Copy(original.xt, this.xt, xt.Length);
-
-       this.k = new double[original.k.GetLength(0), original.k.GetLength(1)];
-       Array.Copy(original.k, this.k, k.Length);
-     }
-     this.symmetric = original.symmetric;
    }
    public CovarianceLinear()
  …
    public void SetParameter(double[] hyp) {
      if (hyp.Length > 0) throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
-     k = null;
    }

-   public void SetData(double[,] x) {
-     SetData(x, x);
-     this.symmetric = true;
+   public double GetCovariance(double[,] x, int i, int j) {
+     return Util.ScalarProd(Util.GetRow(x, i), Util.GetRow(x, j));
    }

-   public void SetData(double[,] x, double[,] xt) {
-     this.x = x;
-     this.xt = xt;
-     this.symmetric = false;
-
-     k = null;
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     yield break;
    }

-   public double GetCovariance(int i, int j) {
-     if (k == null) CalculateInnerProduct();
-     return k[i, j];
-   }
-
-   public double GetGradient(int i, int j, int k) {
-     throw new NotSupportedException("CovarianceLinear does not have hyperparameters.");
-   }
-
-   private void CalculateInnerProduct() {
-     if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-     int rows = x.GetLength(0);
-     int cols = xt.GetLength(0);
-     k = new double[rows, cols];
-     if (symmetric) {
-       for (int i = 0; i < rows; i++) {
-         for (int j = i; j < cols; j++) {
-           k[i, j] = Util.ScalarProd(Util.GetRow(x, i), Util.GetRow(x, j));
-           k[j, i] = k[i, j];
-         }
-       }
-     } else {
-       for (int i = 0; i < rows; i++) {
-         for (int j = 0; j < cols; j++) {
-           k[i, j] = Util.ScalarProd(Util.GetRow(x, i), Util.GetRow(xt, j));
-         }
-       }
-     }
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     return Util.ScalarProd(Util.GetRow(x, i), Util.GetRow(xt, j));
    }
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceNoise.cs
r8473 → r8484

  using System;
+ using System.Collections.Generic;
  using HeuristicLab.Common;
  using HeuristicLab.Core;
  …
      this.sf2 = Math.Exp(2 * hyp[0]);
    }
-   public void SetData(double[,] x) {
-     // nothing to do
+   public double GetCovariance(double[,] x, int i, int j) {
+     return sf2;
    }

-   public void SetData(double[,] x, double[,] xt) {
-     // nothing to do
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     yield return 2 * sf2;
    }

-   public double GetCovariance(int i, int j) {
-     if (i == j) return sf2;
-     else return 0.0;
-   }
-
-   public double GetGradient(int i, int j, int k) {
-     if (k != 0) throw new ArgumentException("CovarianceConst has only one hyperparameters", "k");
-     if (i == j)
-       return 2 * sf2;
-     else
-       return 0.0;
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     return 0.0;
    }
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovariancePeriodic.cs
r8473 → r8484

  using System;
+ using System.Collections.Generic;
  using HeuristicLab.Common;
  using HeuristicLab.Core;
  …
  [Item(Name = "CovariancePeriodic", Description = "Periodic covariance function for Gaussian processes.")]
  public class CovariancePeriodic : Item, ICovarianceFunction {
-   [Storable]
-   private double[,] x;
-   [Storable]
-   private double[,] xt;
    [Storable]
    private double sf2;
  …
    public double Period { get { return p; } }

-   private bool symmetric;
-
-   private double[,] sd;
    public int GetNumberOfParameters(int numberOfVariables) {
      return 3;
  …
    protected CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
      : base(original, cloner) {
-     if (original.x != null) {
-       x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-       Array.Copy(original.x, x, x.Length);
-       xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-       Array.Copy(original.xt, xt, xt.Length);
-     }
      sf2 = original.sf2;
      l = original.l;
      p = original.p;
-     symmetric = original.symmetric;
    }
    public CovariancePeriodic()
  …
      this.p = Math.Exp(hyp[1]);
      this.sf2 = Math.Exp(2 * hyp[2]);
-     // sf2 = Math.Min(10E6, sf2); // upper limit for the scale
-
-     sd = null;
-   }
-   public void SetData(double[,] x) {
-     SetData(x, x);
-     this.symmetric = true;
    }

-   public void SetData(double[,] x, double[,] xt) {
-     this.x = x;
-     this.xt = xt;
-     this.symmetric = false;
-
-     sd = null;
-   }
-
-   public double GetCovariance(int i, int j) {
-     if (sd == null) CalculateSquaredDistances();
-     double k = sd[i, j];
+   public double GetCovariance(double[,] x, int i, int j) {
+     double k = i == j ? 0.0 : GetDistance(x, x, i, j);
      k = Math.PI * k / p;
      k = Math.Sin(k) / l;
  …
    }

-   public double GetGradient(int i, int j, int k) {
-     double v = Math.PI * sd[i, j] / p;
-     switch (k) {
-       case 0: {
-         double newK = Math.Sin(v) / l;
-         newK = newK * newK;
-         return 4 * sf2 * Math.Exp(-2 * newK) * newK;
-       }
-       case 1: {
-         double r = Math.Sin(v) / l;
-         return 4 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
-       }
-       case 2: {
-         double newK = Math.Sin(v) / l;
-         newK = newK * newK;
-         return 2 * sf2 * Math.Exp(-2 * newK);
-       }
-       default: {
-         throw new ArgumentException("CovariancePeriodic only has three hyperparameters.", "k");
-       }
-     }
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     double v = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j) / p;
+     double gradient = Math.Sin(v) / l;
+     gradient *= gradient;
+     yield return 4.0 * sf2 * Math.Exp(-2.0 * gradient) * gradient;
+     double r = Math.Sin(v) / l;
+     yield return 4.0 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
+     yield return 2.0 * sf2 * Math.Exp(-2 * gradient);
    }

-   private void CalculateSquaredDistances() {
-     if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-     int rows = x.GetLength(0);
-     int cols = xt.GetLength(0);
-     sd = new double[rows, cols];
-
-     if (symmetric) {
-       for (int i = 0; i < rows; i++) {
-         for (int j = i; j < cols; j++) {
-           sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(x, j)));
-           sd[j, i] = sd[i, j];
-         }
-       }
-     } else {
-       for (int i = 0; i < rows; i++) {
-         for (int j = 0; j < cols; j++) {
-           sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, j)));
-         }
-       }
-     }
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     double k = GetDistance(x, xt, i, j);
+     k = Math.PI * k / p;
+     k = Math.Sin(k) / l;
+     k = k * k;
+
+     return sf2 * Math.Exp(-2.0 * k);
+   }
+
+   private double GetDistance(double[,] x, double[,] xt, int i, int j) {
+     return Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, j)));
    }
  }
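For reference, the quantity these refactored methods evaluate is the standard periodic covariance. The formula below is a reading of the committed code (with hyp = (log l, log p, log sf)), not text from the changeset itself:

k_per(x_i, x_j) = \sigma_f^2 \exp\!\left(-\frac{2\,\sin^2\!\big(\pi\, d_{ij}/p\big)}{\ell^2}\right), \qquad d_{ij} = \lVert x_i - x_j \rVert_2,

where \ell = \exp(\text{hyp}[0]), p = \exp(\text{hyp}[1]), and \sigma_f^2 = \exp(2\,\text{hyp}[2]).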
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs
r8463 → r8484

    public int GetNumberOfParameters(int numberOfVariables) {
      this.numberOfVariables = numberOfVariables;
-     return factors.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
+     return factors.Select(f => f.GetNumberOfParameters(numberOfVariables)).Sum();
    }

    public void SetParameter(double[] hyp) {
+     if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
      int offset = 0;
      foreach (var t in factors) {
  …
      }
    }
-   public void SetData(double[,] x) {
-     SetData(x, x);
+
+   public double GetCovariance(double[,] x, int i, int j) {
+     return factors.Select(f => f.GetCovariance(x, i, j)).Aggregate((a, b) => a * b);
    }

-   public void SetData(double[,] x, double[,] xt) {
-     foreach (var t in factors) {
-       t.SetData(x, xt);
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     //if (cachedParameterMap == null) {
+     //  CalculateParameterMap();
+     //}
+     //int ti = cachedParameterMap[k].Item1;
+     //k = cachedParameterMap[k].Item2;
+     //double gradient = 1.0;
+     //for (int ii = 0; ii < factors.Count; ii++) {
+     //  var f = factors[ii];
+     //  if (ii == ti) {
+     //    gradient *= f.GetGradient(x, i, j, k);
+     //  } else {
+     //    gradient *= f.GetCovariance(x, i, j);
+     //  }
+     //}
+     //return gradient;
+     var covariances = factors.Select(f => f.GetCovariance(x, i, j)).ToArray();
+     for (int ii = 0; ii < factors.Count; ii++) {
+       foreach (var g in factors[ii].GetGradient(x, i, j)) {
+         double res = g;
+         for (int jj = 0; jj < covariances.Length; jj++)
+           if (ii != jj) res *= covariances[jj];
+         yield return res;
+       }
      }
    }

-   public double GetCovariance(int i, int j) {
-     return factors.Select(t => t.GetCovariance(i, j)).Aggregate((a, b) => a * b);
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     return factors.Select(f => f.GetCrossCovariance(x, xt, i, j)).Aggregate((a, b) => a * b);
    }

    private Dictionary<int, Tuple<int, int>> cachedParameterMap;
-   public double GetGradient(int i, int j, int k) {
-     if (cachedParameterMap == null) {
-       CalculateParameterMap();
-     }
-     int ti = cachedParameterMap[k].Item1;
-     k = cachedParameterMap[k].Item2;
-     double res = 1.0;
-     for (int ii = 0; ii < factors.Count; ii++) {
-       var f = factors[ii];
-       if (ii == ti) {
-         res *= f.GetGradient(i, j, k);
-       } else {
-         res *= f.GetCovariance(i, j);
-       }
-     }
-     return res;
-   }
-
    private void ClearCache() {
      cachedParameterMap = null;
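The new GetGradient in CovarianceProd applies the product rule across factors: each gradient component of one factor is multiplied by the covariances of all other factors. As a formula (a paraphrase of the loop above, where θ is a hyperparameter of factor m):

\frac{\partial}{\partial \theta} \prod_{q} k_q(x_i, x_j) = \frac{\partial k_m(x_i, x_j)}{\partial \theta} \prod_{q \neq m} k_q(x_i, x_j)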
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceRQiso.cs
r8473 → r8484

  using System;
+ using System.Collections.Generic;
  using System.Linq;
  using HeuristicLab.Common;
  …
  public class CovarianceRQiso : Item, ICovarianceFunction {
    [Storable]
-   private double[,] x;
-   [Storable]
-   private double[,] xt;
-   [Storable]
    private double sf2;
    public double Scale { get { return sf2; } }
  …
    private double alpha;
    public double Shape { get { return alpha; } }
-   [Storable]
-   private bool symmetric;
-   private double[,] d2;

    [StorableConstructor]
  …
    protected CovarianceRQiso(CovarianceRQiso original, Cloner cloner)
      : base(original, cloner) {
-     if (original.x != null) {
-       this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-       Array.Copy(original.x, this.x, x.Length);
-
-       this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-       Array.Copy(original.xt, this.xt, xt.Length);
-
-       this.d2 = new double[original.d2.GetLength(0), original.d2.GetLength(1)];
-       Array.Copy(original.d2, this.d2, d2.Length);
-       this.sf2 = original.sf2;
-     }
      this.sf2 = original.sf2;
      this.l = original.l;
      this.alpha = original.alpha;
-     this.symmetric = original.symmetric;
    }
  …
    public void SetParameter(double[] hyp) {
+     if (hyp.Length != 3) throw new ArgumentException("CovarianceRQiso has three hyperparameters", "k");
      this.l = Math.Exp(hyp[0]);
      this.sf2 = Math.Exp(2 * hyp[1]);
      this.alpha = Math.Exp(hyp[2]);
-     d2 = null;
-   }
-   public void SetData(double[,] x) {
-     SetData(x, x);
-     this.symmetric = true;
    }

-   public void SetData(double[,] x, double[,] xt) {
-     this.symmetric = false;
-     this.x = x;
-     this.xt = xt;
-     d2 = null;
+   public double GetCovariance(double[,] x, int i, int j) {
+     double lInv = 1.0 / l;
+     double d = i == j
+       ? 0.0
+       : Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(x, j).Select(e => e * lInv));
+     return sf2 * Math.Pow(1 + 0.5 * d / alpha, -alpha);
    }

-   public double GetCovariance(int i, int j) {
-     if (d2 == null) CalculateSquaredDistances();
-     return sf2 * Math.Pow(1 + 0.5 * d2[i, j] / alpha, -alpha);
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     double lInv = 1.0 / l;
+     double d = i == j
+       ? 0.0
+       : Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(x, j).Select(e => e * lInv));
+
+     double b = 1 + 0.5 * d / alpha;
+     yield return sf2 * Math.Pow(b, -alpha - 1) * d;
+     yield return 2 * sf2 * Math.Pow(b, -alpha);
+     yield return sf2 * Math.Pow(b, -alpha) * (0.5 * d / b - alpha * Math.Log(b));
    }

-   public double GetGradient(int i, int j, int k) {
-     switch (k) {
-       case 0: return sf2 * Math.Pow(1 + 0.5 * d2[i, j] / alpha, -alpha - 1) * d2[i, j];
-       case 1: return 2 * sf2 * Math.Pow((1 + 0.5 * d2[i, j] / alpha), (-alpha));
-       case 2: {
-         double g = (1 + 0.5 * d2[i, j] / alpha);
-         g = sf2 * Math.Pow(g, -alpha) * (0.5 * d2[i, j] / g - alpha * Math.Log(g));
-         return g;
-       }
-       default: throw new ArgumentException("CovarianceRQiso has three hyperparameters", "k");
-     }
-   }
-
-   private void CalculateSquaredDistances() {
-     if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-     int rows = x.GetLength(0);
-     int cols = xt.GetLength(0);
-     d2 = new double[rows, cols];
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
      double lInv = 1.0 / l;
-     if (symmetric) {
-       for (int i = 0; i < rows; i++) {
-         for (int j = i; j < rows; j++) {
-           d2[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
-           d2[j, i] = d2[i, j];
-         }
-       }
-     } else {
-       for (int i = 0; i < rows; i++) {
-         for (int j = 0; j < cols; j++) {
-           d2[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
-         }
-       }
-     }
+     double d = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
+     return sf2 * Math.Pow(1 + 0.5 * d / alpha, -alpha);
    }
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEard.cs
r8473 → r8484

  using System;
+ using System.Collections.Generic;
  using System.Linq;
  using HeuristicLab.Common;
  …
  [Item(Name = "CovarianceSEard", Description = "Squared exponential covariance function with automatic relevance determination for Gaussian processes.")]
  public class CovarianceSEard : Item, ICovarianceFunction {
-   [Storable]
-   private double[,] x;
-   [Storable]
-   private double[,] xt;
    [Storable]
    private double sf2;
  …
    }

-   private double[,] sd;
-   private bool symmetric;
-
    public int GetNumberOfParameters(int numberOfVariables) {
      return numberOfVariables + 1;
  …
    protected CovarianceSEard(CovarianceSEard original, Cloner cloner)
      : base(original, cloner) {
-     if (original.x != null) {
-       this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-       Array.Copy(original.x, this.x, x.Length);
-
-       this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-       Array.Copy(original.xt, this.xt, xt.Length);
-
-       this.sd = new double[original.sd.GetLength(0), original.sd.GetLength(1)];
-       Array.Copy(original.sd, this.sd, sd.Length);
-
+     if (original.l != null) {
        this.l = new double[original.l.Length];
        Array.Copy(original.l, this.l, l.Length);
      }
      this.sf2 = original.sf2;
-     this.symmetric = original.symmetric;
    }
    public CovarianceSEard()
  …
      this.l = hyp.Take(hyp.Length - 1).Select(Math.Exp).ToArray();
      this.sf2 = Math.Exp(2 * hyp[hyp.Length - 1]);
-     // sf2 = Math.Min(10E6, sf2); // upper limit for the scale
-
-     sd = null;
    }

-   public void SetData(double[,] x) {
-     SetData(x, x);
-     this.symmetric = true;
+   public double GetCovariance(double[,] x, int i, int j) {
+     double d = i == j
+       ? 0.0
+       : Util.SqrDist(Util.GetRow(x, i).Select((e, k) => e / l[k]),
+                      Util.GetRow(x, j).Select((e, k) => e / l[k]));
+     return sf2 * Math.Exp(-d / 2.0);
    }

-   public void SetData(double[,] x, double[,] xt) {
-     this.x = x;
-     this.xt = xt;
-     this.symmetric = false;
-
-     sd = null;
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     double d = i == j
+       ? 0.0
+       : Util.SqrDist(Util.GetRow(x, i).Select((e, ii) => e / l[ii]),
+                      Util.GetRow(x, j).Select((e, ii) => e / l[ii]));
+
+     for (int ii = 0; ii < l.Length; ii++) {
+       double sqrDist = Util.SqrDist(x[i, ii] / l[ii], x[j, ii] / l[ii]);
+       yield return sf2 * Math.Exp(d / 2.0) * sqrDist;
+     }
+     yield return 2.0 * sf2 * Math.Exp(d / 2.0);
    }

-   public double GetCovariance(int i, int j) {
-     if (sd == null) CalculateSquaredDistances();
-     return sf2 * Math.Exp(-sd[i, j] / 2.0);
-   }
-
-   public double GetGradient(int i, int j, int k) {
-     if (k < l.Length) {
-       double sqrDist = Util.SqrDist(x[i, k] / l[k], xt[j, k] / l[k]);
-       return sf2 * Math.Exp(-sd[i, j] / 2.0) * sqrDist;
-     } else if (k == l.Length) {
-       return 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
-     } else {
-       throw new ArgumentException("CovarianceSEard has dimension+1 hyperparameters.", "k");
-     }
-   }
-
-   private void CalculateSquaredDistances() {
-     if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-     int rows = x.GetLength(0);
-     int cols = xt.GetLength(0);
-     sd = new double[rows, cols];
-     if (symmetric) {
-       for (int i = 0; i < rows; i++) {
-         for (int j = i; j < cols; j++) {
-           sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select((e, k) => e / l[k]),
-                                   Util.GetRow(xt, j).Select((e, k) => e / l[k]));
-           sd[j, i] = sd[i, j];
-         }
-       }
-     } else {
-       for (int i = 0; i < rows; i++) {
-         for (int j = 0; j < cols; j++) {
-           sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select((e, k) => e / l[k]),
-                                   Util.GetRow(xt, j).Select((e, k) => e / l[k]));
-         }
-       }
-     }
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     double d = Util.SqrDist(Util.GetRow(x, i).Select((e, k) => e / l[k]), Util.GetRow(xt, j).Select((e, k) => e / l[k]));
+     return sf2 * Math.Exp(-d / 2.0);
    }
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEiso.cs
r8473 → r8484

  using System;
+ using System.Collections.Generic;
  using System.Linq;
  using HeuristicLab.Common;
  …
  public class CovarianceSEiso : Item, ICovarianceFunction {
    [Storable]
-   private double[,] x;
-   [Storable]
-   private double[,] xt;
-   [Storable]
    private double sf2;
    public double Scale { get { return sf2; } }
  …
    private double l;
    public double Length { get { return l; } }
-   [Storable]
-   private bool symmetric;
-   private double[,] sd;

    [StorableConstructor]
  …
    protected CovarianceSEiso(CovarianceSEiso original, Cloner cloner)
      : base(original, cloner) {
-     if (original.x != null) {
-       this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-       Array.Copy(original.x, this.x, x.Length);
-
-       this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-       Array.Copy(original.xt, this.xt, xt.Length);
-
-       this.sd = new double[original.sd.GetLength(0), original.sd.GetLength(1)];
-       Array.Copy(original.sd, this.sd, sd.Length);
-       this.sf2 = original.sf2;
-     }
      this.sf2 = original.sf2;
      this.l = original.l;
-     this.symmetric = original.symmetric;
    }
  …
    public void SetParameter(double[] hyp) {
+     if (hyp.Length != 2) throw new ArgumentException("CovarianceSEiso has two hyperparameters", "k");
      this.l = Math.Exp(hyp[0]);
      this.sf2 = Math.Exp(2 * hyp[1]);
-     sd = null;
-   }
-   public void SetData(double[,] x) {
-     SetData(x, x);
-     this.symmetric = true;
    }

-   public void SetData(double[,] x, double[,] xt) {
-     this.symmetric = false;
-     this.x = x;
-     this.xt = xt;
-     sd = null;
+   public double GetCovariance(double[,] x, int i, int j) {
+     double lInv = 1.0 / l;
+     double d = i == j
+       ? 0.0
+       : Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(x, j).Select(e => e * lInv));
+     return sf2 * Math.Exp(-d / 2.0);
    }

-   public double GetCovariance(int i, int j) {
-     if (sd == null) CalculateSquaredDistances();
-     return sf2 * Math.Exp(-sd[i, j] / 2.0);
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     double lInv = 1.0 / l;
+     double d = i == j
+       ? 0.0
+       : Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(x, j).Select(e => e * lInv));
+     double g = Math.Exp(-d / 2.0);
+     yield return sf2 * g * d;
+     yield return 2.0 * sf2 * g;
    }

-   public double GetGradient(int i, int j, int k) {
-     switch (k) {
-       case 0: return sf2 * Math.Exp(-sd[i, j] / 2.0) * sd[i, j];
-       case 1: return 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
-       default: throw new ArgumentException("CovarianceSEiso has two hyperparameters", "k");
-     }
-   }
-
-   private void CalculateSquaredDistances() {
-     if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-     int rows = x.GetLength(0);
-     int cols = xt.GetLength(0);
-     sd = new double[rows, cols];
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
      double lInv = 1.0 / l;
-     if (symmetric) {
-       for (int i = 0; i < rows; i++) {
-         for (int j = i; j < rows; j++) {
-           sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
-           sd[j, i] = sd[i, j];
-         }
-       }
-     } else {
-       for (int i = 0; i < rows; i++) {
-         for (int j = 0; j < cols; j++) {
-           sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
-         }
-       }
-     }
+     double d = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
+     return sf2 * Math.Exp(-d / 2.0);
    }
  }
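The refactored CovarianceSEiso evaluates the usual isotropic squared-exponential kernel, and the values it yields from GetGradient are derivatives with respect to the log-hyperparameters passed to SetParameter. The formulas below are a reading of the code above, not part of the changeset:

k_{SE}(x_i, x_j) = \sigma_f^2 \exp(-d/2), \qquad d = \frac{\lVert x_i - x_j \rVert^2}{\ell^2}, \qquad
\frac{\partial k}{\partial \log \ell} = \sigma_f^2 e^{-d/2}\, d, \qquad
\frac{\partial k}{\partial \log \sigma_f} = 2\, \sigma_f^2 e^{-d/2},

with \ell = \exp(\text{hyp}[0]) and \sigma_f^2 = \exp(2\,\text{hyp}[1]).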
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs
r8463 → r8484

    public void SetParameter(double[] hyp) {
+     if (terms.Count == 0) throw new ArgumentException("At least one term is needed for sum covariance function.");
      int offset = 0;
      foreach (var t in terms) {
  …
      }
    }
-   public void SetData(double[,] x) {
-     SetData(x, x);
-   }

-   public void SetData(double[,] x, double[,] xt) {
-     foreach (var t in terms) {
-       t.SetData(x, xt);
-     }
-   }
-
-   public double GetCovariance(int i, int j) {
-     return terms.Select(t => t.GetCovariance(i, j)).Sum();
+   public double GetCovariance(double[,] x, int i, int j) {
+     return terms.Select(t => t.GetCovariance(x, i, j)).Sum();
    }

    private Dictionary<int, Tuple<int, int>> cachedParameterMap;
-   public double GetGradient(int i, int j, int k) {
-     if (cachedParameterMap == null) {
-       CalculateParameterMap();
-     }
-     int ti = cachedParameterMap[k].Item1;
-     k = cachedParameterMap[k].Item2;
-     return terms[ti].GetGradient(i, j, k);
+   public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
+     //if (cachedParameterMap == null) {
+     //  CalculateParameterMap();
+     //}
+     //int ti = cachedParameterMap[k].Item1;
+     //k = cachedParameterMap[k].Item2;
+     //return terms[ti].GetGradient(x, i, j, k);
+     return terms.Select(t => t.GetGradient(x, i, j)).Aggregate(Enumerable.Concat);
    }
+
+   public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
+     return terms.Select(t => t.GetCrossCovariance(x, xt, i, j)).Sum();
+   }
+
    private void ClearCache() {
      cachedParameterMap = null;
    }

-   private void CalculateParameterMap() {
-     cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
-     int k = 0;
-     for (int ti = 0; ti < terms.Count; ti++) {
-       for (int ti_k = 0; ti_k < terms[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
-         cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
-       }
-     }
-   }
+   //private void CalculateParameterMap() {
+   //  cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
+   //  int k = 0;
+   //  for (int ti = 0; ti < terms.Count; ti++) {
+   //    for (int ti_k = 0; ti_k < terms[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
+   //      cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
+   //    }
+   //  }
+   //}
  }
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8475 → r8484

    public double NegativeLogLikelihood {
      get { return negativeLogLikelihood; }
+   }
+
+   [Storable]
+   private double[] hyperparameterGradients;
+   public double[] HyperparameterGradients {
+     get {
+       var copy = new double[hyperparameterGradients.Length];
+       Array.Copy(hyperparameterGradients, copy, copy.Length);
+       return copy;
+     }
    }
  …
      meanFunction.SetData(x);
-     covarianceFunction.SetData(x);

      // calculate means and covariances
  …
      for (int i = 0; i < n; i++) {
        for (int j = i; j < n; j++) {
-         l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
+         l[j, i] = covarianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;
          if (j == i) l[j, i] += 1.0;
        }
  …
        alpha[i] = alpha[i] / sqrSigmaNoise;
      negativeLogLikelihood = 0.5 * Util.ScalarProd(ym, alpha) + diagSum + (n / 2.0) * Math.Log(2.0 * Math.PI * sqrSigmaNoise);
-   }
-
-   public double[] GetHyperparameterGradients() {
+
      // derivatives
-     int n = x.GetLength(0);
      int nAllowedVariables = x.GetLength(1);

-     int info;
      alglib.matinvreport matInvRep;
      double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
  …
      if (covGradients.Length > 0) {
        for (int i = 0; i < n; i++) {
+         for (int j = 0; j < i; j++) {
+           var g = covarianceFunction.GetGradient(x, i, j).ToArray();
+           for (int k = 0; k < covGradients.Length; k++) {
+             covGradients[k] += lCopy[i, j] * g[k];
+           }
+         }
+
+         var gDiag = covarianceFunction.GetGradient(x, i, i).ToArray();
          for (int k = 0; k < covGradients.Length; k++) {
-           for (int j = 0; j < i; j++) {
-             covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
-           }
-           covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
+           // diag
+           covGradients[k] += 0.5 * lCopy[i, i] * gDiag[k];
          }
        }
      }

-     return
+     hyperparameterGradients =
        meanGradients
        .Concat(covGradients)
        .Concat(new double[] { noiseGradient }).ToArray();
+
    }
  …
      int newN = newX.GetLength(0);
      int n = x.GetLength(0);
-     // var predMean = new double[newN];
-     // predVar = new double[newN];
-
-     // var kss = new double[newN];
      var Ks = new double[newN, n];
-     //double[,] sWKs = new double[n, newN];
-     // double[,] v;
-
-     // for stddev
-     //covarianceFunction.SetParameter(covHyp, newX);
-     //kss = covarianceFunction.GetDiagonalCovariances();
-
-     covarianceFunction.SetData(x, newX);
      meanFunction.SetData(newX);
      var ms = meanFunction.GetMean(newX);
      for (int i = 0; i < newN; i++) {
        for (int j = 0; j < n; j++) {
-         Ks[i, j] = covarianceFunction.GetCovariance(j, i);
-         //sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
+         Ks[i, j] = covarianceFunction.GetCrossCovariance(x, newX, j, i);
        }
      }

-     // for stddev
-     // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);
-
      return Enumerable.Range(0, newN)
        .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
-     //for (int i = 0; i < newN; i++) {
-     //  // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
-     //  // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
-     //  // predVar[i] = kss[i] - sumV2;
-     //}
    }
  …

      // for stddev
-     covarianceFunction.SetData(newX);
      for (int i = 0; i < newN; i++)
-       kss[i] = covarianceFunction.GetCovariance(i, i);
-
-     covarianceFunction.SetData(x, newX);
+       kss[i] = covarianceFunction.GetCovariance(newX, i, i);

      for (int i = 0; i < newN; i++) {
        for (int j = 0; j < n; j++) {
-         sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
+         sWKs[j, i] = covarianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
        }
      }

      // for stddev
-     int info;
-     alglib.densesolverreport denseSolveRep;
-     double[,] v;
-
-     alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);
+     alglib.ablas.rmatrixlefttrsm(n, newN, l, 0, 0, false, false, 0, ref sWKs, 0, 0);

      for (int i = 0; i < newN; i++) {
-       var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
+       var sumV = Util.ScalarProd(Util.GetCol(sWKs, i), Util.GetCol(sWKs, i));
        kss[i] -= sumV;
        if (kss[i] < 0) kss[i] = 0;
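The switch from alglib.rmatrixsolvem to alglib.ablas.rmatrixlefttrsm replaces a general dense solve with a triangular solve, which is valid assuming l holds the lower Cholesky factor of K/σ_n² + I (as the surrounding factorization code suggests, with σ_n² = sqrSigmaNoise). The predictive variance assembled here is then the standard GP expression; the formula below is a reading of the code, not text from the changeset:

\operatorname{Var}[f(x_*)] = k(x_*, x_*) - k_*^{\top} (K + \sigma_n^2 I)^{-1} k_* = k(x_*, x_*) - \lVert L^{-1} (k_* / \sigma_n) \rVert^2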
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs
r8473 → r8484

    ModelParameter.ActualValue = model;
    NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
-   HyperparameterGradientsParameter.ActualValue = new RealVector(model.GetHyperparameterGradients());
+   HyperparameterGradientsParameter.ActualValue = new RealVector(model.HyperparameterGradients);
    return base.Apply();
  }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs
r8455 → r8484

  #endregion

+ using System.Collections.Generic;
  using HeuristicLab.Core;
  …
    int GetNumberOfParameters(int numberOfVariables);
    void SetParameter(double[] hyp);
-   void SetData(double[,] x);
-   void SetData(double[,] x, double[,] xt);
+   double GetCovariance(double[,] x, int i, int j);
+   IEnumerable<double> GetGradient(double[,] x, int i, int j);
+   double GetCrossCovariance(double[,] x, double[,] xt, int i, int j);
+   //void SetData(double[,] x);
+   //void SetData(double[,] x, double[,] xt);

-   double GetCovariance(int i, int j);
-   double GetGradient(int i, int j, int k);
+   //double GetCovariance(int i, int j);
+   //double GetGradient(int i, int j, int k);
  }
  }
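A minimal consumption sketch for the refactored, stateless interface follows. The class and method names below are hypothetical and not part of this changeset; only the ICovarianceFunction members shown above are used, and the HeuristicLab.Algorithms.DataAnalysis namespace is assumed.

using System.Linq;
using HeuristicLab.Algorithms.DataAnalysis;

public static class CovarianceFunctionUsageSketch {  // hypothetical helper, not in the changeset
  // Builds the symmetric training covariance matrix from a data matrix x,
  // using the new per-entry GetCovariance(x, i, j) call (no SetData needed).
  public static double[,] BuildCovarianceMatrix(ICovarianceFunction cov, double[,] x) {
    int n = x.GetLength(0);
    var k = new double[n, n];
    for (int i = 0; i < n; i++) {
      for (int j = i; j < n; j++) {
        k[i, j] = cov.GetCovariance(x, i, j);
        k[j, i] = k[i, j]; // covariance matrices are symmetric
      }
    }
    return k;
  }

  // GetGradient yields one value per hyperparameter (in SetParameter order),
  // so the per-entry gradient vector can simply be materialized.
  public static double[] EntryGradient(ICovarianceFunction cov, double[,] x, int i, int j) {
    return cov.GetGradient(x, i, j).ToArray();
  }
}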
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs
r8473 → r8484

    IMeanFunction MeanFunction { get; }
    ICovarianceFunction CovarianceFunction { get; }
-   double[] GetHyperparameterGradients();
+   double[] HyperparameterGradients { get; }

    IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows);