Changeset 8477 for branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
Timestamp: 08/13/12 16:18:37 (12 years ago)
Location: branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis
Files: 2 edited
Legend:
- Unmodified
- Added
- Removed
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed:
  /trunk/sources/HeuristicLab.Algorithms.DataAnalysis merged: 8419, 8421, 8439, 8448, 8452, 8455, 8463-8465, 8467, 8471, 8473, 8475
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
Diff r8416 → r8477 (reconstructed from the garbled changeset rendering; "-" = removed in r8416, "+" = added in r8477; unprefixed lines are unchanged context):

@@ old 73–77 @@
     private double[,] x;
     [Storable]
-    private Scaling scaling;
+    private Scaling inputScaling;

@@ old 82–86 @@
     this.meanFunction = cloner.Clone(original.meanFunction);
     this.covarianceFunction = cloner.Clone(original.covarianceFunction);
-    this.scaling = cloner.Clone(original.scaling);
+    this.inputScaling = cloner.Clone(original.inputScaling);
     this.negativeLogLikelihood = original.negativeLogLikelihood;
     this.targetVariable = original.targetVariable;

@@ old 103–116 @@
     this.allowedInputVariables = allowedInputVariables.ToArray();

-    sqrSigmaNoise = Math.Exp(2.0 * hyp.First());
-    sqrSigmaNoise = Math.Max(10E-6, sqrSigmaNoise); // lower limit for the noise level

     int nVariables = this.allowedInputVariables.Length;
-    this.meanFunction.SetParameter(hyp.Skip(1)
+    this.meanFunction.SetParameter(hyp
       .Take(this.meanFunction.GetNumberOfParameters(nVariables))
       .ToArray());
-    this.covarianceFunction.SetParameter(hyp.Skip(1 + this.meanFunction.GetNumberOfParameters(nVariables))
+    this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
       .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
       .ToArray());
+    sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());

     CalculateModel(ds, rows);

@@ old 118–125 @@
   private void CalculateModel(Dataset ds, IEnumerable<int> rows) {
-    scaling = new Scaling(ds, allowedInputVariables, rows);
-    x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling);
-
-    var y = ds.GetDoubleValues(targetVariable, rows).ToArray();
+    inputScaling = new Scaling(ds, allowedInputVariables, rows);
+    x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
+    var y = ds.GetDoubleValues(targetVariable, rows);

     int n = x.GetLength(0);

@@ old 132–136 @@
     double[] m = meanFunction.GetMean(x);
     for (int i = 0; i < n; i++) {
-
       for (int j = i; j < n; j++) {
         l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;

@@ old 144–148 @@
     var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
-    if (!res) throw new InvalidOperationException("Matrix is not positive semidefinite");
+    if (!res) throw new ArgumentException("Matrix is not positive semidefinite");

     // calculate sum of diagonal elements for likelihood

@@ old 162–180 @@
     int n = x.GetLength(0);
     int nAllowedVariables = x.GetLength(1);
-    double[,] q = new double[n, n];
-    double[,] eye = new double[n, n];
-    for (int i = 0; i < n; i++) eye[i, i] = 1.0;

     int info;
-    alglib.densesolverreport denseSolveRep;
-
-    alglib.spdmatrixcholeskysolvem(l, n, false, eye, n, out info, out denseSolveRep, out q);
-    // double[,] a2 = outerProd(alpha, alpha);
+    alglib.matinvreport matInvRep;
+    double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
+    Array.Copy(l, lCopy, lCopy.Length);
+
+    alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
+    if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients.");
     for (int i = 0; i < n; i++) {
-      for (int j = 0; j < n; j++)
-        q[i, j] = q[i, j] / sqrSigmaNoise - alpha[i] * alpha[j]; // a2[i,j];
-    }
-
-    double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => q[i, i]).Sum();
+      for (int j = 0; j <= i; j++)
+        lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
+    }
+
+    double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

     double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];

@@ old 187–202 @@
     if (covGradients.Length > 0) {
       for (int i = 0; i < n; i++) {
-        for (int j = 0; j < n; j++) {
-          var covDeriv = covarianceFunction.GetGradient(i, j);
-          for (int k = 0; k < covGradients.Length; k++) {
-            covGradients[k] += q[i, j] * covDeriv[k];
+        for (int k = 0; k < covGradients.Length; k++) {
+          for (int j = 0; j < i; j++) {
+            covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
           }
+          covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
         }
       }
-      covGradients = covGradients.Select(g => g / 2.0).ToArray();
     }

-    return new double[] { noiseGradient }
-      .Concat(meanGradients)
-      .Concat(covGradients).ToArray();
+    return
+      meanGradients
+      .Concat(covGradients)
+      .Concat(new double[] { noiseGradient }).ToArray();
   }

@@ old 220–224 @@
   private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
-    var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, scaling);
+    var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
     int newN = newX.GetLength(0);
     int n = x.GetLength(0);

@@ old 230–234 @@
     // var kss = new double[newN];
     var Ks = new double[newN, n];
-    double[,] sWKs = new double[n, newN];
+    //double[,] sWKs = new double[n, newN];
     // double[,] v;

@@ old 242–249 @@
     var ms = meanFunction.GetMean(newX);
     for (int i = 0; i < newN; i++) {
-
       for (int j = 0; j < n; j++) {
         Ks[i, j] = covarianceFunction.GetCovariance(j, i);
-        sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
+        //sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
       }
     }

@@ old 252–263 @@ (NOTE: reconstruction of this tail is approximate — the dump interleaves the removed yield-return loop with the added return statement and the entirely new GetEstimatedVariance method)
     // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);

-    for (int i = 0; i < newN; i++) {
-      // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
-      yield return ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha);
-      // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
-      // predVar[i] = kss[i] - sumV2;
-    }
-  }
+    return Enumerable.Range(0, newN)
+      .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
+    //for (int i = 0; i < newN; i++) {
+    //  // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
+    //  // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
+    //  // predVar[i] = kss[i] - sumV2;
+    //}
+  }
+
+  public IEnumerable<double> GetEstimatedVariance(Dataset dataset, IEnumerable<int> rows) {
+    var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
+    int newN = newX.GetLength(0);
+    int n = x.GetLength(0);
+
+    var kss = new double[newN];
+    double[,] sWKs = new double[n, newN];
+
+    // for stddev
+    covarianceFunction.SetData(newX);
+    for (int i = 0; i < newN; i++)
+      kss[i] = covarianceFunction.GetCovariance(i, i);
+
+    covarianceFunction.SetData(x, newX);
+    for (int i = 0; i < newN; i++) {
+      for (int j = 0; j < n; j++) {
+        sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
+      }
+    }
+
+    // for stddev
+    int info;
+    alglib.densesolverreport denseSolveRep;
+    double[,] v;
+
+    alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);
+
+    for (int i = 0; i < newN; i++) {
+      var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
+      kss[i] -= sumV;
+      if (kss[i] < 0) kss[i] = 0;
+    }
+    return kss;
   }
 }
Note: See TracChangeset for help on using the changeset viewer.