- Timestamp: 08/11/12 14:45:15
- Location: trunk/sources
- Files: 2 edited
Legend:
- unchanged lines have no prefix
- lines prefixed with "-" were removed
- lines prefixed with "+" were added
trunk/sources/HeuristicLab.Algorithms.DataAnalysis.Views/3.4/GaussianProcessRegressionSolutionLineChartView.cs
r8473 → r8475

    this.chart.ChartAreas[0].AxisX.Maximum = Content.ProblemData.Dataset.Rows - 1;

-   this.chart.Series.Add(TARGETVARIABLE_SERIES_NAME);
-   this.chart.Series[TARGETVARIABLE_SERIES_NAME].LegendText = Content.ProblemData.TargetVariable;
-   this.chart.Series[TARGETVARIABLE_SERIES_NAME].ChartType = SeriesChartType.FastLine;
-   this.chart.Series[TARGETVARIABLE_SERIES_NAME].Points.DataBindXY(Enumerable.Range(0, Content.ProblemData.Dataset.Rows).ToArray(),
-     Content.ProblemData.Dataset.GetDoubleValues(Content.ProblemData.TargetVariable).ToArray());
    // training series
    this.chart.Series.Add(ESTIMATEDVALUES_TRAINING_SERIES_NAME);
…
    var mean = Content.EstimatedTrainingValues.ToArray();
    var s2 = Content.EstimatedTrainingVariance.ToArray();
-   var lower = mean.Zip(s2, (m, s) => m - s).ToArray();
-   var upper = mean.Zip(s2, (m, s) => m + s).ToArray();
+   var lower = mean.Zip(s2, (m, s) => m - 1.96 * Math.Sqrt(s)).ToArray();
+   var upper = mean.Zip(s2, (m, s) => m + 1.96 * Math.Sqrt(s)).ToArray();
    this.chart.Series[ESTIMATEDVALUES_TRAINING_SERIES_NAME].Points.DataBindXY(Content.ProblemData.TrainingIndices.ToArray(), lower, upper);
    this.InsertEmptyPoints(this.chart.Series[ESTIMATEDVALUES_TRAINING_SERIES_NAME]);
    this.chart.Series[ESTIMATEDVALUES_TRAINING_SERIES_NAME].Tag = Content;
+
    // test series
    this.chart.Series.Add(ESTIMATEDVALUES_TEST_SERIES_NAME);
…
    mean = Content.EstimatedTestValues.ToArray();
    s2 = Content.EstimatedTestVariance.ToArray();
-   lower = mean.Zip(s2, (m, s) => m - s).ToArray();
-   upper = mean.Zip(s2, (m, s) => m + s).ToArray();
+   lower = mean.Zip(s2, (m, s) => m - 1.96 * Math.Sqrt(s)).ToArray();
+   upper = mean.Zip(s2, (m, s) => m + 1.96 * Math.Sqrt(s)).ToArray();
    this.chart.Series[ESTIMATEDVALUES_TEST_SERIES_NAME].Points.DataBindXY(Content.ProblemData.TestIndices.ToArray(), lower, upper);
    this.InsertEmptyPoints(this.chart.Series[ESTIMATEDVALUES_TEST_SERIES_NAME]);
    this.chart.Series[ESTIMATEDVALUES_TEST_SERIES_NAME].Tag = Content;
+
    // series of remaining points
    int[] allIndices = Enumerable.Range(0, Content.ProblemData.Dataset.Rows).Except(Content.ProblemData.TrainingIndices).Except(Content.ProblemData.TestIndices).ToArray();
…
    this.InsertEmptyPoints(this.chart.Series[ESTIMATEDVALUES_ALL_SERIES_NAME]);
    this.chart.Series[ESTIMATEDVALUES_ALL_SERIES_NAME].Tag = Content;
+
+   // target
+   this.chart.Series.Add(TARGETVARIABLE_SERIES_NAME);
+   this.chart.Series[TARGETVARIABLE_SERIES_NAME].LegendText = Content.ProblemData.TargetVariable;
+   this.chart.Series[TARGETVARIABLE_SERIES_NAME].ChartType = SeriesChartType.FastLine;
+   this.chart.Series[TARGETVARIABLE_SERIES_NAME].Points.DataBindXY(Enumerable.Range(0, Content.ProblemData.Dataset.Rows).ToArray(),
+     Content.ProblemData.Dataset.GetDoubleValues(Content.ProblemData.TargetVariable).ToArray());
+
    this.ToggleSeriesData(this.chart.Series[ESTIMATEDVALUES_ALL_SERIES_NAME]);
…
    mean = Content.EstimatedValues.ToArray();
    s2 = Content.EstimatedVariance.ToArray();
-   lower = mean.Zip(s2, (m, s) => m - s).ToArray();
-   upper = mean.Zip(s2, (m, s) => m + s).ToArray();
+   lower = mean.Zip(s2, (m, s) => m - 1.96 * Math.Sqrt(s)).ToArray();
+   upper = mean.Zip(s2, (m, s) => m + 1.96 * Math.Sqrt(s)).ToArray();
    lower = indices.Select(index => lower[index]).ToArray();
    upper = indices.Select(index => upper[index]).ToArray();
…
    mean = Content.EstimatedTrainingValues.ToArray();
    s2 = Content.EstimatedTrainingVariance.ToArray();
-   lower = mean.Zip(s2, (m, s) => m - s).ToArray();
-   upper = mean.Zip(s2, (m, s) => m + s).ToArray();
+   lower = mean.Zip(s2, (m, s) => m - 1.96 * Math.Sqrt(s)).ToArray();
+   upper = mean.Zip(s2, (m, s) => m + 1.96 * Math.Sqrt(s)).ToArray();
    break;
  case ESTIMATEDVALUES_TEST_SERIES_NAME:
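The view change above replaces the old mean ± variance band with an approximate 95% confidence band of mean ± 1.96·sqrt(variance). A minimal, self-contained sketch of that computation, assuming s2 holds per-point predictive variances and a Gaussian predictive distribution (ConfidenceBandSketch and ConfidenceBand95 are illustrative names, not HeuristicLab API):

using System;
using System.Linq;

public static class ConfidenceBandSketch {
  // Lower/upper bounds of an approximate 95% interval: mean ± 1.96 * sqrt(variance).
  public static (double[] Lower, double[] Upper) ConfidenceBand95(double[] mean, double[] s2) {
    var lower = mean.Zip(s2, (m, s) => m - 1.96 * Math.Sqrt(s)).ToArray();
    var upper = mean.Zip(s2, (m, s) => m + 1.96 * Math.Sqrt(s)).ToArray();
    return (lower, upper);
  }
}

For example, with mean = 1.0 and variance = 0.04 the band is [0.608, 1.392], i.e. 1.0 ± 1.96·0.2, whereas the old code would have plotted 1.0 ± 0.04.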
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8473 → r8475

    double[] m = meanFunction.GetMean(x);
    for (int i = 0; i < n; i++) {
-
      for (int j = i; j < n; j++) {
        l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
…
    int info;
    alglib.matinvreport matInvRep;
-
-   alglib.spdmatrixcholeskyinverse(ref l, n, false, out info, out matInvRep);
+   double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
+   Array.Copy(l, lCopy, lCopy.Length);
+
+   alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
    if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients.");
    for (int i = 0; i < n; i++) {
      for (int j = 0; j <= i; j++)
-       l[i, j] = l[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
+       lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
    }

-   double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => l[i, i]).Sum();
+   double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

    double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
…
    for (int k = 0; k < covGradients.Length; k++) {
      for (int j = 0; j < i; j++) {
-       covGradients[k] += l[i, j] * covarianceFunction.GetGradient(i, j, k);
+       covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
      }
-     covGradients[k] += 0.5 * l[i, i] * covarianceFunction.GetGradient(i, i, k);
+     covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
    }
    }
…
    double[,] sWKs = new double[n, newN];

-
    // for stddev
    covarianceFunction.SetData(newX);
…
    covarianceFunction.SetData(x, newX);
-   for (int i = 0; i < n; i++) {
-     for (int j = 0; j < newN; j++) {
-       sWKs[i, j] = covarianceFunction.GetCovariance(i, j) / Math.Sqrt(sqrSigmaNoise);
+   for (int i = 0; i < newN; i++) {
+     for (int j = 0; j < n; j++) {
+       sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
      }
    }
…
    alglib.densesolverreport denseSolveRep;
    double[,] v;
-   double[,] lTrans = new double[l.GetLength(1), l.GetLength(0)];
-   for (int i = 0; i < lTrans.GetLength(0); i++)
-     for (int j = 0; j < lTrans.GetLength(1); j++)
-       lTrans[i, j] = l[j, i];
-   alglib.rmatrixsolvem(lTrans, n, sWKs, newN, true, out info, out denseSolveRep, out v); // not working!
-   // alglib.spdmatrixcholeskysolvem(lTrans, n, true, sWKs, newN, out info, out denseSolveRep, out v);
+
+   alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);

    for (int i = 0; i < newN; i++) {
-     var sumV2 = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
-     yield return kss[i] - sumV2;
-   }
+     var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
+     kss[i] -= sumV;
+     if (kss[i] < 0) kss[i] = 0;
+   }
+   return kss;
    }
  }
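The model change above keeps the Cholesky factor l intact (the in-place inverse now works on lCopy), fixes the orientation of the scaled cross-covariance matrix sWKs, and computes each predictive variance as the prior variance kss[i] minus the squared norm of the corresponding column of V, where L·V = sWKs, clamping negative round-off to zero. A minimal, self-contained sketch of that variance step, using a plain forward substitution in place of alglib.rmatrixsolvem (PredictiveVariance, ForwardSubstitution and GetColumn are illustrative names, not HeuristicLab API):

using System;

public static class GpVarianceSketch {
  // l:    lower-triangular Cholesky factor of the noisy training covariance (n x n)
  // sWKs: scaled cross-covariances k(x_train, x_new) / sigma_n, one column per new point (n x newN)
  // kss:  prior variances k(x_new, x_new) for each new point (length newN)
  // Returns Var[f*_i] = kss[i] - ||v_i||^2 with v_i the i-th column of V, L V = sWKs.
  public static double[] PredictiveVariance(double[,] l, double[,] sWKs, double[] kss) {
    int n = l.GetLength(0);
    int newN = kss.Length;
    var result = new double[newN];
    for (int i = 0; i < newN; i++) {
      double[] v = ForwardSubstitution(l, GetColumn(sWKs, i), n);
      double sumV = 0.0;
      for (int j = 0; j < n; j++) sumV += v[j] * v[j];
      result[i] = Math.Max(0.0, kss[i] - sumV); // clamp tiny negative round-off to zero
    }
    return result;
  }

  // Solves L v = b for lower-triangular L by forward substitution.
  static double[] ForwardSubstitution(double[,] l, double[] b, int n) {
    var v = new double[n];
    for (int r = 0; r < n; r++) {
      double s = b[r];
      for (int c = 0; c < r; c++) s -= l[r, c] * v[c];
      v[r] = s / l[r, r];
    }
    return v;
  }

  static double[] GetColumn(double[,] m, int col) {
    var column = new double[m.GetLength(0)];
    for (int r = 0; r < column.Length; r++) column[r] = m[r, col];
    return column;
  }
}

The clamp matters because kss[i] - ||v_i||^2 can come out slightly negative from floating-point error even though a variance cannot be, which would make the Math.Sqrt in the chart view produce NaN.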