- Timestamp:
- 01/29/14 14:34:44 (11 years ago)
- Location:
- branches/HeuristicLab.Problems.GaussianProcessTuning
- Files:
-
- 5 edited
Legend:
- Unmodified
- Added
- Removed
-
branches/HeuristicLab.Problems.GaussianProcessTuning/GaussianProcessDemo/Form1.cs
r9387 r10422 4 4 using System.Data; 5 5 using System.Drawing; 6 using System.Globalization; 6 7 using System.Linq; 7 8 using System.Text; … … 28 29 29 30 var sum = new CovarianceSum(); 30 var t = new Covariance NeuralNetwork();31 var t = new CovarianceSquaredExponentialIso(); 31 32 sum.Terms.Add(t); 32 sum.Terms.Add(new CovarianceNoise());33 //sum.Terms.Add(new CovarianceNoise()); 33 34 this.covFunction = sum; 34 35 UpdateSliders(); … … 52 53 53 54 private void InitData() { 54 int n = 200;55 int n = 5000; 55 56 data = new List<List<double>>(); 56 data.Add( ValueGenerator.GenerateSteps(0, 1, 1.0 / n).ToList());57 data.Add(Enumerable.Range(0, n).Select(e => e / 200.0).ToList()); 57 58 58 59 // sample from GP 59 60 var normalRand = new NormalDistributedRandom(random, 0, 1); 60 alpha = (from i in Enumerable.Range(0, n + 1)61 alpha = (from i in Enumerable.Range(0, n) 61 62 select normalRand.NextDouble()).ToArray(); 62 63 } … … 72 73 } 73 74 74 var trainingData = new List<List<double>>(); 75 var trainingIndices = RandomEnumerable.SampleRandomWithoutRepetition(Enumerable.Range(0, y.Count), random, 10); 76 var trainingY = trainingIndices.Select(i => y[i]).ToList(); 77 var trainingX = trainingIndices.Select(i => data[0][i]).ToList(); 78 trainingData.Add(trainingY); 79 trainingData.Add(trainingX); 80 81 //chart1.Series[2].Points.Clear(); 82 //foreach (var p in trainingY.Zip(trainingX, (t, x) => new { t, x })) { 83 // chart1.Series[2].Points.AddXY(p.x, p.t); 75 //var trainingData = new List<List<double>>(); 76 //var trainingIndices = RandomEnumerable.SampleRandomWithoutRepetition(Enumerable.Range(0, y.Count), random, 10); 77 //var trainingY = trainingIndices.Select(i => y[i]).ToList(); 78 //var trainingX = trainingIndices.Select(i => data[0][i]).ToList(); 79 //trainingData.Add(trainingY); 80 //trainingData.Add(trainingX); 81 // 82 ////chart1.Series[2].Points.Clear(); 83 ////foreach (var p in trainingY.Zip(trainingX, (t, x) => new { t, x })) { 84 //// 
chart1.Series[2].Points.AddXY(p.x, p.t); 85 ////} 86 // 87 //var allData = new List<List<double>>(); 88 //allData.Add(y); 89 //allData.Add(data[0]); 90 //var variableNames = new string[] { "y", "x" }; 91 //var fullDataSet = new Dataset(variableNames, allData); 92 //var trainingDataSet = new Dataset(variableNames, trainingData); 93 //var trainingRows = Enumerable.Range(0, trainingIndices.Count()); 94 //var fullRows = Enumerable.Range(0, data[0].Count); 95 //var correctModel = new GaussianProcessModel(fullDataSet, variableNames.First(), variableNames.Skip(1), fullRows, hyp, new MeanZero(), 96 // (ICovarianceFunction)covFunction.Clone()); 97 //var yPred = correctModel.GetEstimatedValues(fullDataSet, fullRows); 98 //chart1.Series[1].Points.Clear(); 99 //foreach (var p in yPred.Zip(data[0], (t, x) => new { t, x })) { 100 // chart1.Series[1].Points.AddXY(p.x, p.t); 84 101 //} 85 86 var allData = new List<List<double>>();87 allData.Add(y);88 allData.Add(data[0]);89 var variableNames = new string[] { "y", "x" };90 var fullDataSet = new Dataset(variableNames, allData);91 var trainingDataSet = new Dataset(variableNames, trainingData);92 var trainingRows = Enumerable.Range(0, trainingIndices.Count());93 var fullRows = Enumerable.Range(0, data[0].Count);94 var correctModel = new GaussianProcessModel(fullDataSet, variableNames.First(), variableNames.Skip(1), fullRows, hyp, new MeanZero(),95 (ICovarianceFunction)covFunction.Clone());96 var yPred = correctModel.GetEstimatedValues(fullDataSet, fullRows);97 chart1.Series[1].Points.Clear();98 foreach (var p in yPred.Zip(data[0], (t, x) => new { t, x })) {99 chart1.Series[1].Points.AddXY(p.x, p.t);100 }101 102 } 102 103 … … 124 125 private string DataToText() { 125 126 var str = new StringBuilder(); 126 foreach (var p in chart1.Series[ 1].Points) {127 str.AppendLine(p.XValue + "\t" + p.YValues.First());127 foreach (var p in chart1.Series[0].Points) { 128 str.AppendLine(p.XValue.ToString(CultureInfo.InvariantCulture) + "\t" + 
p.YValues.First().ToString(CultureInfo.InvariantCulture)); 128 129 } 129 130 return str.ToString(); -
branches/HeuristicLab.Problems.GaussianProcessTuning/HeuristicLab.Algorithms.DataAnalysis.Experimental/HeuristicLab.Algorithms.DataAnalysis.Experimental.csproj
r9562 r10422 72 72 </PropertyGroup> 73 73 <ItemGroup> 74 <Reference Include="ALGLIB-3. 6.0, Version=3.6.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=x86">74 <Reference Include="ALGLIB-3.7.0, Version=3.7.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL"> 75 75 <SpecificVersion>False</SpecificVersion> 76 <HintPath>..\..\..\trunk\sources\bin\ALGLIB-3. 6.0.dll</HintPath>76 <HintPath>..\..\..\trunk\sources\bin\ALGLIB-3.7.0.dll</HintPath> 77 77 </Reference> 78 78 <Reference Include="HeuristicLab.Algorithms.DataAnalysis-3.4"> -
branches/HeuristicLab.Problems.GaussianProcessTuning/HeuristicLab.Problems.Instances.DataAnalysis.GaussianProcessRegression/Instances.DataAnalysis.GaussianProcessRegression-3.3.csproj
r9338 r10422 98 98 </PropertyGroup> 99 99 <ItemGroup> 100 <Reference Include="ALGLIB-3.6.0, Version=3.6.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL"> 101 <SpecificVersion>False</SpecificVersion> 102 <HintPath>..\..\..\trunk\sources\bin\ALGLIB-3.6.0.dll</HintPath> 103 <Private>False</Private> 100 <Reference Include="ALGLIB-3.7.0, Version=3.7.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL"> 101 <SpecificVersion>False</SpecificVersion> 102 <HintPath>..\..\..\trunk\sources\bin\ALGLIB-3.7.0.dll</HintPath> 104 103 </Reference> 105 104 <Reference Include="HeuristicLab.Algorithms.DataAnalysis-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL"> -
branches/HeuristicLab.Problems.GaussianProcessTuning/HeuristicLab.Problems.Instances.DataAnalysis.GaussianProcessRegression/Plugin.cs.frame
r9112 r10422 25 25 [Plugin("HeuristicLab.Problems.Instances.DataAnalysis.GaussianProcessRegression", "3.3.7.$WCREV$")] 26 26 [PluginFile("HeuristicLab.Problems.Instances.DataAnalysis.GaussianProcessRegression-3.3.dll", PluginFileType.Assembly)] 27 [PluginDependency("HeuristicLab.ALGLIB", "3. 6.0")]27 [PluginDependency("HeuristicLab.ALGLIB", "3.7.0")] 28 28 [PluginDependency("HeuristicLab.Algorithms.DataAnalysis", "3.4")] 29 29 [PluginDependency("HeuristicLab.Collections", "3.3")] -
branches/HeuristicLab.Problems.GaussianProcessTuning/HeuristicLab.Problems.Instances.DataAnalysis.GaussianProcessRegression/Util.cs
r9124 r10422 52 52 K[i, j] = covFunction.Covariance(x, i, j); 53 53 54 if (!alglib.spdmatrixcholesky(ref K, K.GetLength(0), true)) throw new ArgumentException();55 54 // if (!alglib.spdmatrixcholesky(ref K, K.GetLength(0), true)) throw new ArgumentException(); 55 K = toeplitz_cholesky_lower(K.GetLength(0), K); 56 56 List<double> target = new List<double>(K.GetLength(0)); 57 57 for (int i = 0; i < K.GetLength(0); i++) { … … 67 67 return target; 68 68 } 69 70 //****************************************************************************80 71 72 public static double[,] toeplitz_cholesky_lower(int n, double[,] a) 73 74 //****************************************************************************80 75 // 76 // Purpose: 77 // 78 // TOEPLITZ_CHOLESKY_LOWER: lower Cholesky factor of a Toeplitz matrix. 79 // 80 // Discussion: 81 // 82 // The Toeplitz matrix must be positive semi-definite. 83 // 84 // After factorization, A = L * L'. 85 // 86 // Licensing: 87 // 88 // This code is distributed under the GNU LGPL license. 89 // 90 // Modified: 91 // 92 // 13 November 2012 93 // 29 January 2014: adapted to C# by Gabriel Kronberger 94 // Author: 95 // 96 // John Burkardt 97 // 98 // Reference: 99 // 100 // Michael Stewart, 101 // Cholesky factorization of semi-definite Toeplitz matrices. 102 // 103 // Parameters: 104 // 105 // Input, int N, the order of the matrix. 106 // 107 // Input, double A[N,N], the Toeplitz matrix. 108 // 109 // Output, double TOEPLITZ_CHOLESKY_LOWER[N,N], the lower Cholesky factor. 
110 // 111 { 112 double div; 113 double[] g; 114 double g1j; 115 double g2j; 116 int i; 117 int j; 118 double[,] l; 119 double rho; 120 121 l = new double[n, n]; 122 123 for (j = 0; j < n; j++) { 124 for (i = 0; i < n; i++) { 125 l[i, j] = 0.0; 126 } 127 } 128 g = new double[2 * n]; 129 130 for (j = 0; j < n; j++) { 131 g[0 + j * 2] = a[0, j]; 132 } 133 g[1 + 0 * 2] = 0.0; 134 for (j = 1; j < n; j++) { 135 g[1 + j * 2] = a[j, 0]; 136 } 137 138 for (i = 0; i < n; i++) { 139 l[i, 0] = g[0 + i * 2]; 140 } 141 for (j = n - 1; 1 <= j; j--) { 142 g[0 + j * 2] = g[0 + (j - 1) * 2]; 143 } 144 g[0 + 0 * 2] = 0.0; 145 146 for (i = 1; i < n; i++) { 147 rho = -g[1 + i * 2] / g[0 + i * 2]; 148 div = Math.Sqrt((1.0 - rho) * (1.0 + rho)); 149 150 for (j = i; j < n; j++) { 151 g1j = g[0 + j * 2]; 152 g2j = g[1 + j * 2]; 153 g[0 + j * 2] = (g1j + rho * g2j) / div; 154 g[1 + j * 2] = (rho * g1j + g2j) / div; 155 } 156 157 for (j = i; j < n; j++) { 158 l[j, i] = g[0 + j * 2]; 159 } 160 for (j = n - 1; i < j; j--) { 161 g[0 + j * 2] = g[0 + (j - 1) * 2]; 162 } 163 g[0 + i * 2] = 0.0; 164 } 165 166 167 return l; 168 } 169 //****************************************************************************80 170 171 public static double[,] toeplitz_cholesky_upper(int n, double[,] a) 172 173 //****************************************************************************80 174 // 175 // Purpose: 176 // 177 // TOEPLITZ_CHOLESKY_UPPER: upper Cholesky factor of a Toeplitz matrix. 178 // 179 // Discussion: 180 // 181 // The Toeplitz matrix must be positive semi-definite. 182 // 183 // After factorization, A = R' * R. 184 // 185 // Licensing: 186 // 187 // This code is distributed under the GNU LGPL license. 188 // 189 // Modified: 190 // 191 // 14 November 2012 192 // 29 January 2014: adapted to C# by Gabriel Kronberger 193 // 194 // Author: 195 // 196 // John Burkardt 197 // 198 // Reference: 199 // 200 // Michael Stewart, 201 // Cholesky factorization of semi-definite Toeplitz matrices. 
202 // 203 // Parameters: 204 // 205 // Input, int N, the order of the matrix. 206 // 207 // Input, double A[N,N], the Toeplitz matrix. 208 // 209 // Output, double TOEPLITZ_CHOLESKY_UPPER[N,N], the upper Cholesky factor. 210 // 211 { 212 double div; 213 double[] g; 214 double g1j; 215 double g2j; 216 int i; 217 int j; 218 double[,] r; 219 double rho; 220 221 r = new double[n, n]; 222 223 for (j = 0; j < n; j++) { 224 for (i = 0; i < n; i++) { 225 r[i, j] = 0.0; 226 } 227 } 228 g = new double[2 * n]; 229 230 for (j = 0; j < n; j++) { 231 g[0 + j * 2] = a[0, j]; 232 } 233 234 g[1 + 0 * 2] = 0.0; 235 for (j = 1; j < n; j++) { 236 g[1 + j * 2] = a[j, 0]; 237 } 238 for (j = 0; j < n; j++) { 239 r[0, j] = g[0 + j * 2]; 240 } 241 for (j = n - 1; 1 <= j; j--) { 242 g[0 + j * 2] = g[0 + (j - 1) * 2]; 243 } 244 g[0 + 0 * 2] = 0.0; 245 246 for (i = 1; i < n; i++) { 247 rho = -g[1 + i * 2] / g[0 + i * 2]; 248 div = Math.Sqrt((1.0 - rho) * (1.0 + rho)); 249 for (j = i; j < n; j++) { 250 g1j = g[0 + j * 2]; 251 g2j = g[1 + j * 2]; 252 g[0 + j * 2] = (g1j + rho * g2j) / div; 253 g[1 + j * 2] = (rho * g1j + g2j) / div; 254 } 255 for (j = i; j < n; j++) { 256 r[i, j] = g[0 + j * 2]; 257 } 258 for (j = n - 1; i < j; j--) { 259 g[0 + j * 2] = g[0 + (j - 1) * 2]; 260 } 261 g[0 + i * 2] = 0.0; 262 } 263 264 return r; 265 } 69 266 } 70 267 }
Note: See TracChangeset for help on using the changeset viewer.