Changeset 8463
- Timestamp:
- 08/09/12 16:32:44 (12 years ago)
- Location:
- trunk/sources
- Files:
-
- 6 added
- 15 edited
- 1 moved
Legend:
- Unmodified
- Added
- Removed
-
trunk/sources/HeuristicLab.Algorithms.DataAnalysis.Views/3.4/HeuristicLab.Algorithms.DataAnalysis.Views-3.4.csproj
r8416 r8463 118 118 </ItemGroup> 119 119 <ItemGroup> 120 <Compile Include="MeanProdView.cs"> 121 <SubType>UserControl</SubType> 122 </Compile> 123 <Compile Include="MeanProdView.Designer.cs"> 124 <DependentUpon>MeanProdView.cs</DependentUpon> 125 </Compile> 126 <Compile Include="MeanSumView.cs"> 127 <SubType>UserControl</SubType> 128 </Compile> 129 <Compile Include="MeanSumView.Designer.cs"> 130 <DependentUpon>MeanSumView.cs</DependentUpon> 131 </Compile> 120 132 <Compile Include="CovarianceProdView.cs"> 121 133 <SubType>UserControl</SubType> -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovariancePeriodic.cs
r8455 r8463 102 102 } 103 103 104 105 public double[] GetDiagonalCovariances() {106 if (x != xt) throw new InvalidOperationException();107 int rows = x.GetLength(0);108 var cov = new double[rows];109 for (int i = 0; i < rows; i++) {110 double k = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, i)));111 k = Math.PI * k / p;112 k = Math.Sin(k) / l;113 k = k * k;114 cov[i] = sf2 * Math.Exp(-2.0 * k);115 }116 return cov;117 }118 119 104 public double GetGradient(int i, int j, int k) { 120 105 double v = Math.PI * sd[i, j] / p; -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs
r8455 r8463 20 20 #endregion 21 21 22 using System; 23 using System.Collections.Generic; 22 24 using System.Linq; 23 25 using HeuristicLab.Common; … … 48 50 this.factors = cloner.Clone(original.factors); 49 51 this.numberOfVariables = original.numberOfVariables; 52 AttachEventHandlers(); 50 53 } 51 54 … … 53 56 : base() { 54 57 this.factors = new ItemList<ICovarianceFunction>(); 58 AttachEventHandlers(); 59 } 60 61 private void AttachEventHandlers() { 62 this.factors.CollectionReset += (sender, args) => ClearCache(); 63 this.factors.ItemsAdded += (sender, args) => ClearCache(); 64 this.factors.ItemsRemoved += (sender, args) => ClearCache(); 65 this.factors.ItemsReplaced += (sender, args) => ClearCache(); 66 this.factors.ItemsMoved += (sender, args) => ClearCache(); 55 67 } 56 68 … … 86 98 } 87 99 100 private Dictionary<int, Tuple<int, int>> cachedParameterMap; 88 101 public double GetGradient(int i, int j, int k) { 89 // map from parameter index to factor 90 var vi = factors.Select((f, idx) => Enumerable.Repeat(idx, f.GetNumberOfParameters(numberOfVariables))).SelectMany(x => x).ToArray(); 102 if (cachedParameterMap == null) { 103 CalculateParameterMap(); 104 } 105 int ti = cachedParameterMap[k].Item1; 106 k = cachedParameterMap[k].Item2; 91 107 double res = 1.0; 92 int jj = Enumerable.Range(0, k).Count(e => vi[e] == vi[k]);93 108 for (int ii = 0; ii < factors.Count; ii++) { 94 109 var f = factors[ii]; 95 if (ii == vi[k]) {96 res *= f.GetGradient(i, j, jj);110 if (ii == ti) { 111 res *= f.GetGradient(i, j, k); 97 112 } else { 98 113 res *= f.GetCovariance(i, j); … … 101 116 return res; 102 117 } 118 119 private void ClearCache() { 120 cachedParameterMap = null; 121 } 122 123 private void CalculateParameterMap() { 124 cachedParameterMap = new Dictionary<int, Tuple<int, int>>(); 125 int k = 0; 126 for (int ti = 0; ti < factors.Count; ti++) { 127 for (int ti_k = 0; ti_k < factors[ti].GetNumberOfParameters(numberOfVariables); ti_k++) { 128 cachedParameterMap[k++] = 
Tuple.Create(ti, ti_k); 129 } 130 } 131 } 103 132 } 104 133 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEiso.cs
r8455 r8463 114 114 int cols = xt.GetLength(0); 115 115 sd = new double[rows, cols]; 116 double lInv = 1.0 / l; 116 117 if (symmetric) { 117 118 for (int i = 0; i < rows; i++) { 118 119 for (int j = i; j < rows; j++) { 119 sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));120 sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv)); 120 121 sd[j, i] = sd[i, j]; 121 122 } … … 124 125 for (int i = 0; i < rows; i++) { 125 126 for (int j = 0; j < cols; j++) { 126 sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));127 sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv)); 127 128 } 128 129 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs
r8455 r8463 20 20 #endregion 21 21 22 using System; 23 using System.Collections.Generic; 22 24 using System.Linq; 23 25 using HeuristicLab.Common; … … 48 50 this.terms = cloner.Clone(original.terms); 49 51 this.numberOfVariables = original.numberOfVariables; 52 AttachEventHandlers(); 50 53 } 51 54 … … 53 56 : base() { 54 57 this.terms = new ItemList<ICovarianceFunction>(); 58 AttachEventHandlers(); 59 } 60 61 private void AttachEventHandlers() { 62 this.terms.CollectionReset += (sender, args) => ClearCache(); 63 this.terms.ItemsAdded += (sender, args) => ClearCache(); 64 this.terms.ItemsRemoved += (sender, args) => ClearCache(); 65 this.terms.ItemsReplaced += (sender, args) => ClearCache(); 66 this.terms.ItemsMoved += (sender, args) => ClearCache(); 55 67 } 56 68 … … 86 98 } 87 99 100 private Dictionary<int, Tuple<int, int>> cachedParameterMap; 88 101 public double GetGradient(int i, int j, int k) { 89 int ii = 0; 90 while (k > terms[ii].GetNumberOfParameters(numberOfVariables)) { 91 k -= terms[ii].GetNumberOfParameters(numberOfVariables); 102 if (cachedParameterMap == null) { 103 CalculateParameterMap(); 92 104 } 93 return terms[ii].GetGradient(i, j, k); 105 int ti = cachedParameterMap[k].Item1; 106 k = cachedParameterMap[k].Item2; 107 return terms[ti].GetGradient(i, j, k); 108 } 109 private void ClearCache() { 110 cachedParameterMap = null; 111 } 112 113 private void CalculateParameterMap() { 114 cachedParameterMap = new Dictionary<int, Tuple<int, int>>(); 115 int k = 0; 116 for (int ti = 0; ti < terms.Count; ti++) { 117 for (int ti_k = 0; ti_k < terms[ti].GetNumberOfParameters(numberOfVariables); ti_k++) { 118 cachedParameterMap[k++] = Tuple.Create(ti, ti_k); 119 } 120 } 94 121 } 95 122 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs
r8419 r8463 91 91 var rand = RandomParameter.ActualValue; 92 92 for (int i = 0; i < r.Length; i++) 93 r[i] = rand.NextDouble() * 4 - 2;93 r[i] = rand.NextDouble() * 2 - 1; 94 94 95 95 HyperparameterParameter.ActualValue = r; -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8455 r8463 73 73 private double[,] x; 74 74 [Storable] 75 private Scaling scaling; 75 private Scaling inputScaling; 76 [Storable] 77 private Scaling targetScaling; 76 78 77 79 … … 82 84 this.meanFunction = cloner.Clone(original.meanFunction); 83 85 this.covarianceFunction = cloner.Clone(original.covarianceFunction); 84 this.scaling = cloner.Clone(original.scaling); 86 this.inputScaling = cloner.Clone(original.inputScaling); 87 this.targetScaling = cloner.Clone(original.targetScaling); 85 88 this.negativeLogLikelihood = original.negativeLogLikelihood; 86 89 this.targetVariable = original.targetVariable; … … 118 121 119 122 private void CalculateModel(Dataset ds, IEnumerable<int> rows) { 120 scaling = new Scaling(ds, allowedInputVariables, rows); 121 x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling); 122 123 var y = ds.GetDoubleValues(targetVariable, rows).ToArray(); 123 inputScaling = new Scaling(ds, allowedInputVariables, rows); 124 x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling); 125 126 127 targetScaling = new Scaling(ds, new string[] { targetVariable }, rows); 128 var y = targetScaling.GetScaledValues(ds, targetVariable, rows); 124 129 125 130 int n = x.GetLength(0); … … 162 167 int n = x.GetLength(0); 163 168 int nAllowedVariables = x.GetLength(1); 164 double[,] q = new double[n, n];165 double[,] eye = new double[n, n];166 for (int i = 0; i < n; i++) eye[i, i] = 1.0;167 169 168 170 int info; 169 alglib. 
densesolverreport denseSolveRep;170 171 alglib.spdmatrixcholesky solvem(l, n, false, eye, n, out info, out denseSolveRep, out q);172 // double[,] a2 = outerProd(alpha, alpha);171 alglib.matinvreport matInvRep; 172 173 alglib.spdmatrixcholeskyinverse(ref l, n, false, out info, out matInvRep); 174 if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients."); 173 175 for (int i = 0; i < n; i++) { 174 for (int j = 0; j < n; j++)175 q[i, j] = q[i, j] / sqrSigmaNoise - alpha[i] * alpha[j]; // a2[i,j];176 } 177 178 double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => q[i, i]).Sum();176 for (int j = 0; j <= i; j++) 177 l[i, j] = l[i, j] / sqrSigmaNoise - alpha[i] * alpha[j]; 178 } 179 180 double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => l[i, i]).Sum(); 179 181 180 182 double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)]; … … 187 189 if (covGradients.Length > 0) { 188 190 for (int i = 0; i < n; i++) { 189 for (int j = 0; j < n; j++) {190 for (int k = 0; k < covGradients.Length; k++) {191 covGradients[k] += q[i, j] * covarianceFunction.GetGradient(i, j, k);191 for (int k = 0; k < covGradients.Length; k++) { 192 for (int j = 0; j < i; j++) { 193 covGradients[k] += l[i, j] * covarianceFunction.GetGradient(i, j, k); 192 194 } 195 covGradients[k] += 0.5 * l[i, i] * covarianceFunction.GetGradient(i, i, k); 193 196 } 194 197 } 195 covGradients = covGradients.Select(g => g / 2.0).ToArray();196 198 } 197 199 … … 219 221 220 222 private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) { 221 var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, scaling);223 var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling); 222 224 int newN = newX.GetLength(0); 223 225 int n = x.GetLength(0); … … 251 253 // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out 
denseSolveRep, out v); 252 254 253 254 for (int i = 0; i < newN; i++) { 255 // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha); 256 yield return ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha); 257 // var sumV2 = prod(GetCol(v, i), GetCol(v, i)); 258 // predVar[i] = kss[i] - sumV2; 259 } 255 double targetScaleMin, targetScaleMax; 256 targetScaling.GetScalingParameters(targetVariable, out targetScaleMin, out targetScaleMax); 257 return Enumerable.Range(0, newN) 258 .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha)) 259 .Select(m => m * (targetScaleMax - targetScaleMin) + targetScaleMin); 260 //for (int i = 0; i < newN; i++) { 261 // // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha); 262 // // var sumV2 = prod(GetCol(v, i), GetCol(v, i)); 263 // // predVar[i] = kss[i] - sumV2; 264 //} 260 265 261 266 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs
r8416 r8463 75 75 76 76 public override IOperation Apply() { 77 var m = ModelParameter.ActualValue;78 var data = ProblemDataParameter.ActualValue;77 var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone(); 78 var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone(); 79 79 var s = new GaussianProcessRegressionSolution(m, data); 80 80 -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanProd.cs
r8439 r8463 82 82 public double[] GetGradients(int k, double[,] x) { 83 83 double[] res = Enumerable.Repeat(1.0, x.GetLength(0)).ToArray(); 84 foreach (var f in factors) { 85 var numParam = f.GetNumberOfParameters(numberOfVariables); 86 if (k >= 0 && k < numParam) { 84 // find index of factor for the given k 85 int j = 0; 86 while (k >= factors[j].GetNumberOfParameters(numberOfVariables)) { 87 k -= factors[j].GetNumberOfParameters(numberOfVariables); 88 j++; 89 } 90 for (int i = 0; i < factors.Count; i++) { 91 var f = factors[i]; 92 if (i == j) { 87 93 // multiply gradient 88 94 var g = f.GetGradients(k, x); 89 for (int i = 0; i < res.Length; i++) res[i] *= g[i]; 90 k -= numParam; 95 for (int ii = 0; ii < res.Length; ii++) res[ii] *= g[ii]; 91 96 } else { 92 97 // multiply mean 93 98 var m = f.GetMean(x); 94 for (int i = 0; i < res.Length; i++) res[i] *= m[i]; 95 k -= numParam; 99 for (int ii = 0; ii < res.Length; ii++) res[ii] *= m[ii]; 96 100 } 97 101 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs
r8401 r8463 20 20 #endregion 21 21 22 using System;23 22 using System.Collections.Generic; 24 23 using System.Linq; … … 32 31 public static double SqrDist(double x, double y) { 33 32 double d = x - y; 34 return Math.Max(d * d, 0.0);33 return d * d; 35 34 } 36 35 37 36 public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) { 38 return x.Zip(y, SqrDist).Sum();37 return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum(); 39 38 } 40 39 -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/Scaling.cs
r8416 r8463 59 59 return ds.GetDoubleValues(variable, rows).Select(x => (x - min) / (max - min)); 60 60 } 61 62 public void GetScalingParameters(string variable, out double min, out double max) { 63 min = scalingParameters[variable].Item1; 64 max = scalingParameters[variable].Item2; 65 } 61 66 } 62 67 } -
trunk/sources/HeuristicLab.ExtLibs/HeuristicLab.ALGLIB/3.6.0/ALGLIB-3.6.0
- Property svn:ignore
-
old new 1 1 obj 2 *.user
-
- Property svn:ignore
-
trunk/sources/HeuristicLab.ExtLibs/HeuristicLab.ALGLIB/3.6.0/HeuristicLab.ALGLIB-3.6.0
- Property svn:ignore
-
old new 1 1 obj 2 2 Plugin.cs 3 *.user
-
- Property svn:ignore
-
trunk/sources/HeuristicLab.Tests/HeuristicLab.Algorithms.DataAnalysis-3.4/GaussianProcessFunctionsTest.cs
r8455 r8463 123 123 ); 124 124 125 //prod.Factors.Clear();126 //prod.Factors.Add(new MeanConst());127 //prod.Factors.Add(new MeanLinear());128 //TestMeanFunction(prod, 1,129 // new double[] { 2.9087, 1.8646, 3.1971, 1.9889, 1.7639, 1.2801, 2.8059, 3.7243, 3.1901, 1.4979},130 //new double[][]131 //{132 // new double[] { 1.8436, 2.1279, 3.1166, 1.6886, 3.2505, 2.9613, 2.3260, 1.7293, 1.7697, 2.3982},133 // new double[] { 0.6443, 0.3786, 0.8116, 0.5328, 0.3507, 0.9390, 0.8759, 0.5502, 0.6225, 0.5870},134 // new double[] { 0.2077, 0.3012, 0.4709, 0.2305, 0.8443, 0.1948, 0.2259, 0.1707, 0.2277, 0.4357},135 // new double[] { 0.3111, 0.9234, 0.4302, 0.1848, 0.9049, 0.9797, 0.4389, 0.1111, 0.2581, 0.4087},136 // new double[] { 0.5949, 0.2622, 0.6028, 0.7112, 0.2217, 0.1174, 0.2967, 0.3188, 0.4242, 0.5079},137 // new double[] { 0.0855, 0.2625, 0.8010, 0.0292, 0.9289, 0.7303, 0.4886, 0.5785, 0.2373, 0.4588 }138 //}139 //);125 prod.Factors.Clear(); 126 prod.Factors.Add(new MeanConst()); 127 prod.Factors.Add(new MeanLinear()); 128 TestMeanFunction(prod, 1, 129 new double[] { 1.843574580675791, 2.127929337522181, 3.116568910281474, 1.688566127130978, 3.250517738447450, 2.961262743634280, 2.326019412613392, 1.729286976436201, 1.769654419215176, 2.398170347588917 }, 130 new double[][] 131 { 132 new double[] { 2.9088, 1.8646, 3.1971, 1.9890, 1.7640, 1.2801, 2.8059, 3.7243, 3.1899 ,1.4978 }, 133 new double[] { 0.4173, 0.0497, 0.9027, 0.9448, 0.4909, 0.4893, 0.3377, 0.9001, 0.3692 ,0.1112 }, 134 new double[] { 0.7803, 0.3897, 0.2417, 0.4039, 0.0965, 0.1320, 0.9421, 0.9561, 0.5752 ,0.0598 }, 135 new double[] { 0.2348, 0.3532, 0.8212, 0.0154, 0.0430, 0.1690, 0.6491, 0.7317, 0.6477 ,0.4509 }, 136 new double[] { 0.5470, 0.2963, 0.7447, 0.1890, 0.6868, 0.1835, 0.3685, 0.6256, 0.7802 ,0.0811 }, 137 new double[] { 0.9294, 0.7757, 0.4868, 0.4359, 0.4468, 0.3063, 0.5085, 0.5108, 0.8176 ,0.7948 } 138 } 139 ); 140 140 } 141 141 -
trunk/sources/HeuristicLab.Tests/HeuristicLab.Algorithms.DataAnalysis-3.4/GaussianProcessRegressionTest.cs
r8455 r8463 21 21 22 22 using System; 23 using System.Collections.Generic;24 23 using System.Linq; 25 24 using System.Threading; 26 25 using HeuristicLab.Algorithms.DataAnalysis; 27 26 using HeuristicLab.Common; 28 using HeuristicLab.Core;29 27 using HeuristicLab.Problems.DataAnalysis; 30 using HeuristicLab. SequentialEngine;28 using HeuristicLab.Problems.Instances.DataAnalysis; 31 29 using Microsoft.VisualStudio.TestTools.UnitTesting; 32 30 33 namespace HeuristicLab _33.Tests {31 namespace HeuristicLab.Algorithms.DataAnalysis_34.Tests { 34 32 [TestClass] 33 [DeploymentItem(@"HeuristicLab.Algorithms.DataAnalysis-3.4/co2.txt")] 35 34 public class GaussianProcessRegressionTest { 36 35 public GaussianProcessRegressionTest() { } … … 58 57 ex = null; 59 58 60 var cv = new CrossValidation();61 59 var alg = new GaussianProcessRegression(); 62 alg.Engine = new SequentialEngine(); 63 cv.Algorithm = alg; 60 alg.Engine = new HeuristicLab.SequentialEngine.SequentialEngine(); 64 61 65 cv.Problem = new RegressionProblem();66 var rand = new HeuristicLab.Random.MersenneTwister();67 double[,] data = GenerateData(100, rand);68 List<string> variables = new List<string>() { "x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x10", "y" };69 Dataset ds = new Dataset(variables, data);70 cv.Problem.ProblemDataParameter.ActualValue = new RegressionProblemData(ds, variables.Take(10), variables.Last());71 cv.Algorithm.Prepare();72 cv.Folds.Value = 5;73 cv.SamplesStart.Value = 0;74 cv.SamplesEnd.Value = 99;62 alg.Problem = new RegressionProblem(); 63 var provider = new RegressionCSVInstanceProvider(); 64 var problemData = (RegressionProblemData)provider.ImportData("co2.txt"); 65 problemData.TargetVariableParameter.ActualValue = problemData.TargetVariableParameter.ValidValues.First(x => x.Value == "interpolated"); 66 problemData.InputVariables.SetItemCheckedState(problemData.InputVariables.First(x => x.Value == "year"), false); 67 
problemData.InputVariables.SetItemCheckedState(problemData.InputVariables.First(x => x.Value == "month"), false); 68 problemData.InputVariables.SetItemCheckedState(problemData.InputVariables.First(x => x.Value == "average"), false); 69 problemData.InputVariables.SetItemCheckedState(problemData.InputVariables.First(x => x.Value == "interpolated"), false); 70 problemData.InputVariables.SetItemCheckedState(problemData.InputVariables.First(x => x.Value == "trend"), false); 71 problemData.InputVariables.SetItemCheckedState(problemData.InputVariables.First(x => x.Value == "#days"), false); 75 72 76 cv.ExceptionOccurred += new EventHandler<EventArgs<Exception>>(cv_ExceptionOccurred); 77 cv.Stopped += new EventHandler(cv_Stopped); 73 alg.Problem.ProblemDataParameter.Value = problemData; 78 74 79 cv.Prepare(); 80 cv.Start(); 75 alg.ExceptionOccurred += new EventHandler<EventArgs<Exception>>(cv_ExceptionOccurred); 76 alg.Stopped += new EventHandler(cv_Stopped); 77 78 alg.Prepare(); 79 alg.Start(); 81 80 trigger.WaitOne(); 82 81 if (ex != null) throw ex; 83 82 84 TestContext.WriteLine("Runtime: {0}", cv.ExecutionTime.ToString()); 85 86 } 87 88 // poly-10: y = x1 x2 + x3 x4 + x5 x6 + x1 x7 x9 + x3 x6 x10 89 private double[,] GenerateData(int n, IRandom random) { 90 double[,] data = new double[n, 11]; 91 for (int i = 0; i < n; i++) { 92 for (int c = 0; c < 10; c++) { 93 data[i, c] = random.NextDouble() * 2.0 - 1.0; 94 } 95 data[i, 10] = 96 data[i, 0] * data[i, 1] + 97 data[i, 2] * data[i, 3] + 98 data[i, 4] * data[i, 5] + 99 data[i, 0] * data[i, 6] * data[i, 8] + 100 data[i, 2] * data[i, 5] * data[i, 9]; 101 } 102 return data; 83 TestContext.WriteLine("Runtime: {0}", alg.ExecutionTime.ToString()); 103 84 } 104 85 -
trunk/sources/HeuristicLab.Tests/HeuristicLab.Tests.csproj
r8439 r8463 279 279 <Compile Include="HeuristicLab-3.3\ContentViewTests.cs" /> 280 280 <Compile Include="HeuristicLab-3.3\ParameterVisibilityTest.cs" /> 281 <Compile Include="HeuristicLab-3.3\GaussianProcessRegressionTest.cs" />282 281 <Compile Include="HeuristicLab-3.3\DeepCloneableCloningTest.cs" /> 283 282 <Compile Include="HeuristicLab-3.3\GeneticAlgorithmTest.cs" /> … … 290 289 <Compile Include="HeuristicLab-3.3\ThreadSafeLogTest.cs" /> 291 290 <Compile Include="HeuristicLab-3.3\ToStringTest.cs" /> 291 <Compile Include="HeuristicLab.Algorithms.DataAnalysis-3.4\GaussianProcessModelTest.cs" /> 292 292 <Compile Include="HeuristicLab.Algorithms.DataAnalysis-3.4\GaussianProcessFunctionsTest.cs" /> 293 <Compile Include="HeuristicLab.Algorithms.DataAnalysis-3.4\GaussianProcessRegressionTest.cs" /> 293 294 <Compile Include="HeuristicLab.Analysis-3.3\MultidimensionalScalingTest.cs" /> 294 295 <Compile Include="HeuristicLab.Encodings.BinaryVectorEncoding-3.3\Auxiliary.cs" /> … … 388 389 </ItemGroup> 389 390 <ItemGroup> 391 <Content Include="HeuristicLab.Algorithms.DataAnalysis-3.4\co2.txt"> 392 <CopyToOutputDirectory>Always</CopyToOutputDirectory> 393 </Content> 390 394 <None Include="app.config" /> 391 395 <None Include="Builder.testsettings">
Note: See TracChangeset for help on using the changeset viewer.