Changeset 8416
- Timestamp: 08/06/12 15:02:34
- Location: trunk/sources
- Files: 6 added, 17 edited
trunk/sources/HeuristicLab.Algorithms.DataAnalysis.Views/3.4/HeuristicLab.Algorithms.DataAnalysis.Views-3.4.csproj
r7021 → r8416: added <Compile> entries for the new Gaussian process views (inserted before the existing KMeansClusteringModelView.cs entry):

    <Compile Include="CovarianceProdView.cs">
      <SubType>UserControl</SubType>
    </Compile>
    <Compile Include="CovarianceProdView.Designer.cs">
      <DependentUpon>CovarianceProdView.cs</DependentUpon>
    </Compile>
    <Compile Include="CovarianceSumView.cs">
      <SubType>UserControl</SubType>
    </Compile>
    <Compile Include="CovarianceSumView.Designer.cs">
      <DependentUpon>CovarianceSumView.cs</DependentUpon>
    </Compile>
    <Compile Include="GaussianProcessModelView.cs">
      <SubType>UserControl</SubType>
    </Compile>
    <Compile Include="GaussianProcessModelView.Designer.cs">
      <DependentUpon>GaussianProcessModelView.cs</DependentUpon>
    </Compile>
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs
r8401 → r8416:
- The clone constructor no longer uses shallow copies of the cached matrices; when original.x is set, x, xt and k are copied element-wise into freshly allocated arrays (Array.Copy), and the symmetric flag is copied as well.
- A static readonly double[] emptyArray = new double[0] field was added.
- SetParameter(double[] hyp, double[,] x) and SetParameter(double[] hyp, double[,] x, double[,] xt) were replaced by SetParameter(double[] hyp), which throws an ArgumentException ("No hyperparameters are allowed for the linear covariance function.") for a non-empty vector and resets the cached k, plus SetData(double[,] x) and SetData(double[,] x, double[,] xt), which store the data and set the symmetric flag.
- GetDiagonalCovariances() was removed.
- GetGradient(int i, int j) now returns emptyArray instead of throwing a NotSupportedException.
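For context, the reworked linear covariance is parameter-free: SetParameter only validates that the hyperparameter vector is empty, SetData caches the input matrices, and the gradient is an empty array. The following is a minimal, self-contained sketch of that behaviour, not the HeuristicLab class itself; the class name is made up and the caching of the k matrix is omitted.

    using System;

    // Simplified stand-in for the reworked ICovarianceFunction contract.
    public class LinearCovarianceSketch {
      private static readonly double[] emptyArray = new double[0];
      private double[,] x;   // rows of the first data set
      private double[,] xt;  // rows of the second data set

      // The linear covariance has no hyperparameters.
      public int GetNumberOfParameters(int numberOfVariables) { return 0; }

      public void SetParameter(double[] hyp) {
        if (hyp.Length > 0)
          throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
      }

      public void SetData(double[,] x) { SetData(x, x); }
      public void SetData(double[,] x, double[,] xt) { this.x = x; this.xt = xt; }

      // cov(i, j) = <x_i, xt_j>, the dot product of the two rows
      public double GetCovariance(int i, int j) {
        int cols = x.GetLength(1);
        double sum = 0.0;
        for (int c = 0; c < cols; c++) sum += x[i, c] * xt[j, c];
        return sum;
      }

      // No hyperparameters, therefore no gradient components.
      public double[] GetGradient(int i, int j) { return emptyArray; }
    }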
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs
r8323 → r8416: the class was rewritten to fit the HeuristicLab item infrastructure.
- The standard GPL license header was added and the class was moved from the HeuristicLab.Algorithms.DataAnalysis.GaussianProcess namespace to HeuristicLab.Algorithms.DataAnalysis.
- CovarianceProd is now a [StorableClass] [Item(Name = "CovarianceProd", Description = "Product covariance function for Gaussian processes.")] class deriving from Item and implementing ICovarianceFunction; the former IList<ICovarianceFunction> covariances field became a [Storable] ItemList<ICovarianceFunction> factors exposed through a Factors property, and a [Storable] int numberOfVariables field was added.
- A storable constructor, a clone constructor (cloning factors and copying numberOfVariables), a default constructor that creates an empty factor list, and a Clone override were added.
- The NumberOfParameters property became GetNumberOfParameters(int numberOfVariables), which remembers numberOfVariables and sums the factors' parameter counts.
- SetMatrix and SetHyperparamter were replaced by SetParameter(double[] hyp), which hands each factor its slice of the hyperparameter vector (Skip/Take with a running offset), and SetData(double[,] x) / SetData(double[,] x, double[,] xt), which forward the data to all factors.
- GetCovariance(i, j) multiplies the factors' covariances; GetDiagonalCovariances() and GetDerivatives(i, j) were removed; GetGradient(i, j) concatenates the factors' gradients via SelectMany.
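The slicing of one flat hyperparameter vector across the factors can be shown in isolation. The sketch below is stand-alone, not HeuristicLab code; the factor parameter counts in Main are hypothetical.

    using System;
    using System.Linq;

    public static class HyperparameterPartitioning {
      // Splits a flat hyperparameter vector into consecutive chunks,
      // one chunk per factor, using each factor's parameter count.
      public static double[][] Split(double[] hyp, int[] parameterCounts) {
        var chunks = new double[parameterCounts.Length][];
        int offset = 0;
        for (int f = 0; f < parameterCounts.Length; f++) {
          chunks[f] = hyp.Skip(offset).Take(parameterCounts[f]).ToArray();
          offset += parameterCounts[f];
        }
        if (offset != hyp.Length)
          throw new ArgumentException("Hyperparameter vector length does not match the factors.");
        return chunks;
      }

      public static void Main() {
        // e.g. a product of an SEiso-like factor (2 parameters) and a linear factor (0 parameters)
        var chunks = Split(new[] { 0.1, 0.5 }, new[] { 2, 0 });
        Console.WriteLine(string.Join(" | ", chunks.Select(c => "[" + string.Join(", ", c) + "]")));
      }
    }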
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEard.cs
r8403 → r8416:
- The clone constructor replaces the earlier shallow copies: when original.x is set, x, xt, sd and l are copied element-wise into new arrays; sf2 and the symmetric flag are copied as well.
- SetParameter(double[] hyp, double[,] x[, double[,] xt]) was split into SetParameter(double[] hyp), which derives the length scales l from exp of all but the last hyperparameter, sets sf2 = exp(2 * last) and resets the cached squared distances, and SetData(double[,] x) / SetData(double[,] x, double[,] xt), which store the data, set the symmetric flag and reset sd.
- GetDiagonalCovariances() was removed.
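The deep-copy pattern used in these reworked clone constructors (allocate a fresh matrix, then Array.Copy all elements) can be isolated as follows; the class here is made up purely for illustration.

    using System;

    public class CachedMatrixHolder {
      private double[,] x;  // cached data, may still be null before SetData was called

      public CachedMatrixHolder() { }

      // Deep-copy clone constructor: allocate a fresh matrix and copy every element.
      protected CachedMatrixHolder(CachedMatrixHolder original) {
        if (original.x != null) {
          x = new double[original.x.GetLength(0), original.x.GetLength(1)];
          Array.Copy(original.x, x, x.Length);  // Length of a 2D array is rows * cols
        }
      }

      public void SetData(double[,] x) { this.x = x; }
      public CachedMatrixHolder DeepCopy() { return new CachedMatrixHolder(this); }
    }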
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEiso.cs
r8401 → r8416:
- The clone constructor deep-copies x, xt and sd (when original.x is set) instead of keeping shallow references; sf2 and l are copied as before.
- SetParameter(double[] hyp, double[,] x[, double[,] xt]) was split: SetParameter(double[] hyp) now sets l = exp(hyp[0]) and sf2 = min(1E6, exp(2 * hyp[1])) (an upper limit for the scale) and resets the cached squared distances, while SetData(double[,] x) / SetData(double[,] x, double[,] xt) store the data and the symmetric flag.
- GetDiagonalCovariances() was removed.
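The isotropic squared-exponential kernel value that results from this parameterization can be written down compactly. The sketch below is a simplified, self-contained version of the computation (no caching, no symmetric case), not the HeuristicLab class.

    using System;
    using System.Linq;

    public static class SEisoSketch {
      // k(x, x') = sf2 * exp(-||x/l - x'/l||^2 / 2), with sf2 capped at 1E6.
      public static double Covariance(double[] hyp, double[] xi, double[] xj) {
        double l = Math.Exp(hyp[0]);
        double sf2 = Math.Min(1E6, Math.Exp(2 * hyp[1]));  // upper limit for the scale
        double sqrDist = xi.Zip(xj, (a, b) => (a - b) / l)
                           .Select(d => d * d)
                           .Sum();
        return sf2 * Math.Exp(-sqrDist / 2.0);
      }

      public static void Main() {
        Console.WriteLine(Covariance(new[] { 0.0, 0.0 }, new[] { 1.0, 2.0 }, new[] { 1.5, 1.0 }));
      }
    }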
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs
r8366 → r8416:
- The standard GPL license header was added and the class was moved from HeuristicLab.Algorithms.DataAnalysis.GaussianProcess to HeuristicLab.Algorithms.DataAnalysis.
- Two constructor bugs were fixed: the clone constructor now clones original.terms (instead of the field of the clone itself) and copies numberOfVariables, and the default constructor initializes terms with an empty ItemList<ICovarianceFunction>.
- SetParameter(double[] hyp, double[,] x) was replaced by SetParameter(double[] hyp), which passes each term its slice of the hyperparameter vector and advances the offset by the term's own parameter count (previously the offset was advanced by numberOfVariables); SetData(double[,] x) / SetData(double[,] x, double[,] xt) forward the data to all terms.
- GetCovariance(i, j) sums the terms' covariances and GetGradient(i, j) concatenates their gradients; the squared-exponential code (l, sf2, sd, GetDiagonalCovariances, CalculateSquaredDistances) that had been pasted into this class was removed.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8396 → r8416:
- The stored raw hyperparameter vector (hyp), the public Hyperparameters property and the separate meanHyp/covHyp arrays were removed.
- The clone constructor now clones meanFunction, covarianceFunction and scaling and copies negativeLogLikelihood, targetVariable and sqrSigmaNoise; the arrays (allowedInputVariables, alpha, l, x) are kept as shallow copies, with a comment noting that they cannot be modified.
- In the main constructor the mean and covariance functions are cloned before use; sqrSigmaNoise = exp(2 * hyp[0]) with a lower limit of 10E-6; the remaining hyperparameters are split according to GetNumberOfParameters and passed to meanFunction.SetParameter and covarianceFunction.SetParameter.
- CalculateModel(ds, rows) lost its targetVariable/allowedInputVariables parameters and now calls meanFunction.SetData(x) and covarianceFunction.SetData(x) instead of the old SetParameter overloads; prediction uses covarianceFunction.SetData(x, newX) and meanFunction.SetData(newX).
- The unused Changed event and its OnChanged helper were removed.
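The constructor therefore consumes one flat hyperparameter vector in a fixed layout: the first entry parameterises the noise, the next block belongs to the mean function and the rest to the covariance function. A stand-alone sketch of that split, with made-up parameter counts in Main:

    using System;
    using System.Linq;

    public static class GpHyperparameterLayout {
      // hyp = [ noise | mean parameters | covariance parameters ]
      public static void Split(double[] hyp, int meanParameterCount, int covParameterCount,
                               out double sqrSigmaNoise, out double[] meanHyp, out double[] covHyp) {
        if (1 + meanParameterCount + covParameterCount != hyp.Length)
          throw new ArgumentException("Unexpected length of the hyperparameter vector.");
        sqrSigmaNoise = Math.Max(10E-6, Math.Exp(2.0 * hyp[0]));  // lower limit for the noise level
        meanHyp = hyp.Skip(1).Take(meanParameterCount).ToArray();
        covHyp = hyp.Skip(1 + meanParameterCount).Take(covParameterCount).ToArray();
      }

      public static void Main() {
        double noise; double[] meanHyp, covHyp;
        // e.g. a constant mean (1 parameter) and an SEiso-like covariance (2 parameters)
        Split(new[] { -2.0, 0.0, 0.3, 0.1 }, 1, 2, out noise, out meanHyp, out covHyp);
        Console.WriteLine("{0} | [{1}] | [{2}]", noise, string.Join(", ", meanHyp), string.Join(", ", covHyp));
      }
    }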
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs
r8401 → r8416: bug fix in the results handling: the regression solution is now added and updated under SolutionParameterName (previously ResultsParameterName was used as the key); the training and test R² results are unchanged.
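The fix is a plain add-or-update on the results collection with the corrected key. The same pattern on an ordinary dictionary, with illustrative names only:

    using System.Collections.Generic;

    public static class ResultsUpdate {
      // Add the solution under its key if missing, otherwise overwrite the stored value.
      public static void AddOrUpdate(IDictionary<string, object> results, string solutionKey, object solution) {
        if (!results.ContainsKey(solutionKey)) results.Add(solutionKey, solution);
        else results[solutionKey] = solution;
      }
    }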
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs
r8401 → r8416: the interface was split: SetParameter(double[] hyp, double[,] x) and SetParameter(double[] hyp, double[,] x, double[,] xt) were replaced by SetParameter(double[] hyp), SetData(double[,] x) and SetData(double[,] x, double[,] xt), and GetDiagonalCovariances() was removed; GetNumberOfParameters, GetCovariance and GetGradient are unchanged.
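The practical consequence is a two-phase protocol: hyperparameters are set in one step, data in another, and only then are covariances and gradients queried. The stand-alone interface below mirrors, but is not, the HeuristicLab interface (it drops the IItem base and HeuristicLab namespaces):

    public interface ICovarianceFunctionSketch {
      int GetNumberOfParameters(int numberOfVariables);
      void SetParameter(double[] hyp);          // step 1: hyperparameters only
      void SetData(double[,] x);                // step 2a: symmetric case (training data)
      void SetData(double[,] x, double[,] xt);  // step 2b: cross-covariances (prediction)
      double GetCovariance(int i, int j);       // step 3: query covariance matrix entries
      double[] GetGradient(int i, int j);       // step 3: query gradients w.r.t. the hyperparameters
    }

Separating the two presumably allows one data set to be reused across many hyperparameter updates during gradient-based optimization, and vice versa.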
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs
r8401 → r8416: SetParameter(double[] hyp, double[,] x) was replaced by SetParameter(double[] hyp) plus SetData(double[,] x); GetNumberOfParameters, GetMean and GetGradients are unchanged.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanConst.cs
r8401 → r8416: the stored row count n was removed; SetParameter(double[] hyp) now only validates and stores the single constant c, SetData(double[,] x) is a no-op, and GetMean/GetGradients derive the output length from x.GetLength(0).
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanLinear.cs
r8401 → r8416: the stored row count n was removed; SetParameter(double[] hyp) simply copies the coefficient vector alpha (the check against the number of variables moved into GetMean as a sanity check on alpha.Length vs. x.GetLength(1)), SetData(double[,] x) is a no-op, and GetMean/GetGradients derive the number of rows from x.GetLength(0).
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanZero.cs
r8401 → r8416: the stored row count n was removed; SetParameter(double[] hyp) only checks that no hyperparameters are passed, SetData(double[,] x) is a no-op, and GetMean/GetGradients return vectors of length x.GetLength(0).
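All three mean functions now derive the output length from the x argument rather than a stored row count, so GetMean and GetGradients can be called with differently sized matrices after a single SetParameter. A self-contained sketch for a linear mean, simplified from MeanLinear above (no HeuristicLab base class, no gradients):

    using System;
    using System.Linq;

    public class LinearMeanSketch {
      private double[] alpha;  // one coefficient per input variable

      public void SetParameter(double[] hyp) { alpha = hyp.ToArray(); }

      // m(x_r) = sum_j alpha_j * x[r, j]; the number of rows comes from x itself.
      public double[] GetMean(double[,] x) {
        if (alpha.Length != x.GetLength(1))
          throw new ArgumentException("The number of hyperparameters must match the number of variables.");
        int n = x.GetLength(0);
        int cols = x.GetLength(1);
        return Enumerable.Range(0, n)
          .Select(r => Enumerable.Range(0, cols).Sum(j => alpha[j] * x[r, j]))
          .ToArray();
      }
    }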
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj
r8401 → r8416: added <Compile> entries for GaussianProcess\CovarianceProd.cs and GaussianProcess\CovarianceSum.cs.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs
r8371 → r8416: the unused using System.Collections.Generic was removed, the interface now derives from IRegressionModel only (IDataAnalysisModel was dropped from the base list), and the Hyperparameters property was removed; NegativeLogLikelihood, MeanFunction, CovarianceFunction and GetHyperparameterGradients() remain.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/Scaling.cs
r8323 → r8416: added an [Item(Name = "Scaling", Description = "Contains information about scaling of variables for data-analysis algorithms.")] attribute to the class.
trunk/sources/HeuristicLab.Algorithms.GradientDescent/3.3/LbfgsState.cs
r8401 → r8416: bug fix in the RstateStage storable wrapper property: the setter now assigns state.rstate.stage (previously it wrote to state.p).