- Timestamp: 12/19/12 11:16:51
- Location: branches/RuntimeOptimizer
- Files: 1 deleted, 29 edited
branches/RuntimeOptimizer
- Property svn:mergeinfo changed: /trunk/sources merged: 8972-8974,8976,8978-8994,8999-9019,9021-9031,9033-9039,9043,9049,9052,9055-9057,9063,9068,9072,9075-9076
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Algorithms.DataAnalysis merged: 8982
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceConst.cs
r8929 → r9078
- Adds using HeuristicLab.Parameters.
- Removes the [Storable] scale cache and the [Storable] HyperParameter<DoubleValue> scaleParameter field; ScaleParameter now resolves (IValueParameter<DoubleValue>)Parameters["Scale"].
- The cloning constructor no longer clones the parameter or the cached value, and the default constructor registers new OptionalValueParameter<DoubleValue>("Scale", "The scale of the constant covariance function.") instead of a HyperParameter.
- Drops the AfterDeserialization hook and the RegisterEvents caching (Util.AttachValueChangeHandler).
- GetNumberOfParameters now returns ScaleParameter.Value != null ? 0 : 1 instead of testing scaleParameter.Fixed.
- SetParameter(double[] hyp) becomes SetParameter(double[] p) and delegates to a new private GetParameterValues(double[] p, out double scale), which reads ScaleParameter.Value when set and otherwise uses Math.Exp(2 * p[c]); an ArgumentException is thrown when p.Length does not match the number of free parameters.
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices), which returns a ParameterizedCovarianceFunction with Covariance = (x, i, j) => scale, CrossCovariance = (x, xt, i, j) => scale and CovarianceGradient delegating to a now-static GetGradient that yields 2.0 * scale.
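Since the files below all follow the same refactoring, a minimal sketch of the pattern may help: parameter values are gathered once and captured in closures instead of being cached in fields and kept in sync via events. The ParameterizedCovarianceFunction shape shown here is assumed from its usage in this diff (the type itself is introduced elsewhere in the changeset), and ConstCovarianceExample is a hypothetical stand-alone illustration, not the actual HeuristicLab class:

    using System;
    using System.Collections.Generic;

    // Assumed shape of the delegate container, inferred from how it is used in this diff.
    public sealed class ParameterizedCovarianceFunction {
      public Func<double[,], int, int, double> Covariance;                      // k(x_i, x_j) within one matrix
      public Func<double[,], double[,], int, int, double> CrossCovariance;      // k(x_i, xt_j) across two matrices
      public Func<double[,], int, int, IEnumerable<double>> CovarianceGradient; // dk w.r.t. the free (log) parameters
    }

    // Hypothetical stand-alone factory mirroring the new CovarianceConst logic:
    // the (log-encoded) free parameter is decoded once and captured by the closures.
    public static class ConstCovarianceExample {
      public static ParameterizedCovarianceFunction Create(double[] p, double? fixedScale) {
        double scale = fixedScale ?? Math.Exp(2 * p[0]); // a fixed value wins, otherwise exp(2 * log-parameter)
        return new ParameterizedCovarianceFunction {
          Covariance = (x, i, j) => scale,
          CrossCovariance = (x, xt, i, j) => scale,
          CovarianceGradient = (x, i, j) => new[] { 2.0 * scale }
        };
      }
    }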
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinear.cs
r8931 → r9078
- SetParameter(double[] hyp) becomes SetParameter(double[] p); the exception message now reads "No parameters are allowed for the linear covariance function."
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices), which also rejects a non-empty parameter vector and returns a ParameterizedCovarianceFunction with Covariance = (x, i, j) => Util.ScalarProd(x, i, j, 1, columnIndices), CrossCovariance = (x, xt, i, j) => Util.ScalarProd(x, i, xt, j, 1.0, columnIndices) and CovarianceGradient = (x, i, j) => Enumerable.Empty<double>().
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinearArd.cs
r8933 → r9078
- Adds using HeuristicLab.Parameters.
- Removes the [Storable] inverseLength cache and the HyperParameter<DoubleArray> inverseLengthParameter field; InverseLengthParameter now resolves Parameters["InverseLength"], and the constructor registers an OptionalValueParameter<DoubleArray>("InverseLength", "The inverse length parameter for ARD.").
- Drops the cloning of the cached array, the AfterDeserialization hook and the RegisterEvents caching.
- GetNumberOfParameters returns numberOfVariables while InverseLengthParameter.Value == null, otherwise 0.
- SetParameter(double[] p) uses a new private GetParameterValues(double[] p, out double[] inverseLength): the stored array when set, otherwise p.Select(e => 1.0 / Math.Exp(e)).ToArray().
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction, which returns closures over inverseLength: Covariance and CrossCovariance call Util.ScalarProd with inverseLength and columnIndices, and CovarianceGradient delegates to a now-static GetGradient(x, i, j, inverseLength, columnIndices) that defaults columnIndices to all columns.
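As context for why closures are convenient here, a hedged sketch of how a caller might evaluate a returned ParameterizedCovarianceFunction over a data matrix (BuildGramMatrix is a hypothetical helper for illustration, not a HeuristicLab API; the container type is the assumed one sketched above):

    public static class GramMatrixExample {
      // Hypothetical helper: fills a symmetric kernel matrix from one ParameterizedCovarianceFunction,
      // e.g. the result of covarianceFunction.GetParameterizedCovarianceFunction(p, columnIndices).
      public static double[,] BuildGramMatrix(ParameterizedCovarianceFunction cov, double[,] x) {
        int n = x.GetLength(0);
        var k = new double[n, n];
        for (int i = 0; i < n; i++) {
          for (int j = i; j < n; j++) {
            k[i, j] = cov.Covariance(x, i, j); // parameter values are already baked into the closure
            k[j, i] = k[i, j];                 // covariance matrices are symmetric
          }
        }
        return k;
      }
    }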
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMask.cs
r8933 → r9078
- Adds using System.Linq.Expressions.
- Removes the cached selectedDimensions and cov fields together with their ValueParameter fields; SelectedDimensionsParameter and CovarianceFunctionParameter now resolve the Parameters collection. "SelectedDimensions" becomes an OptionalValueParameter<IntArray>, and the "CovarianceFunction" value parameter keeps its CovarianceSquaredExponentialIso default.
- Drops the AfterDeserialization hook and the RegisterEvents caching (ordering and deduplicating the selected dimensions, tracking covParameter.ValueChanged).
- GetNumberOfParameters asks the wrapped covariance function for either numberOfVariables or SelectedDimensionsParameter.Value.Length free parameters.
- SetParameter(double[] p) simply forwards to CovarianceFunctionParameter.Value.SetParameter(p).
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction, which throws an InvalidOperationException when columnIndices != null ("Stacking of masking covariance functions is not supported.") and otherwise forwards p and the selected dimensions to the wrapped function's GetParameterizedCovarianceFunction.
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMaternIso.cs
r8929 → r9078
- Removes the cached inverseLength, sf2 and d fields together with their HyperParameter and ConstrainedValueParameter fields; InverseLengthParameter, ScaleParameter and DParameter now resolve the Parameters collection.
- The constructor registers OptionalValueParameter<DoubleValue>s for "InverseLength" and "Scale" and keeps the ConstrainedValueParameter<IntValue>("D", …) with the valid values 1, 3 and 5.
- Drops the AfterDeserialization hook and the RegisterEvents caching.
- GetNumberOfParameters returns (InverseLengthParameter.Value != null ? 0 : 1) + (ScaleParameter.Value != null ? 0 : 1).
- SetParameter(double[] p) uses a new private GetParameterValues(double[] p, out double scale, out double inverseLength): inverseLength = 1.0 / Math.Exp(p[c]) and scale = Math.Exp(2 * p[c]) for the free parameters, with a length check on p.
- m and dm become static and take d as an argument.
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction: Covariance and CrossCovariance compute dist = Math.Sqrt(Util.SqrDist(…, Math.Sqrt(d) * inverseLength, columnIndices)) and return scale * m(d, dist); CovarianceGradient delegates to a static GetGradient that yields scale * dm(d, dist) and 2 * scale * m(d, dist).
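The bodies of m and dm are elided in this diff; for reference, the half-integer Matérn kernels they correspond to are the standard textbook forms (stated here from Rasmussen & Williams, not read from the elided code), with \(\sigma^2\) the scale, \(\ell^{-1}\) the inverse length and \(t = \sqrt{d}\, r\, \ell^{-1}\):

\[ k(r) = \sigma^2 f_d(t)\, e^{-t}, \qquad f_1(t) = 1, \quad f_3(t) = 1 + t, \quad f_5(t) = 1 + t + \tfrac{t^2}{3}. \]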
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceNoise.cs
r8929 → r9078
- Adds using System.Linq and using HeuristicLab.Parameters.
- Removes the sf2 cache and the HyperParameter<DoubleValue> scaleParameter field; ScaleParameter now resolves Parameters["Scale"], and the constructor registers an OptionalValueParameter<DoubleValue>("Scale", "The scale of noise.").
- Drops the AfterDeserialization hook and the RegisterEvents caching.
- GetNumberOfParameters returns ScaleParameter.Value != null ? 0 : 1.
- SetParameter(double[] p) uses GetParameterValues(p, out scale): the stored value when set, otherwise Math.Exp(2 * p[c]), with a length check on p.
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction with Covariance = (x, i, j) => i == j ? scale : 0.0, CrossCovariance = (x, xt, i, j) => 0.0 and CovarianceGradient = (x, i, j) => Enumerable.Repeat(i == j ? 2.0 * scale : 0.0, 1).
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovariancePeriodic.cs
r8929 → r9078
- Adds using HeuristicLab.Parameters.
- Removes the cached scale, inverseLength and period fields together with their HyperParameter<DoubleValue> fields; ScaleParameter, InverseLengthParameter and PeriodParameter now resolve the Parameters collection, and the constructor registers three OptionalValueParameter<DoubleValue>s ("Scale", "InverseLength", "Period").
- Drops the AfterDeserialization hook and the RegisterEvents caching.
- GetNumberOfParameters counts one free parameter for each of Scale, Period and InverseLength whose Value is null.
- SetParameter(double[] p) uses a new private GetParameterValues(double[] p, out double scale, out double period, out double inverseLength): inverseLength = 1.0 / Math.Exp(p[c]), period = Math.Exp(p[c]) and scale = Math.Exp(2 * p[c]) for the free parameters; the exception message is corrected from "CovariancePeriod" to "CovariancePeriodic".
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction: Covariance and CrossCovariance compute k = Math.Sin(Math.PI * dist / period) * inverseLength and return scale * Math.Exp(-2.0 * k * k); CovarianceGradient delegates to a static GetGradient(x, i, j, columnIndices, scale, period, inverseLength); GetDistance becomes static.
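In formula form, the kernel built by the new Covariance/CrossCovariance closures above (with \(\sigma^2\) = Scale, \(p\) = Period, \(\ell^{-1}\) = InverseLength and \(r\) the Euclidean distance returned by GetDistance) is:

\[ k(x_i, x_j) = \sigma^2 \exp\!\left( -2 \left( \sin\!\left( \frac{\pi r}{p} \right) \ell^{-1} \right)^{2} \right). \]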
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceProduct.cs
r8929 → r9078
- Adds using System.Linq.Expressions.
- SetParameter(double[] hyp) becomes SetParameter(double[] p); the "at least one factor is necessary" check moves from SetParameter to GetParameterizedCovarianceFunction, and the loop variable is renamed from t to f.
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction: each factor consumes its share of p and contributes its own ParameterizedCovarianceFunction; the product's Covariance and CrossCovariance multiply the factor values (Aggregate((a, b) => a * b)), and CovarianceGradient delegates to a static GetGradient over the factor functions, in which every gradient entry of one factor is multiplied by the covariance values of all other factors.
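The gradient composition in the new static GetGradient is the usual product rule: for \(k = \prod_m k_m\), a parameter \(\theta\) belonging to factor \(m^{*}\) contributes

\[ \frac{\partial k}{\partial \theta} = \frac{\partial k_{m^{*}}}{\partial \theta} \prod_{m \ne m^{*}} k_m, \]

which is why each factor's gradient entries are multiplied by the covariance values of all other factors.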
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticArd.cs
r8933 → r9078
- Adds using HeuristicLab.Parameters.
- Removes the cached sf2, inverseLength and shape fields together with their HyperParameter fields; ScaleParameter, InverseLengthParameter and ShapeParameter now resolve the Parameters collection, and the constructor registers OptionalValueParameters for "Scale", "InverseLength" and "Shape".
- Drops the AfterDeserialization hook and the RegisterEvents caching.
- GetNumberOfParameters returns (ScaleParameter.Value != null ? 0 : 1) + (ShapeParameter.Value != null ? 0 : 1) + (InverseLengthParameter.Value != null ? 0 : numberOfVariables).
- SetParameter(double[] p) uses a new private GetParameterValues(double[] p, out double scale, out double shape, out double[] inverseLength): scale = Math.Exp(2 * p[c]), shape = Math.Exp(p[c]) and inverseLength = p.Skip(2).Select(e => 1.0 / Math.Exp(e)).ToArray() for the free parameters, with a length check on p.
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction: Covariance and CrossCovariance compute d = Util.SqrDist(… inverseLength, columnIndices) and return scale * Math.Pow(1 + 0.5 * d / shape, -shape); CovarianceGradient delegates to a static GetGradient(x, i, j, columnIndices, scale, shape, inverseLength) in which sf2 is replaced by scale.
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticIso.cs
r8929 → r9078
- Adds using HeuristicLab.Parameters.
- Removes the cached sf2, inverseLength and shape fields together with their HyperParameter<DoubleValue> fields; ScaleParameter, InverseLengthParameter and ShapeParameter now resolve the Parameters collection, and the constructor registers OptionalValueParameter<DoubleValue>s for "Scale", "InverseLength" and "Shape".
- Drops the AfterDeserialization hook and the RegisterEvents caching.
- GetNumberOfParameters returns (ScaleParameter.Value != null ? 0 : 1) + (ShapeParameter.Value != null ? 0 : 1) + (InverseLengthParameter.Value != null ? 0 : 1).
- SetParameter(double[] p) uses GetParameterValues(p, out scale, out shape, out inverseLength): scale = Math.Exp(2 * p[c]), shape = Math.Exp(p[c]) and inverseLength = 1.0 / Math.Exp(p[c]) for the free parameters, with a length check on p.
- GetCovariance, GetGradient and GetCrossCovariance are replaced by GetParameterizedCovarianceFunction: Covariance and CrossCovariance compute d = Util.SqrDist(… inverseLength, columnIndices) and evaluate the rational quadratic kernel Math.Pow(1 + 0.5 * d / shape, -shape); CovarianceGradient delegates to a static GetGradient(x, i, j, columnIndices, scale, shape, inverseLength) in which sf2 is replaced by scale.
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceScale.cs
r8929 r9078 34 34 Description = "Scale covariance function for Gaussian processes.")] 35 35 public sealed class CovarianceScale : ParameterizedNamedItem, ICovarianceFunction { 36 [Storable]37 private double sf2;38 [Storable]39 private readonly HyperParameter<DoubleValue> scaleParameter;40 36 public IValueParameter<DoubleValue> ScaleParameter { 41 get { return scaleParameter; }37 get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; } 42 38 } 43 39 44 [Storable]45 private ICovarianceFunction cov;46 [Storable]47 private readonly ValueParameter<ICovarianceFunction> covParameter;48 40 public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter { 49 get { return covParameter; }41 get { return (IValueParameter<ICovarianceFunction>)Parameters["CovarianceFunction"]; } 50 42 } 51 43 … … 57 49 private CovarianceScale(CovarianceScale original, Cloner cloner) 58 50 : base(original, cloner) { 59 this.scaleParameter = cloner.Clone(original.scaleParameter);60 this.sf2 = original.sf2;61 62 this.covParameter = cloner.Clone(original.covParameter);63 this.cov = cloner.Clone(original.cov);64 RegisterEvents();65 51 } 66 52 … … 70 56 Description = ItemDescription; 71 57 72 this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter."); 73 this.covParameter = new ValueParameter<ICovarianceFunction>("CovarianceFunction", "The covariance function that should be scaled.", new CovarianceSquaredExponentialIso()); 74 cov = covParameter.Value; 75 76 Parameters.Add(this.scaleParameter); 77 Parameters.Add(covParameter); 78 79 RegisterEvents(); 58 Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter.")); 59 Parameters.Add(new ValueParameter<ICovarianceFunction>("CovarianceFunction", "The covariance function that should be scaled.", new CovarianceSquaredExponentialIso())); 80 60 } 81 61 … … 84 64 } 85 65 86 [StorableHook(HookType.AfterDeserialization)] 87 private void AfterDeserialization() { 88 RegisterEvents(); 66 public int GetNumberOfParameters(int numberOfVariables) { 67 return (ScaleParameter.Value != null ? 0 : 1) + CovarianceFunctionParameter.Value.GetNumberOfParameters(numberOfVariables); 89 68 } 90 69 91 private void RegisterEvents() { 92 Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; }); 93 covParameter.ValueChanged += (sender, args) => { cov = covParameter.Value; }; 70 public void SetParameter(double[] p) { 71 double scale; 72 GetParameterValues(p, out scale); 73 ScaleParameter.Value = new DoubleValue(scale); 74 CovarianceFunctionParameter.Value.SetParameter(p.Skip(1).ToArray()); 94 75 } 95 76 96 public int GetNumberOfParameters(int numberOfVariables) { 97 return (scaleParameter.Fixed ? 
0 : 1) + cov.GetNumberOfParameters(numberOfVariables); 77 private void GetParameterValues(double[] p, out double scale) { 78 // gather parameter values 79 if (ScaleParameter.Value != null) { 80 scale = ScaleParameter.Value.Value; 81 } else { 82 scale = Math.Exp(2 * p[0]); 83 } 98 84 } 99 85 100 public void SetParameter(double[] hyp) { 101 int i = 0; 102 if (!scaleParameter.Fixed) { 103 scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i]))); 104 i++; 105 } 106 cov.SetParameter(hyp.Skip(i).ToArray()); 86 public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) { 87 double scale; 88 GetParameterValues(p, out scale); 89 var subCov = CovarianceFunctionParameter.Value.GetParameterizedCovarianceFunction(p.Skip(1).ToArray(), columnIndices); 90 // create functions 91 var cov = new ParameterizedCovarianceFunction(); 92 cov.Covariance = (x, i, j) => scale * subCov.Covariance(x, i, j); 93 cov.CrossCovariance = (x, xt, i, j) => scale * subCov.CrossCovariance(x, xt, i, j); 94 cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, subCov); 95 return cov; 107 96 } 108 97 109 public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 110 return sf2 * cov.GetCovariance(x, i, j, columnIndices); 111 } 112 113 public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 114 yield return 2 * sf2 * cov.GetCovariance(x, i, j, columnIndices); 115 foreach (var g in cov.GetGradient(x, i, j, columnIndices)) 116 yield return sf2 * g; 117 } 118 119 public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) { 120 return sf2 * cov.GetCrossCovariance(x, xt, i, j, columnIndices); 98 private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, ParameterizedCovarianceFunction cov) { 99 yield return 2 * scale * cov.Covariance(x, i, j); 100 foreach (var g in cov.CovarianceGradient(x, i, j)) 101 yield return scale * g; 121 102 } 122 103 } -
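The essence of the CovarianceScale change is that composition now happens on delegates rather than on stateful objects: the outer function multiplies everything the wrapped function produces. A minimal standalone sketch of that composition, with made-up type names and without the HeuristicLab parameter plumbing:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical container for the three evaluation delegates (mirrors the idea of
// ParameterizedCovarianceFunction, but is not the HeuristicLab type).
class CovFunc {
  public Func<double[,], int, int, double> Covariance;
  public Func<double[,], double[,], int, int, double> CrossCovariance;
  public Func<double[,], int, int, IEnumerable<double>> Gradient;
}

static class ScaleSketch {
  public static CovFunc Scale(double scale, CovFunc sub) {
    return new CovFunc {
      Covariance = (x, i, j) => scale * sub.Covariance(x, i, j),
      CrossCovariance = (x, xt, i, j) => scale * sub.CrossCovariance(x, xt, i, j),
      // first gradient entry is for the scale itself, followed by the scaled sub-gradients
      Gradient = (x, i, j) => new[] { 2 * scale * sub.Covariance(x, i, j) }
                              .Concat(sub.Gradient(x, i, j).Select(g => scale * g))
    };
  }
}
```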
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialArd.cs
r8933 r9078 26 26 using HeuristicLab.Core; 27 27 using HeuristicLab.Data; 28 using HeuristicLab.Parameters; 28 29 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 29 30 … … 32 33 [Item(Name = "CovarianceSquaredExponentialArd", Description = "Squared exponential covariance function with automatic relevance determination for Gaussian processes.")] 33 34 public sealed class CovarianceSquaredExponentialArd : ParameterizedNamedItem, ICovarianceFunction { 34 [Storable] 35 private double sf2; 36 [Storable] 37 private readonly HyperParameter<DoubleValue> scaleParameter; 38 public IValueParameter<DoubleValue> ScaleParameter { get { return scaleParameter; } } 35 public IValueParameter<DoubleValue> ScaleParameter { 36 get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; } 37 } 39 38 40 [Storable] 41 private double[] inverseLength; 42 [Storable] 43 private readonly HyperParameter<DoubleArray> inverseLengthParameter; 44 public IValueParameter<DoubleArray> InverseLengthParameter { get { return inverseLengthParameter; } } 39 public IValueParameter<DoubleArray> InverseLengthParameter { 40 get { return (IValueParameter<DoubleArray>)Parameters["InverseLength"]; } 41 } 45 42 46 43 [StorableConstructor] … … 48 45 private CovarianceSquaredExponentialArd(CovarianceSquaredExponentialArd original, Cloner cloner) 49 46 : base(original, cloner) { 50 this.sf2 = original.sf2;51 this.scaleParameter = cloner.Clone(original.scaleParameter);52 53 if (original.inverseLength != null) {54 this.inverseLength = new double[original.inverseLength.Length];55 Array.Copy(original.inverseLength, this.inverseLength, this.inverseLength.Length);56 }57 this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);58 59 RegisterEvents();60 47 } 61 48 public CovarianceSquaredExponentialArd() … … 64 51 Description = ItemDescription; 65 52 66 this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the squared exponential covariance function with ARD."); 67 this.inverseLengthParameter = new HyperParameter<DoubleArray>("InverseLength", "The inverse length parameter for automatic relevance determination."); 68 69 Parameters.Add(scaleParameter); 70 Parameters.Add(inverseLengthParameter); 71 72 RegisterEvents(); 53 Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the squared exponential covariance function with ARD.")); 54 Parameters.Add(new OptionalValueParameter<DoubleArray>("InverseLength", "The inverse length parameter for automatic relevance determination.")); 73 55 } 74 56 … … 77 59 } 78 60 79 [StorableHook(HookType.AfterDeserialization)] 80 private void AfterDeserialization() { 81 RegisterEvents(); 61 public int GetNumberOfParameters(int numberOfVariables) { 62 return 63 (ScaleParameter.Value != null ? 0 : 1) + 64 (InverseLengthParameter.Value != null ? 
0 : numberOfVariables); 82 65 } 83 66 84 p rivate void RegisterEvents() {85 Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });86 Util.AttachArrayChangeHandler<DoubleArray, double>(inverseLengthParameter, () => {87 inverseLength =88 inverseLengthParameter.Value.ToArray();89 });67 public void SetParameter(double[] p) { 68 double scale; 69 double[] inverseLength; 70 GetParameterValues(p, out scale, out inverseLength); 71 ScaleParameter.Value = new DoubleValue(scale); 72 InverseLengthParameter.Value = new DoubleArray(inverseLength); 90 73 } 91 74 92 public int GetNumberOfParameters(int numberOfVariables) { 93 return 94 (scaleParameter.Fixed ? 0 : 1) + 95 (inverseLengthParameter.Fixed ? 0 : numberOfVariables); 75 private void GetParameterValues(double[] p, out double scale, out double[] inverseLength) { 76 int c = 0; 77 // gather parameter values 78 if (ScaleParameter.Value != null) { 79 scale = ScaleParameter.Value.Value; 80 } else { 81 scale = Math.Exp(2 * p[c]); 82 c++; 83 } 84 if (InverseLengthParameter.Value != null) { 85 inverseLength = InverseLengthParameter.Value.ToArray(); 86 } else { 87 inverseLength = p.Skip(1).Select(e => 1.0 / Math.Exp(e)).ToArray(); 88 c += inverseLength.Length; 89 } 90 if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialArd", "p"); 91 } 92 93 public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) { 94 double scale; 95 double[] inverseLength; 96 GetParameterValues(p, out scale, out inverseLength); 97 // create functions 98 var cov = new ParameterizedCovarianceFunction(); 99 cov.Covariance = (x, i, j) => { 100 double d = i == j 101 ? 0.0 102 : Util.SqrDist(x, i, j, inverseLength, columnIndices); 103 return scale * Math.Exp(-d / 2.0); 104 }; 105 cov.CrossCovariance = (x, xt, i, j) => { 106 double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices); 107 return scale * Math.Exp(-d / 2.0); 108 }; 109 cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, inverseLength); 110 return cov; 96 111 } 97 112 98 113 99 public void SetParameter(double[] hyp) { 100 int i = 0; 101 if (!scaleParameter.Fixed) { 102 scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i]))); 103 i++; 104 } 105 if (!inverseLengthParameter.Fixed) { 106 inverseLengthParameter.SetValue(new DoubleArray(hyp.Skip(i).Select(e => 1.0 / Math.Exp(e)).ToArray())); 107 i += hyp.Skip(i).Count(); 108 } 109 if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialArd", "hyp"); 110 } 111 112 public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 113 double d = i == j 114 ? 
0.0 115 : Util.SqrDist(x, i, j, inverseLength, columnIndices); 116 return sf2 * Math.Exp(-d / 2.0); 117 } 118 119 public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 114 private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double[] inverseLength) { 120 115 if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1)); 121 116 double d = i == j … … 125 120 foreach (var columnIndex in columnIndices) { 126 121 double sqrDist = Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]); 127 yield return s f2* Math.Exp(-d / 2.0) * sqrDist;122 yield return scale * Math.Exp(-d / 2.0) * sqrDist; 128 123 k++; 129 124 } 130 125 131 yield return 2.0 * sf2 * Math.Exp(-d / 2.0); 132 } 133 134 public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) { 135 double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices); 136 return sf2 * Math.Exp(-d / 2.0); 126 yield return 2.0 * scale * Math.Exp(-d / 2.0); 137 127 } 138 128 } -
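For reference, the ARD squared exponential evaluated by the delegates above reduces to the following standalone sketch (plain C#, one inverse length per column; the columnIndices masking of the original is omitted here):

```csharp
using System;
using System.Collections.Generic;

// Sketch of the ARD squared exponential kernel:
// k(i, j) = scale * exp(-0.5 * sum_c ((x[i,c] - x[j,c]) * inverseLength[c])^2)
static class SeArdSketch {
  public static double Covariance(double[,] x, int i, int j, double scale, double[] inverseLength) {
    double d = 0.0;
    for (int c = 0; c < inverseLength.Length; c++) {
      double diff = (x[i, c] - x[j, c]) * inverseLength[c];
      d += diff * diff;
    }
    return scale * Math.Exp(-d / 2.0);
  }

  // one gradient component per length scale, followed by the component for the scale
  public static IEnumerable<double> Gradient(double[,] x, int i, int j, double scale, double[] inverseLength) {
    double d = 0.0;
    for (int c = 0; c < inverseLength.Length; c++) {
      double diff = (x[i, c] - x[j, c]) * inverseLength[c];
      d += diff * diff;
    }
    double k = Math.Exp(-d / 2.0);
    for (int c = 0; c < inverseLength.Length; c++) {
      double diff = (x[i, c] - x[j, c]) * inverseLength[c];
      yield return scale * k * diff * diff;
    }
    yield return 2.0 * scale * k;
  }
}
```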
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialIso.cs
r8929 r9078 22 22 using System; 23 23 using System.Collections.Generic; 24 using System.Linq.Expressions; 24 25 using HeuristicLab.Common; 25 26 using HeuristicLab.Core; 26 27 using HeuristicLab.Data; 28 using HeuristicLab.Parameters; 27 29 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 28 30 … … 32 34 Description = "Isotropic squared exponential covariance function for Gaussian processes.")] 33 35 public sealed class CovarianceSquaredExponentialIso : ParameterizedNamedItem, ICovarianceFunction { 34 [Storable] 35 private double sf2; 36 [Storable] 37 private readonly HyperParameter<DoubleValue> scaleParameter; 38 public IValueParameter<DoubleValue> ScaleParameter { get { return scaleParameter; } } 36 public IValueParameter<DoubleValue> ScaleParameter { 37 get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; } 38 } 39 39 40 [Storable] 41 private double inverseLength; 42 [Storable] 43 private readonly HyperParameter<DoubleValue> inverseLengthParameter; 44 public IValueParameter<DoubleValue> InverseLengthParameter { get { return inverseLengthParameter; } } 40 public IValueParameter<DoubleValue> InverseLengthParameter { 41 get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; } 42 } 45 43 46 44 [StorableConstructor] … … 51 49 private CovarianceSquaredExponentialIso(CovarianceSquaredExponentialIso original, Cloner cloner) 52 50 : base(original, cloner) { 53 this.sf2 = original.sf2;54 this.scaleParameter = cloner.Clone(original.scaleParameter);55 56 this.inverseLength = original.inverseLength;57 this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);58 59 RegisterEvents();60 51 } 61 52 … … 65 56 Description = ItemDescription; 66 57 67 this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the isometric squared exponential covariance function."); 68 this.inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric squared exponential covariance function."); 69 70 Parameters.Add(scaleParameter); 71 Parameters.Add(inverseLengthParameter); 72 73 RegisterEvents(); 58 Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the isometric squared exponential covariance function.")); 59 Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric squared exponential covariance function.")); 74 60 } 75 61 … … 78 64 } 79 65 80 [StorableHook(HookType.AfterDeserialization)] 81 private void AfterDeserialization() { 82 RegisterEvents(); 66 public int GetNumberOfParameters(int numberOfVariables) { 67 return 68 (ScaleParameter.Value != null ? 0 : 1) + 69 (InverseLengthParameter.Value != null ? 0 : 1); 83 70 } 84 71 85 private void RegisterEvents() { 86 Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; }); 87 Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; }); 88 } 89 90 public int GetNumberOfParameters(int numberOfVariables) { 91 return 92 (scaleParameter.Fixed ? 0 : 1) + 93 (inverseLengthParameter.Fixed ? 
0 : 1); 94 } 95 96 public void SetParameter(double[] hyp) { 97 int i = 0; 98 if (!inverseLengthParameter.Fixed) { 99 inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i]))); 100 i++; 101 } 102 if (!scaleParameter.Fixed) { 103 scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i]))); 104 i++; 105 } 106 if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialIso", "hyp"); 72 public void SetParameter(double[] p) { 73 double scale, inverseLength; 74 GetParameterValues(p, out scale, out inverseLength); 75 ScaleParameter.Value = new DoubleValue(scale); 76 InverseLengthParameter.Value = new DoubleValue(inverseLength); 107 77 } 108 78 109 79 110 public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 111 double d = i == j 112 ? 0.0 113 : Util.SqrDist(x, i, j, inverseLength, columnIndices); 114 return sf2 * Math.Exp(-d / 2.0); 80 private void GetParameterValues(double[] p, out double scale, out double inverseLength) { 81 // gather parameter values 82 int c = 0; 83 if (InverseLengthParameter.Value != null) { 84 inverseLength = InverseLengthParameter.Value.Value; 85 } else { 86 inverseLength = 1.0 / Math.Exp(p[c]); 87 c++; 88 } 89 90 if (ScaleParameter.Value != null) { 91 scale = ScaleParameter.Value.Value; 92 } else { 93 scale = Math.Exp(2 * p[c]); 94 c++; 95 } 96 if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialIso", "p"); 115 97 } 116 98 117 public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 99 public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) { 100 double inverseLength, scale; 101 GetParameterValues(p, out scale, out inverseLength); 102 // create functions 103 var cov = new ParameterizedCovarianceFunction(); 104 cov.Covariance = (x, i, j) => { 105 double d = i == j 106 ? 0.0 107 : Util.SqrDist(x, i, j, inverseLength, columnIndices); 108 return scale * Math.Exp(-d / 2.0); 109 }; 110 cov.CrossCovariance = (x, xt, i, j) => { 111 double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices); 112 return scale * Math.Exp(-d / 2.0); 113 }; 114 cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, inverseLength, columnIndices); 115 return cov; 116 } 117 118 private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double sf2, double inverseLength, IEnumerable<int> columnIndices) { 118 119 double d = i == j 119 120 ? 0.0 … … 123 124 yield return 2.0 * sf2 * g; 124 125 } 125 126 public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {127 double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);128 return sf2 * Math.Exp(-d / 2.0);129 }130 126 } 131 127 } -
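How a caller decodes the free-parameter vector for the isotropic squared exponential, mirroring GetParameterValues in the diff (the surrounding usage shown in the comment is illustrative only and the variable names are assumptions):

```csharp
using System;

// Usage idea (illustrative, not from the sources):
//   var covFn = new CovarianceSquaredExponentialIso();
//   int n = covFn.GetNumberOfParameters(numberOfVariables);   // 2 when neither value is fixed
//   double[] p = { Math.Log(lengthScale), 0.5 * Math.Log(scale) };
//   var cov = covFn.GetParameterizedCovarianceFunction(p, columnIndices);
//   double kij = cov.Covariance(x, i, j);
static class SeIsoParameterSketch {
  public static void Decode(double[] p, out double scale, out double inverseLength) {
    if (p.Length != 2) throw new ArgumentException("expected exactly two free parameters", "p");
    inverseLength = 1.0 / Math.Exp(p[0]);  // p[0] = log(length scale)
    scale = Math.Exp(2 * p[1]);            // p[1] = log(sigma_f), giving scale = sigma_f^2
  }
}
```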
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSum.cs
r8929 r9078 23 23 using System.Collections.Generic; 24 24 using System.Linq; 25 using System.Linq.Expressions; 25 26 using HeuristicLab.Common; 26 27 using HeuristicLab.Core; … … 66 67 } 67 68 68 public void SetParameter(double[] hyp) { 69 if (terms.Count == 0) throw new ArgumentException("At least one term is needed for sum covariance function."); 69 public void SetParameter(double[] p) { 70 70 int offset = 0; 71 71 foreach (var t in terms) { 72 72 var numberOfParameters = t.GetNumberOfParameters(numberOfVariables); 73 t.SetParameter( hyp.Skip(offset).Take(numberOfParameters).ToArray());73 t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray()); 74 74 offset += numberOfParameters; 75 75 } 76 76 } 77 77 78 public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 79 return terms.Select(t => t.GetCovariance(x, i, j, columnIndices)).Sum(); 80 } 78 public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) { 79 if (terms.Count == 0) throw new ArgumentException("at least one term is necessary for the product covariance function."); 80 var functions = new List<ParameterizedCovarianceFunction>(); 81 foreach (var t in terms) { 82 var numberOfParameters = t.GetNumberOfParameters(numberOfVariables); 83 functions.Add(t.GetParameterizedCovarianceFunction(p.Take(numberOfParameters).ToArray(), columnIndices)); 84 p = p.Skip(numberOfParameters).ToArray(); 85 } 81 86 82 public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) { 83 return terms.Select(t => t.GetGradient(x, i, j, columnIndices)).Aggregate(Enumerable.Concat); 84 } 85 86 public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) { 87 return terms.Select(t => t.GetCrossCovariance(x, xt, i, j, columnIndices)).Sum(); 87 var sum = new ParameterizedCovarianceFunction(); 88 sum.Covariance = (x, i, j) => functions.Select(e => e.Covariance(x, i, j)).Sum(); 89 sum.CrossCovariance = (x, xt, i, j) => functions.Select(e => e.CrossCovariance(x, xt, i, j)).Sum(); 90 sum.CovarianceGradient = (x, i, j) => functions.Select(e => e.CovarianceGradient(x, i, j)).Aggregate(Enumerable.Concat); 91 return sum; 88 92 } 89 93 } -
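The sum composition follows the same delegate pattern: evaluate every term and add the results, concatenating the per-term gradients in order. A standalone sketch reduced to plain Func delegates (no HeuristicLab types):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Sketch of summing parameterized covariance terms.
static class SumSketch {
  public static Func<double[,], int, int, double> Sum(IList<Func<double[,], int, int, double>> terms) {
    if (terms.Count == 0) throw new ArgumentException("at least one term is necessary");
    return (x, i, j) => terms.Sum(t => t(x, i, j));
  }

  // gradients of all terms, concatenated in term order
  public static Func<double[,], int, int, IEnumerable<double>> SumGradient(
      IList<Func<double[,], int, int, IEnumerable<double>>> gradients) {
    return (x, i, j) => gradients.Select(g => g(x, i, j)).Aggregate(Enumerable.Concat);
  }
}
```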
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationSolutionCreator.cs
r8679 r9078 77 77 if (ModelParameter.ActualValue != null) { 78 78 var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone(); 79 m.FixParameters(); 79 80 var data = (IClassificationProblemData)ProblemDataParameter.ActualValue.Clone(); 80 81 var model = new DiscriminantFunctionClassificationModel(m, new NormalDistributionCutPointsThresholdCalculator()); -
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8623 r9078 81 81 82 82 [Storable] 83 private double[] meanParameter; 84 [Storable] 85 private double[] covarianceParameter; 86 87 [Storable] 83 88 private double[,] l; 84 89 … … 99 104 this.targetVariable = original.targetVariable; 100 105 this.sqrSigmaNoise = original.sqrSigmaNoise; 106 if (original.meanParameter != null) { 107 this.meanParameter = (double[])original.meanParameter.Clone(); 108 } 109 if (original.covarianceParameter != null) { 110 this.covarianceParameter = (double[])original.covarianceParameter.Clone(); 111 } 101 112 102 113 // shallow copies of arrays because they cannot be modified … … 118 129 119 130 int nVariables = this.allowedInputVariables.Length; 120 this.meanFunction.SetParameter(hyp131 meanParameter = hyp 121 132 .Take(this.meanFunction.GetNumberOfParameters(nVariables)) 122 .ToArray()); 123 this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables)) 124 .Take(this.covarianceFunction.GetNumberOfParameters(nVariables)) 125 .ToArray()); 133 .ToArray(); 134 135 covarianceParameter = hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables)) 136 .Take(this.covarianceFunction.GetNumberOfParameters(nVariables)) 137 .ToArray(); 126 138 sqrSigmaNoise = Math.Exp(2.0 * hyp.Last()); 127 139 … … 138 150 139 151 // calculate means and covariances 140 double[] m = meanFunction.GetMean(x); 152 var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, x.GetLength(1))); 153 double[] m = Enumerable.Range(0, x.GetLength(0)) 154 .Select(r => mean.Mean(x, r)) 155 .ToArray(); 156 157 var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1))); 141 158 for (int i = 0; i < n; i++) { 142 159 for (int j = i; j < n; j++) { 143 l[j, i] = cov arianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;160 l[j, i] = cov.Covariance(x, i, j) / sqrSigmaNoise; 144 161 if (j == i) l[j, i] += 1.0; 145 162 } 146 163 } 164 147 165 148 166 // cholesky decomposition … … 181 199 182 200 double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)]; 183 for (int i = 0; i < meanGradients.Length; i++) { 184 var meanGrad = meanFunction.GetGradients(i, x); 185 meanGradients[i] = -Util.ScalarProd(meanGrad, alpha); 201 for (int k = 0; k < meanGradients.Length; k++) { 202 var meanGrad = Enumerable.Range(0, alpha.Length) 203 .Select(r => mean.Gradient(x, r, k)); 204 meanGradients[k] = -Util.ScalarProd(meanGrad, alpha); 186 205 } 187 206 … … 190 209 for (int i = 0; i < n; i++) { 191 210 for (int j = 0; j < i; j++) { 192 var g = cov arianceFunction.GetGradient(x, i, j).ToArray();211 var g = cov.CovarianceGradient(x, i, j).ToArray(); 193 212 for (int k = 0; k < covGradients.Length; k++) { 194 213 covGradients[k] += lCopy[i, j] * g[k]; … … 196 215 } 197 216 198 var gDiag = cov arianceFunction.GetGradient(x, i, i).ToArray();217 var gDiag = cov.CovarianceGradient(x, i, i).ToArray(); 199 218 for (int k = 0; k < covGradients.Length; k++) { 200 219 // diag … … 216 235 } 217 236 237 // is called by the solution creator to set all parameter values of the covariance and mean function 238 // to the optimized values (necessary to make the values visible in the GUI) 239 public void FixParameters() { 240 covarianceFunction.SetParameter(covarianceParameter); 241 meanFunction.SetParameter(meanParameter); 242 covarianceParameter = new double[0]; 243 meanParameter = new double[0]; 244 } 245 218 246 #region IRegressionModel Members 219 247 public IEnumerable<double> 
GetEstimatedValues(Dataset dataset, IEnumerable<int> rows) { … … 234 262 int n = x.GetLength(0); 235 263 var Ks = new double[newN, n]; 236 var ms = meanFunction.GetMean(newX); 264 var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, newX.GetLength(1))); 265 var ms = Enumerable.Range(0, newX.GetLength(0)) 266 .Select(r => mean.Mean(newX, r)) 267 .ToArray(); 268 var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1))); 237 269 for (int i = 0; i < newN; i++) { 238 270 for (int j = 0; j < n; j++) { 239 Ks[i, j] = cov arianceFunction.GetCrossCovariance(x, newX, j, i);271 Ks[i, j] = cov.CrossCovariance(x, newX, j, i); 240 272 } 241 273 } … … 252 284 var kss = new double[newN]; 253 285 double[,] sWKs = new double[n, newN]; 286 var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1))); 254 287 255 288 // for stddev 256 289 for (int i = 0; i < newN; i++) 257 kss[i] = cov arianceFunction.GetCovariance(newX, i, i);290 kss[i] = cov.Covariance(newX, i, i); 258 291 259 292 for (int i = 0; i < newN; i++) { 260 293 for (int j = 0; j < n; j++) { 261 sWKs[j, i] = cov arianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);294 sWKs[j, i] = cov.CrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise); 262 295 } 263 296 } -
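With this change the model only stores the raw parameter vectors (meanParameter, covarianceParameter) and asks GetParameterizedMeanFunction / GetParameterizedCovarianceFunction for evaluation delegates whenever it needs them. A reduced sketch of the kernel-matrix loop (array shapes assumed; Cholesky factorization, noise term and mean handling are omitted):

```csharp
using System;

// Build the symmetric Gram matrix from a kernel delegate.
static class GramMatrixSketch {
  public static double[,] Build(double[,] x, Func<double[,], int, int, double> k) {
    int n = x.GetLength(0);
    var K = new double[n, n];
    for (int i = 0; i < n; i++)
      for (int j = i; j < n; j++)
        K[i, j] = K[j, i] = k(x, i, j); // kernel matrix is symmetric
    return K;
  }
}
```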
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs
r8494 r9078 77 77 if (ModelParameter.ActualValue != null) { 78 78 var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone(); 79 m.FixParameters(); 79 80 var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone(); 80 81 var s = new GaussianProcessRegressionSolution(m, data); -
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs
r8678 r9078
  #endregion
+ using System;
  using System.Collections.Generic;
+ using System.Linq.Expressions;
  using HeuristicLab.Core;

  namespace HeuristicLab.Algorithms.DataAnalysis {
+   public delegate double CovarianceFunctionDelegate(double[,] x, int i, int j);
+   public delegate double CrossCovarianceFunctionDelegate(double[,] x, double[,] xt, int i, int j);
+   public delegate IEnumerable<double> CovarianceGradientFunctionDelegate(double[,] x, int i, int j);
+
+   public class ParameterizedCovarianceFunction {
+     public CovarianceFunctionDelegate Covariance { get; set; }
+     public CrossCovarianceFunctionDelegate CrossCovariance { get; set; }
+     public CovarianceGradientFunctionDelegate CovarianceGradient { get; set; }
+   }
+
    public interface ICovarianceFunction : IItem {
      int GetNumberOfParameters(int numberOfVariables);
-     void SetParameter(double[] hyp);
-     double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
-     IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
-     double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices = null);
+     void SetParameter(double[] p);
+     ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices);
    }
  }
-
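A minimal implementation against the new interface shape might look as follows. The delegate and ParameterizedCovarianceFunction definitions are repeated from the diff so the sketch is self-contained; the HeuristicLab item/persistence infrastructure is deliberately left out, and the class name is made up:

```csharp
using System;
using System.Collections.Generic;

public delegate double CovarianceFunctionDelegate(double[,] x, int i, int j);
public delegate double CrossCovarianceFunctionDelegate(double[,] x, double[,] xt, int i, int j);
public delegate IEnumerable<double> CovarianceGradientFunctionDelegate(double[,] x, int i, int j);

public class ParameterizedCovarianceFunction {
  public CovarianceFunctionDelegate Covariance { get; set; }
  public CrossCovarianceFunctionDelegate CrossCovariance { get; set; }
  public CovarianceGradientFunctionDelegate CovarianceGradient { get; set; }
}

// A constant kernel with one free (log-scaled) parameter.
public class ConstKernelSketch {
  public int GetNumberOfParameters(int numberOfVariables) { return 1; }

  public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
    if (p.Length != 1) throw new ArgumentException("expected one free parameter", "p");
    double scale = Math.Exp(2 * p[0]);
    return new ParameterizedCovarianceFunction {
      Covariance = (x, i, j) => scale,
      CrossCovariance = (x, xt, i, j) => scale,
      CovarianceGradient = (x, i, j) => new[] { 2.0 * scale }
    };
  }
}
```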
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs
r8612 r9078
  */
  #endregion
+ using System;
+ using System.Collections.Generic;
  using HeuristicLab.Core;

  namespace HeuristicLab.Algorithms.DataAnalysis {
+   public delegate double MeanFunctionDelegate(double[,] x, int row);
+   public delegate double MeanGradientDelegate(double[,] x, int row, int k);
+
+   public class ParameterizedMeanFunction {
+     public MeanFunctionDelegate Mean { get; set; }
+     public MeanGradientDelegate Gradient { get; set; }
+   }
+
    public interface IMeanFunction : IItem {
      int GetNumberOfParameters(int numberOfVariables);
-     void SetParameter(double[] hyp);
-     double[] GetMean(double[,] x);
-     double[] GetGradients(int k, double[,] x);
+     void SetParameter(double[] p);
+     ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices);
    }
  }
-
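The mean-function side mirrors this. A self-contained sketch of a constant mean against the new shape (the delegate and container definitions are copied from the diff; the HeuristicLab base classes are omitted and the class name is made up):

```csharp
using System;
using System.Collections.Generic;

public delegate double MeanFunctionDelegate(double[,] x, int row);
public delegate double MeanGradientDelegate(double[,] x, int row, int k);

public class ParameterizedMeanFunction {
  public MeanFunctionDelegate Mean { get; set; }
  public MeanGradientDelegate Gradient { get; set; }
}

// Constant mean with a single free parameter (the constant itself).
public class ConstMeanSketch {
  public int GetNumberOfParameters(int numberOfVariables) { return 1; }

  public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
    if (p.Length != 1) throw new ArgumentException("expected one free parameter", "p");
    double c = p[0];
    return new ParameterizedMeanFunction {
      Mean = (x, row) => c,
      Gradient = (x, row, k) => { if (k > 0) throw new ArgumentException(); return 1.0; }
    };
  }
}
```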
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanConst.cs
r8929 r9078 21 21 22 22 using System; 23 using System.Collections.Generic; 23 24 using System.Linq; 24 25 using HeuristicLab.Common; 25 26 using HeuristicLab.Core; 26 27 using HeuristicLab.Data; 28 using HeuristicLab.Parameters; 27 29 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 28 30 … … 31 33 [Item(Name = "MeanConst", Description = "Constant mean function for Gaussian processes.")] 32 34 public sealed class MeanConst : ParameterizedNamedItem, IMeanFunction { 33 [Storable] 34 private double c; 35 [Storable] 36 private readonly HyperParameter<DoubleValue> valueParameter; 37 public IValueParameter<DoubleValue> ValueParameter { get { return valueParameter; } } 35 public IValueParameter<DoubleValue> ValueParameter { 36 get { return (IValueParameter<DoubleValue>)Parameters["Value"]; } 37 } 38 38 39 39 [StorableConstructor] … … 41 41 private MeanConst(MeanConst original, Cloner cloner) 42 42 : base(original, cloner) { 43 this.c = original.c;44 this.valueParameter = cloner.Clone(original.valueParameter);45 RegisterEvents();46 43 } 47 44 public MeanConst() … … 50 47 this.description = ItemDescription; 51 48 52 this.valueParameter = new HyperParameter<DoubleValue>("Value", "The constant value for the constant mean function."); 53 Parameters.Add(valueParameter); 54 RegisterEvents(); 49 Parameters.Add(new OptionalValueParameter<DoubleValue>("Value", "The constant value for the constant mean function.")); 55 50 } 56 51 … … 59 54 } 60 55 61 [StorableHook(HookType.AfterDeserialization)] 62 private void AfterDeserialization() { 63 RegisterEvents(); 56 public int GetNumberOfParameters(int numberOfVariables) { 57 return ValueParameter.Value != null ? 0 : 1; 64 58 } 65 59 66 private void RegisterEvents() { 67 Util.AttachValueChangeHandler<DoubleValue, double>(valueParameter, () => { c = valueParameter.Value.Value; }); 60 public void SetParameter(double[] p) { 61 double c; 62 GetParameters(p, out c); 63 ValueParameter.Value = new DoubleValue(c); 68 64 } 69 65 70 public int GetNumberOfParameters(int numberOfVariables) { 71 return valueParameter.Fixed ? 0 : 1; 66 private void GetParameters(double[] p, out double c) { 67 if (ValueParameter.Value == null) { 68 c = p[0]; 69 } else { 70 if (p.Length > 0) 71 throw new ArgumentException( 72 "The length of the parameter vector does not match the number of free parameters for the constant mean function.", 73 "p"); 74 c = ValueParameter.Value.Value; 75 } 72 76 } 73 77 74 public void SetParameter(double[] hyp) { 75 if (!valueParameter.Fixed) { 76 valueParameter.SetValue(new DoubleValue(hyp[0])); 77 } else if (hyp.Length > 0) 78 throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the constant mean function.", "hyp"); 79 } 80 81 public double[] GetMean(double[,] x) { 82 return Enumerable.Repeat(c, x.GetLength(0)).ToArray(); 83 } 84 85 public double[] GetGradients(int k, double[,] x) { 86 if (k > 0) throw new ArgumentException(); 87 return Enumerable.Repeat(1.0, x.GetLength(0)).ToArray(); 78 public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) { 79 double c; 80 GetParameters(p, out c); 81 var mf = new ParameterizedMeanFunction(); 82 mf.Mean = (x, i) => c; 83 mf.Gradient = (x, i, k) => { 84 if (k > 0) throw new ArgumentException(); 85 return 1.0; 86 }; 87 return mf; 88 88 } 89 89 } -
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanLinear.cs
r8929 r9078 21 21 22 22 using System; 23 using System.Collections.Generic; 23 24 using System.Linq; 24 25 using HeuristicLab.Common; 25 26 using HeuristicLab.Core; 26 27 using HeuristicLab.Data; 28 using HeuristicLab.Parameters; 27 29 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 28 30 … … 31 33 [Item(Name = "MeanLinear", Description = "Linear mean function for Gaussian processes.")] 32 34 public sealed class MeanLinear : ParameterizedNamedItem, IMeanFunction { 33 [Storable] 34 private double[] weights; 35 [Storable] 36 private readonly HyperParameter<DoubleArray> weightsParameter; 37 public IValueParameter<DoubleArray> WeightsParameter { get { return weightsParameter; } } 35 public IValueParameter<DoubleArray> WeightsParameter { 36 get { return (IValueParameter<DoubleArray>)Parameters["Weights"]; } 37 } 38 38 39 39 [StorableConstructor] … … 41 41 private MeanLinear(MeanLinear original, Cloner cloner) 42 42 : base(original, cloner) { 43 if (original.weights != null) {44 this.weights = new double[original.weights.Length];45 Array.Copy(original.weights, weights, original.weights.Length);46 }47 weightsParameter = cloner.Clone(original.weightsParameter);48 RegisterEvents();49 43 } 50 44 public MeanLinear() 51 45 : base() { 52 this.weightsParameter = new HyperParameter<DoubleArray>("Weights", "The weights parameter for the linear mean function."); 53 Parameters.Add(weightsParameter); 54 RegisterEvents(); 46 Parameters.Add(new OptionalValueParameter<DoubleArray>("Weights", "The weights parameter for the linear mean function.")); 55 47 } 56 48 … … 59 51 } 60 52 61 [StorableHook(HookType.AfterDeserialization)] 62 private void AfterDeserialization() { 63 RegisterEvents(); 53 public int GetNumberOfParameters(int numberOfVariables) { 54 return WeightsParameter.Value != null ? 0 : numberOfVariables; 64 55 } 65 56 66 p rivate void RegisterEvents() {67 Util.AttachArrayChangeHandler<DoubleArray, double>(weightsParameter, () => {68 weights = weightsParameter.Value.ToArray();69 });57 public void SetParameter(double[] p) { 58 double[] weights; 59 GetParameter(p, out weights); 60 WeightsParameter.Value = new DoubleArray(weights); 70 61 } 71 62 72 public int GetNumberOfParameters(int numberOfVariables) { 73 return weightsParameter.Fixed ? 
0 : numberOfVariables; 63 public void GetParameter(double[] p, out double[] weights) { 64 if (WeightsParameter.Value == null) { 65 weights = p; 66 } else { 67 if (p.Length != 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the linear mean function.", "p"); 68 weights = WeightsParameter.Value.ToArray(); 69 } 74 70 } 75 71 76 public void SetParameter(double[] hyp) { 77 if (!weightsParameter.Fixed) { 78 weightsParameter.SetValue(new DoubleArray(hyp)); 79 } else if (hyp.Length != 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the linear mean function.", "hyp"); 80 } 81 82 public double[] GetMean(double[,] x) { 83 // sanity check 84 if (weights.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function."); 85 int cols = x.GetLength(1); 86 int n = x.GetLength(0); 87 return (from i in Enumerable.Range(0, n) 88 let rowVector = Enumerable.Range(0, cols).Select(j => x[i, j]) 89 select Util.ScalarProd(weights, rowVector)) 90 .ToArray(); 91 } 92 93 public double[] GetGradients(int k, double[,] x) { 94 int cols = x.GetLength(1); 95 int n = x.GetLength(0); 96 if (k > cols) throw new ArgumentException(); 97 return (Enumerable.Range(0, n).Select(r => x[r, k])).ToArray(); 72 public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) { 73 double[] weights; 74 int[] columns = columnIndices.ToArray(); 75 GetParameter(p, out weights); 76 var mf = new ParameterizedMeanFunction(); 77 mf.Mean = (x, i) => { 78 // sanity check 79 if (weights.Length != columns.Length) throw new ArgumentException("The number of rparameters must match the number of variables for the linear mean function."); 80 return Util.ScalarProd(weights, Util.GetRow(x, i, columns)); 81 }; 82 mf.Gradient = (x, i, k) => { 83 if (k > columns.Length) throw new ArgumentException(); 84 return x[i, columns[k]]; 85 }; 86 return mf; 98 87 } 99 88 } -
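The linear mean over a column subset boils down to a per-row dot product. A standalone sketch of that computation (note: the diff guards the gradient with k > columns.Length; the sketch uses >=, which is presumably the intended bound):

```csharp
using System;
using System.Linq;

// Sketch of the linear mean restricted to selected columns:
// mean(row) = sum_k weights[k] * x[row, columns[k]];   d mean / d weights[k] = x[row, columns[k]]
static class LinearMeanSketch {
  public static double Mean(double[,] x, int row, double[] weights, int[] columns) {
    if (weights.Length != columns.Length)
      throw new ArgumentException("The number of weights must match the number of selected columns.");
    return columns.Select((c, k) => weights[k] * x[row, c]).Sum();
  }

  public static double Gradient(double[,] x, int row, int k, int[] columns) {
    if (k >= columns.Length) throw new ArgumentException("invalid parameter index", "k");
    return x[row, columns[k]];
  }
}
```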
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanProduct.cs
r8929 r9078 19 19 */ 20 20 #endregion 21 22 using System.Collections.Generic; 21 23 using System.Linq; 22 24 using HeuristicLab.Common; … … 61 63 } 62 64 63 public void SetParameter(double[] hyp) {65 public void SetParameter(double[] p) { 64 66 int offset = 0; 65 67 foreach (var t in factors) { 66 68 var numberOfParameters = t.GetNumberOfParameters(numberOfVariables); 67 t.SetParameter( hyp.Skip(offset).Take(numberOfParameters).ToArray());69 t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray()); 68 70 offset += numberOfParameters; 69 71 } 70 72 } 71 73 72 public double[] GetMean(double[,] x) {73 var res = factors.First().GetMean(x);74 foreach (var t in factors.Skip(1)) {75 var a = t.GetMean(x);76 for (int i = 0; i < res.Length; i++) res[i] *= a[i];77 }78 return res;79 }80 74 81 public double[] GetGradients(int k, double[,] x) { 82 double[] res = Enumerable.Repeat(1.0, x.GetLength(0)).ToArray(); 83 // find index of factor for the given k 84 int j = 0; 85 while (k >= factors[j].GetNumberOfParameters(numberOfVariables)) { 86 k -= factors[j].GetNumberOfParameters(numberOfVariables); 87 j++; 88 } 89 for (int i = 0; i < factors.Count; i++) { 90 var f = factors[i]; 91 if (i == j) { 92 // multiply gradient 93 var g = f.GetGradients(k, x); 94 for (int ii = 0; ii < res.Length; ii++) res[ii] *= g[ii]; 95 } else { 96 // multiply mean 97 var m = f.GetMean(x); 98 for (int ii = 0; ii < res.Length; ii++) res[ii] *= m[ii]; 75 public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) { 76 var factorMf = new List<ParameterizedMeanFunction>(); 77 int totalNumberOfParameters = GetNumberOfParameters(numberOfVariables); 78 int[] factorIndexMap = new int[totalNumberOfParameters]; // maps k-th hyperparameter to the correct mean-term 79 int[] hyperParameterIndexMap = new int[totalNumberOfParameters]; // maps k-th hyperparameter to the l-th hyperparameter of the correct mean-term 80 int c = 0; 81 // get the parameterized mean function for each term 82 for (int factorIndex = 0; factorIndex < factors.Count; factorIndex++) { 83 var numberOfParameters = factors[factorIndex].GetNumberOfParameters(numberOfVariables); 84 factorMf.Add(factors[factorIndex].GetParameterizedMeanFunction(p.Take(numberOfParameters).ToArray(), columnIndices)); 85 p = p.Skip(numberOfParameters).ToArray(); 86 87 for (int hyperParameterIndex = 0; hyperParameterIndex < numberOfParameters; hyperParameterIndex++) { 88 factorIndexMap[c] = factorIndex; 89 hyperParameterIndexMap[c] = hyperParameterIndex; 90 c++; 99 91 } 100 92 } 101 return res; 93 94 var mf = new ParameterizedMeanFunction(); 95 mf.Mean = (x, i) => factorMf.Select(t => t.Mean(x, i)).Aggregate((a, b) => a * b); 96 mf.Gradient = (x, i, k) => { 97 double result = 1.0; 98 int hyperParameterFactorIndex = factorIndexMap[k]; 99 for (int factorIndex = 0; factorIndex < factors.Count; factorIndex++) { 100 if (factorIndex == hyperParameterFactorIndex) { 101 // multiply gradient 102 result *= factorMf[factorIndex].Gradient(x, i, hyperParameterIndexMap[k]); 103 } else { 104 // multiply mean 105 result *= factorMf[factorIndex].Mean(x, i); 106 } 107 } 108 return result; 109 }; 110 return mf; 102 111 } 103 112 } -
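The interesting part of MeanProduct is the bookkeeping that maps a flat parameter index to the factor that owns it; the gradient is then that factor's gradient times the means of all other factors. A standalone sketch of this product rule for a single row, with precomputed factor means and gradients (hypothetical data layout, not the HeuristicLab code):

```csharp
using System;

static class ProductGradientSketch {
  // factorMeans[f]      : mean value of factor f at the current row
  // factorGradients[f]  : gradient components of factor f at the current row
  // k                   : flat parameter index over all factors
  public static double Gradient(double[] factorMeans, double[][] factorGradients, int k) {
    // map the flat index k to (owning factor, local parameter index)
    int factorIndex = 0;
    while (k >= factorGradients[factorIndex].Length) {
      k -= factorGradients[factorIndex].Length;
      factorIndex++;
    }
    double result = 1.0;
    for (int f = 0; f < factorMeans.Length; f++)
      result *= (f == factorIndex) ? factorGradients[f][k] : factorMeans[f];
    return result;
  }
}
```

The diff precomputes two index maps (factorIndexMap, hyperParameterIndexMap) up front; the on-the-fly mapping above is equivalent for a single lookup.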
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanSum.cs
r8929 r9078 19 19 */ 20 20 #endregion 21 22 using System.Collections.Generic; 21 23 using System.Linq; 22 24 using HeuristicLab.Common; … … 57 59 } 58 60 59 public void SetParameter(double[] hyp) {61 public void SetParameter(double[] p) { 60 62 int offset = 0; 61 63 foreach (var t in terms) { 62 64 var numberOfParameters = t.GetNumberOfParameters(numberOfVariables); 63 t.SetParameter( hyp.Skip(offset).Take(numberOfParameters).ToArray());65 t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray()); 64 66 offset += numberOfParameters; 65 67 } 66 68 } 67 69 68 public double[] GetMean(double[,] x) { 69 var res = terms.First().GetMean(x); 70 foreach (var t in terms.Skip(1)) { 71 var a = t.GetMean(x); 72 for (int i = 0; i < res.Length; i++) res[i] += a[i]; 70 public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) { 71 var termMf = new List<ParameterizedMeanFunction>(); 72 int totalNumberOfParameters = GetNumberOfParameters(numberOfVariables); 73 int[] termIndexMap = new int[totalNumberOfParameters]; // maps k-th parameter to the correct mean-term 74 int[] hyperParameterIndexMap = new int[totalNumberOfParameters]; // maps k-th parameter to the l-th parameter of the correct mean-term 75 int c = 0; 76 // get the parameterized mean function for each term 77 for (int termIndex = 0; termIndex < terms.Count; termIndex++) { 78 var numberOfParameters = terms[termIndex].GetNumberOfParameters(numberOfVariables); 79 termMf.Add(terms[termIndex].GetParameterizedMeanFunction(p.Take(numberOfParameters).ToArray(), columnIndices)); 80 p = p.Skip(numberOfParameters).ToArray(); 81 82 for (int hyperParameterIndex = 0; hyperParameterIndex < numberOfParameters; hyperParameterIndex++) { 83 termIndexMap[c] = termIndex; 84 hyperParameterIndexMap[c] = hyperParameterIndex; 85 c++; 86 } 73 87 } 74 return res;75 }76 88 77 public double[] GetGradients(int k, double[,] x) { 78 int i = 0; 79 while (k >= terms[i].GetNumberOfParameters(numberOfVariables)) { 80 k -= terms[i].GetNumberOfParameters(numberOfVariables); 81 i++; 82 } 83 return terms[i].GetGradients(k, x); 89 var mf = new ParameterizedMeanFunction(); 90 mf.Mean = (x, i) => termMf.Select(t => t.Mean(x, i)).Sum(); 91 mf.Gradient = (x, i, k) => { 92 return termMf[termIndexMap[k]].Gradient(x, i, hyperParameterIndexMap[k]); 93 }; 94 return mf; 84 95 } 85 96 } -
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanZero.cs
r8929 r9078
  #endregion
  using System;
+ using System.Collections.Generic;
  using System.Linq;
  using HeuristicLab.Common;
  …
- public void SetParameter(double[] hyp) {
-   if (hyp.Length > 0) throw new ArgumentException("No hyper-parameters allowed for zero mean function.", "hyp");
+ public void SetParameter(double[] p) {
+   if (p.Length > 0) throw new ArgumentException("No parameters allowed for zero mean function.", "p");
  }

- public double[] GetMean(double[,] x) {
-   return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
- }
-
- public double[] GetGradients(int k, double[,] x) {
-   if (k > 0) throw new ArgumentException();
-   return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
+ public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
+   if (p.Length > 0) throw new ArgumentException("No parameters allowed for zero mean function.", "p");
+   var mf = new ParameterizedMeanFunction();
+   mf.Mean = (x, i) => 0.0;
+   mf.Gradient = (x, i, k) => {
+     if (k > 0)
+       throw new ArgumentException();
+     return 0.0;
+   };
+   return mf;
  }
}
-
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs
r8933 r9078
  public static IEnumerable<double> GetRow(double[,] x, int r) {
    int cols = x.GetLength(1);
-   return Enumerable.Range(0, cols).Select(c => x[r, c]);
+   return GetRow(x, r, Enumerable.Range(0, cols));
+ }
+ public static IEnumerable<double> GetRow(double[,] x, int r, IEnumerable<int> columnIndices) {
+   return columnIndices.Select(c => x[r, c]);
  }
  public static IEnumerable<double> GetCol(double[,] x, int c) {
  …
    return Enumerable.Range(0, rows).Select(r => x[r, c]);
  }
-
- public static void AttachValueChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
-   where T : ValueTypeValue<U>
-   where U : struct {
-   parameter.ValueChanged += (sender, args) => {
-     if (parameter.Value != null) {
-       parameter.Value.ValueChanged += (s, a) => action();
-       action();
-     }
-   };
-   if (parameter.Value != null) {
-     parameter.Value.ValueChanged += (s, a) => action();
-   }
- }
-
- public static void AttachArrayChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
-   where T : ValueTypeArray<U>
-   where U : struct {
-   parameter.ValueChanged += (sender, args) => {
-     if (parameter.Value != null) {
-       parameter.Value.ItemChanged += (s, a) => action();
-       parameter.Value.Reset += (s, a) => action();
-       action();
-     }
-   };
-   if (parameter.Value != null) {
-     parameter.Value.ItemChanged += (s, a) => action();
-     parameter.Value.Reset += (s, a) => action();
-   }
- }
}
-
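A small usage sketch of the new column-subset GetRow; the WeightedSum helper is hypothetical and only illustrates how MeanLinear consumes it:

```csharp
using System.Collections.Generic;
using System.Linq;

static class RowUsageSketch {
  // weighted sum over a subset of columns of one matrix row
  public static double WeightedSum(double[,] x, int row, double[] weights, IEnumerable<int> columns) {
    return columns.Select(c => x[row, c])       // Util.GetRow(x, row, columns) in the diff
                  .Zip(weights, (v, w) => v * w)
                  .Sum();
  }
}
```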
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj
r8929 r9078
Project file update: the <Compile> items for GaussianProcess\CovarianceFunctions\CovarianceConst.cs, CovarianceLinear.cs, CovarianceLinearArd.cs, CovarianceMask.cs, CovarianceMaternIso.cs, CovarianceNoise.cs, CovariancePeriodic.cs, CovarianceProduct.cs, CovarianceRationalQuadraticArd.cs, CovarianceRationalQuadraticIso.cs, CovarianceScale.cs, CovarianceSquaredExponentialArd.cs, CovarianceSum.cs and for GaussianProcess\MeanFunctions\MeanProduct.cs and MeanSum.cs now carry a <SubType>Code</SubType> child element, and the item <Compile Include="GaussianProcess\HyperParameter.cs" /> is removed. The surrounding items (FixedDataAnalysisAlgorithm.cs, CovarianceSquaredExponentialIso.cs, the GaussianProcessClassification creators, MeanConst.cs, MeanLinear.cs, MeanZero.cs, GaussianProcessHyperparameterInitializer.cs) appear only as unchanged context.
-
branches/RuntimeOptimizer/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs
r8582 r9078
    IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows);
+   void FixParameters();
  }
}
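The FixParameters contract added here can be illustrated with a tiny standalone sketch: the model trains against raw parameter vectors and only writes them back into the visible parameters on demand (class and member names below are made up):

```csharp
// Sketch of the FixParameters idea used by the solution creators in this changeset.
class GpModelSketch {
  private double[] covarianceParameter = new double[0];
  public double[] VisibleCovarianceParameters { get; private set; }

  public GpModelSketch() { VisibleCovarianceParameters = new double[0]; }

  public void Train(double[] optimizedHyperparameters) {
    covarianceParameter = (double[])optimizedHyperparameters.Clone();
  }

  // mirrors IGaussianProcessModel.FixParameters: expose the optimized values
  // (e.g. for the GUI) and clear the internal vector
  public void FixParameters() {
    VisibleCovarianceParameters = covarianceParameter;
    covarianceParameter = new double[0];
  }
}
```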