Timestamp: 10/05/12 11:58:17
Location: branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4
Files: 43 edited, 14 copied
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/CrossValidation.cs
r7738 → r8742

     executionState = ExecutionState.Stopped;
-    runs = new RunCollection();
+    runs = new RunCollection { AlgorithmName = name };
     runsCounter = 0;
…
     #endregion

+    protected override void OnNameChanged() {
+      base.OnNameChanged();
+      Runs.AlgorithmName = Name;
+    }
+
     public void Prepare() {
       if (ExecutionState == ExecutionState.Started)
…
     problemDataClone.TestPartition.Start = SamplesStart.Value; problemDataClone.TestPartition.End = SamplesEnd.Value;
     // clone models
-    var ensembleSolution = new ClassificationEnsembleSolution(
-      solutions.Value.Select(x => cloner.Clone(x.Model)),
-      problemDataClone,
-      solutions.Value.Select(x => cloner.Clone(x.ProblemData.TrainingPartition)),
-      solutions.Value.Select(x => cloner.Clone(x.ProblemData.TestPartition)));
+    var ensembleSolution = new ClassificationEnsembleSolution(problemDataClone);
+    ensembleSolution.AddClassificationSolutions(solutions.Value);

     aggregatedResults.Add(new Result(solutions.Key + " (ensemble)", ensembleSolution));
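The ensemble construction is the substantive change here: instead of hand-cloning each member's model and partitions into a four-argument constructor, the ensemble is created on the cloned problem data and the members are added afterwards. A minimal usage sketch of the new two-step pattern (variable names hypothetical, assuming AddClassificationSolutions clones the member models internally as the removed code did explicitly):

    // two-step ensemble construction, as introduced in this changeset
    var ensemble = new ClassificationEnsembleSolution(problemDataClone);
    ensemble.AddClassificationSolutions(memberSolutions);
    aggregatedResults.Add(new Result(key + " (ensemble)", ensemble));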
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceConst.cs
r8477 → r8742

 using System;
+using System.Collections.Generic;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
 [Item(Name = "CovarianceConst",
   Description = "Constant covariance function for Gaussian processes.")]
-public class CovarianceConst : Item, ICovarianceFunction {
+public sealed class CovarianceConst : ParameterizedNamedItem, ICovarianceFunction {
+
   [Storable]
-  private double sf2;
-  public double Scale { get { return sf2; } }
+  private double scale;
+  [Storable]
+  private readonly HyperParameter<DoubleValue> scaleParameter;
+  public IValueParameter<DoubleValue> ScaleParameter {
+    get { return scaleParameter; }
+  }

   [StorableConstructor]
-  protected CovarianceConst(bool deserializing)
+  private CovarianceConst(bool deserializing)
     : base(deserializing) {
   }

-  protected CovarianceConst(CovarianceConst original, Cloner cloner)
+  private CovarianceConst(CovarianceConst original, Cloner cloner)
     : base(original, cloner) {
-    this.sf2 = original.sf2;
+    this.scaleParameter = cloner.Clone(original.scaleParameter);
+    this.scale = original.scale;
+
+    RegisterEvents();
   }

   public CovarianceConst()
     : base() {
+    Name = ItemName;
+    Description = ItemDescription;
+
+    scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the constant covariance function.");
+    Parameters.Add(scaleParameter);
+    RegisterEvents();
   }
+
+  [StorableHook(HookType.AfterDeserialization)]
+  private void AfterDeserialization() {
+    RegisterEvents();
+  }
+
+  // caching
+  private void RegisterEvents() {
+    Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
+  }

   public override IDeepCloneable Clone(Cloner cloner) {
…
   public int GetNumberOfParameters(int numberOfVariables) {
-    return 1;
+    return scaleParameter.Fixed ? 0 : 1;
   }

   public void SetParameter(double[] hyp) {
-    this.sf2 = Math.Exp(2 * hyp[0]);
-  }
-  public void SetData(double[,] x) {
-    // nothing to do
+    if (!scaleParameter.Fixed && hyp.Length == 1) {
+      scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
+    } else {
+      throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceConst", "hyp");
+    }
   }

-  public void SetData(double[,] x, double[,] xt) {
-    // nothing to do
+  public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    return scale;
   }

-  public double GetCovariance(int i, int j) {
-    return sf2;
+  public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    yield return 2.0 * scale;
   }

-  public double GetGradient(int i, int j, int k) {
-    if (k != 0) throw new ArgumentException("CovarianceConst has only one hyperparameters", "k");
-    return 2 * sf2;
+  public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    return scale;
   }
 }
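With the HyperParameter-based rewrite the number of free parameters is no longer a constant: a parameter fixed by the user drops out of the optimization, and SetParameter then expects a correspondingly shorter vector. A minimal call-sequence sketch under that reading (x is a hypothetical data matrix, not part of the changeset):

    var cov = new CovarianceConst();
    int free = cov.GetNumberOfParameters(numberOfVariables: 5);  // 1 while "Scale" is not fixed
    cov.SetParameter(new double[] { 0.5 * Math.Log(3.0) });      // scale = exp(2 * hyp[0]) = 3.0
    double k = cov.GetCovariance(x, 0, 1, null);                 // 3.0 for every pair (i, j)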
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs
r8477 → r8742

 using System;
+using System.Collections.Generic;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
…
 [StorableClass]
 [Item(Name = "CovarianceLinear", Description = "Linear covariance function for Gaussian processes.")]
-public class CovarianceLinear : Item, ICovarianceFunction {
-  [Storable]
-  private double[,] x;
-  [Storable]
-  private double[,] xt;
-
-  private double[,] k;
-  private bool symmetric;
-
-  public int GetNumberOfParameters(int numberOfVariables) {
-    return 0;
-  }
+public sealed class CovarianceLinear : Item, ICovarianceFunction {
   [StorableConstructor]
-  protected CovarianceLinear(bool deserializing) : base(deserializing) { }
-  protected CovarianceLinear(CovarianceLinear original, Cloner cloner)
+  private CovarianceLinear(bool deserializing) : base(deserializing) { }
+  private CovarianceLinear(CovarianceLinear original, Cloner cloner)
     : base(original, cloner) {
-    if (original.x != null) {
-      this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-      Array.Copy(original.x, this.x, x.Length);
-
-      this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-      Array.Copy(original.xt, this.xt, xt.Length);
-
-      this.k = new double[original.k.GetLength(0), original.k.GetLength(1)];
-      Array.Copy(original.k, this.k, k.Length);
-    }
-    this.symmetric = original.symmetric;
   }
   public CovarianceLinear()
…
   }

+  public int GetNumberOfParameters(int numberOfVariables) {
+    return 0;
+  }
+
   public void SetParameter(double[] hyp) {
     if (hyp.Length > 0) throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
-    k = null;
   }

-  public void SetData(double[,] x) {
-    SetData(x, x);
-    this.symmetric = true;
+  public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    return Util.ScalarProd(x, i, j, 1, columnIndices);
   }

-  public void SetData(double[,] x, double[,] xt) {
-    this.x = x;
-    this.xt = xt;
-    this.symmetric = false;
-
-    k = null;
+  public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    yield break;
   }

-  public double GetCovariance(int i, int j) {
-    if (k == null) CalculateInnerProduct();
-    return k[i, j];
-  }
-
-  public double GetGradient(int i, int j, int k) {
-    throw new NotSupportedException("CovarianceLinear does not have hyperparameters.");
-  }
-
-  private void CalculateInnerProduct() {
-    if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-    int rows = x.GetLength(0);
-    int cols = xt.GetLength(0);
-    k = new double[rows, cols];
-    if (symmetric) {
-      for (int i = 0; i < rows; i++) {
-        for (int j = i; j < cols; j++) {
-          k[i, j] = Util.ScalarProd(Util.GetRow(x, i),
-                                    Util.GetRow(x, j));
-          k[j, i] = k[i, j];
-        }
-      }
-    } else {
-      for (int i = 0; i < rows; i++) {
-        for (int j = 0; j < cols; j++) {
-          k[i, j] = Util.ScalarProd(Util.GetRow(x, i),
-                                    Util.GetRow(xt, j));
-        }
-      }
-    }
+  public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    return Util.ScalarProd(x, i, xt, j);
   }
 }
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceNoise.cs
r8477 → r8742

 using System;
+using System.Collections.Generic;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
 [Item(Name = "CovarianceNoise",
   Description = "Noise covariance function for Gaussian processes.")]
-public class CovarianceNoise : Item, ICovarianceFunction {
+public sealed class CovarianceNoise : ParameterizedNamedItem, ICovarianceFunction {
+
   [Storable]
   private double sf2;
-  public double Scale { get { return sf2; } }
+  [Storable]
+  private readonly HyperParameter<DoubleValue> scaleParameter;
+  public IValueParameter<DoubleValue> ScaleParameter {
+    get { return scaleParameter; }
+  }

   [StorableConstructor]
-  protected CovarianceNoise(bool deserializing)
+  private CovarianceNoise(bool deserializing)
     : base(deserializing) {
   }

-  protected CovarianceNoise(CovarianceNoise original, Cloner cloner)
+  private CovarianceNoise(CovarianceNoise original, Cloner cloner)
     : base(original, cloner) {
+    this.scaleParameter = cloner.Clone(original.scaleParameter);
     this.sf2 = original.sf2;
+    RegisterEvents();
   }

   public CovarianceNoise()
     : base() {
+    Name = ItemName;
+    Description = ItemDescription;
+
+    this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of noise.");
+    Parameters.Add(this.scaleParameter);
+
+    RegisterEvents();
   }
…
   }

+  [StorableHook(HookType.AfterDeserialization)]
+  private void AfterDeserialization() {
+    RegisterEvents();
+  }
+
+  private void RegisterEvents() {
+    Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
+  }
+
   public int GetNumberOfParameters(int numberOfVariables) {
-    return 1;
+    return scaleParameter.Fixed ? 0 : 1;
   }

   public void SetParameter(double[] hyp) {
-    this.sf2 = Math.Exp(2 * hyp[0]);
-  }
-  public void SetData(double[,] x) {
-    // nothing to do
+    if (!scaleParameter.Fixed) {
+      scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
+    } else {
+      if (hyp.Length > 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceNoise", "hyp");
+    }
   }

-  public void SetData(double[,] x, double[,] xt) {
-    // nothing to do
+  public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    return sf2;
   }

-  public double GetCovariance(int i, int j) {
-    if (i == j) return sf2;
-    else return 0.0;
+  public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    yield return 2 * sf2;
   }

-  public double GetGradient(int i, int j, int k) {
-    if (k != 0) throw new ArgumentException("CovarianceConst has only one hyperparameters", "k");
-    if (i == j)
-      return 2 * sf2;
-    else
-      return 0.0;
+  public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    return 0.0;
   }
 }
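Both the constant and the noise covariance store their hyperparameter on a log scale (sf2 = exp(2 * hyp[0])), so the single gradient entry they yield is just the chain rule applied to that reparameterization:

    \frac{\partial}{\partial h} e^{2h} = 2\, e^{2h} = 2\,\sigma_f^2

which is why GetGradient returns 2 * sf2 here (and 2.0 * scale in CovarianceConst). Note also that GetCrossCovariance returns 0 for the noise term: noise contributes to the covariance of observed points but never correlates training inputs with new test inputs.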
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovariancePeriodic.cs
r8477 → r8742

 using System;
+using System.Collections.Generic;
+using System.Linq;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
 [StorableClass]
 [Item(Name = "CovariancePeriodic", Description = "Periodic covariance function for Gaussian processes.")]
-public class CovariancePeriodic : Item, ICovarianceFunction {
+public sealed class CovariancePeriodic : ParameterizedNamedItem, ICovarianceFunction {
+
   [Storable]
-  private double[,] x;
+  private double scale;
   [Storable]
-  private double[,] xt;
+  private readonly HyperParameter<DoubleValue> scaleParameter;
+  public IValueParameter<DoubleValue> ScaleParameter {
+    get { return scaleParameter; }
+  }
+
   [Storable]
-  private double sf2;
-  public double Scale { get { return sf2; } }
+  private double inverseLength;
   [Storable]
-  private double l;
-  public double Length { get { return l; } }
+  private readonly HyperParameter<DoubleValue> inverseLengthParameter;
+  public IValueParameter<DoubleValue> InverseLengthParameter {
+    get { return inverseLengthParameter; }
+  }
+
   [Storable]
-  private double p;
-  public double Period { get { return p; } }
+  private double period;
+  [Storable]
+  private readonly HyperParameter<DoubleValue> periodParameter;
+  public IValueParameter<DoubleValue> PeriodParameter {
+    get { return periodParameter; }
+  }

-  private bool symmetric;
-
-  private double[,] sd;
-  public int GetNumberOfParameters(int numberOfVariables) {
-    return 3;
-  }
   [StorableConstructor]
-  protected CovariancePeriodic(bool deserializing) : base(deserializing) { }
-  protected CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
+  private CovariancePeriodic(bool deserializing) : base(deserializing) { }
+  private CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
     : base(original, cloner) {
-    if (original.x != null) {
-      x = new double[original.x.GetLength(0), original.x.GetLength(1)];
-      Array.Copy(original.x, x, x.Length);
-      xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
-      Array.Copy(original.xt, xt, xt.Length);
-    }
-    sf2 = original.sf2;
-    l = original.l;
-    p = original.p;
-    symmetric = original.symmetric;
+    this.scaleParameter = cloner.Clone(original.scaleParameter);
+    this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
+    this.periodParameter = cloner.Clone(original.periodParameter);
+    this.scale = original.scale;
+    this.inverseLength = original.inverseLength;
+    this.period = original.period;
+
+    RegisterEvents();
   }

   public CovariancePeriodic()
     : base() {
+    Name = ItemName;
+    Description = ItemDescription;
+
+    scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the periodic covariance function.");
+    inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter for the periodic covariance function.");
+    periodParameter = new HyperParameter<DoubleValue>("Period", "The period parameter for the periodic covariance function.");
+    Parameters.Add(scaleParameter);
+    Parameters.Add(inverseLengthParameter);
+    Parameters.Add(periodParameter);
+
+    RegisterEvents();
+  }
+
+  [StorableHook(HookType.AfterDeserialization)]
+  private void AfterDeserialization() {
+    RegisterEvents();
   }
…
   }

-  public void SetParameter(double[] hyp) {
-    if (hyp.Length != 3) throw new ArgumentException();
-    this.l = Math.Exp(hyp[0]);
-    this.p = Math.Exp(hyp[1]);
-    this.sf2 = Math.Exp(2 * hyp[2]);
-    // sf2 = Math.Min(10E6, sf2); // upper limit for the scale
-
-    sd = null;
-  }
-  public void SetData(double[,] x) {
-    SetData(x, x);
-    this.symmetric = true;
+  // caching
+  private void RegisterEvents() {
+    Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
+    Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
+    Util.AttachValueChangeHandler<DoubleValue, double>(periodParameter, () => { period = periodParameter.Value.Value; });
   }

-  public void SetData(double[,] x, double[,] xt) {
-    this.x = x;
-    this.xt = xt;
-    this.symmetric = false;
-
-    sd = null;
+  public int GetNumberOfParameters(int numberOfVariables) {
+    return
+      (new[] { scaleParameter, inverseLengthParameter, periodParameter }).Count(p => !p.Fixed);
   }

-  public double GetCovariance(int i, int j) {
-    if (sd == null) CalculateSquaredDistances();
-    double k = sd[i, j];
-    k = Math.PI * k / p;
-    k = Math.Sin(k) / l;
+  public void SetParameter(double[] hyp) {
+    int i = 0;
+    if (!inverseLengthParameter.Fixed) {
+      inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
+      i++;
+    }
+    if (!periodParameter.Fixed) {
+      periodParameter.SetValue(new DoubleValue(Math.Exp(hyp[i])));
+      i++;
+    }
+    if (!scaleParameter.Fixed) {
+      scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
+      i++;
+    }
+    if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovariancePeriod", "hyp");
+  }
+
+  public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    double k = i == j ? 0.0 : GetDistance(x, x, i, j, columnIndices);
+    k = Math.PI * k / period;
+    k = Math.Sin(k) * inverseLength;
     k = k * k;

-    return sf2 * Math.Exp(-2.0 * k);
+    return scale * Math.Exp(-2.0 * k);
   }

-  public double GetGradient(int i, int j, int k) {
-    double v = Math.PI * sd[i, j] / p;
-    switch (k) {
-      case 0: {
-          double newK = Math.Sin(v) / l;
-          newK = newK * newK;
-          return 4 * sf2 * Math.Exp(-2 * newK) * newK;
-        }
-      case 1: {
-          double r = Math.Sin(v) / l;
-          return 4 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
-        }
-      case 2: {
-          double newK = Math.Sin(v) / l;
-          newK = newK * newK;
-          return 2 * sf2 * Math.Exp(-2 * newK);
-        }
-      default: {
-          throw new ArgumentException("CovariancePeriodic only has three hyperparameters.", "k");
-        }
-    }
+  public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    double v = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j, columnIndices) / period;
+    double gradient = Math.Sin(v) * inverseLength;
+    gradient *= gradient;
+    yield return 4.0 * scale * Math.Exp(-2.0 * gradient) * gradient;
+    double r = Math.Sin(v) * inverseLength;
+    yield return 4.0 * scale * inverseLength * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
+    yield return 2.0 * scale * Math.Exp(-2 * gradient);
   }

-  private void CalculateSquaredDistances() {
-    if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-    int rows = x.GetLength(0);
-    int cols = xt.GetLength(0);
-    sd = new double[rows, cols];
-
-    if (symmetric) {
-      for (int i = 0; i < rows; i++) {
-        for (int j = i; j < cols; j++) {
-          sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(x, j)));
-          sd[j, i] = sd[i, j];
-        }
-      }
-    } else {
-      for (int i = 0; i < rows; i++) {
-        for (int j = 0; j < cols; j++) {
-          sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, j)));
-        }
-      }
-    }
+  public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    double k = GetDistance(x, xt, i, j, columnIndices);
+    k = Math.PI * k / period;
+    k = Math.Sin(k) * inverseLength;
+    k = k * k;
+
+    return scale * Math.Exp(-2.0 * k);
+  }
+
+  private double GetDistance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    return Math.Sqrt(Util.SqrDist(x, i, xt, j, 1, columnIndices));
   }
 }
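For reference, the rewritten GetCovariance/GetCrossCovariance compute the standard periodic kernel; with scale = \sigma^2, period = p and inverseLength = 1/\ell the code corresponds to

    k(x_p, x_q) = \sigma^2 \exp\!\left(-\frac{2\,\sin^2\!\big(\pi\,\lVert x_p - x_q \rVert / p\big)}{\ell^2}\right)

where the distance is taken over the selected columnIndices only.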
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs
r8477 → r8742

 [Item(Name = "CovarianceSum",
   Description = "Sum covariance function for Gaussian processes.")]
-public class CovarianceSum : Item, ICovarianceFunction {
+public sealed class CovarianceSum : Item, ICovarianceFunction {
   [Storable]
   private ItemList<ICovarianceFunction> terms;
…
   [StorableConstructor]
-  protected CovarianceSum(bool deserializing)
+  private CovarianceSum(bool deserializing)
     : base(deserializing) {
   }

-  protected CovarianceSum(CovarianceSum original, Cloner cloner)
+  private CovarianceSum(CovarianceSum original, Cloner cloner)
     : base(original, cloner) {
     this.terms = cloner.Clone(original.terms);
     this.numberOfVariables = original.numberOfVariables;
-    AttachEventHandlers();
   }
…
     : base() {
     this.terms = new ItemList<ICovarianceFunction>();
-    AttachEventHandlers();
-  }
-
-  private void AttachEventHandlers() {
-    this.terms.CollectionReset += (sender, args) => ClearCache();
-    this.terms.ItemsAdded += (sender, args) => ClearCache();
-    this.terms.ItemsRemoved += (sender, args) => ClearCache();
-    this.terms.ItemsReplaced += (sender, args) => ClearCache();
-    this.terms.ItemsMoved += (sender, args) => ClearCache();
   }
…
   public void SetParameter(double[] hyp) {
+    if (terms.Count == 0) throw new ArgumentException("At least one term is needed for sum covariance function.");
     int offset = 0;
     foreach (var t in terms) {
…
     }
   }
-  public void SetData(double[,] x) {
-    SetData(x, x);
+
+  public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    return terms.Select(t => t.GetCovariance(x, i, j, columnIndices)).Sum();
   }

-  public void SetData(double[,] x, double[,] xt) {
-    foreach (var t in terms) {
-      t.SetData(x, xt);
-    }
+  public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    return terms.Select(t => t.GetGradient(x, i, j, columnIndices)).Aggregate(Enumerable.Concat);
   }

-  public double GetCovariance(int i, int j) {
-    return terms.Select(t => t.GetCovariance(i, j)).Sum();
-  }
-
-  private Dictionary<int, Tuple<int, int>> cachedParameterMap;
-  public double GetGradient(int i, int j, int k) {
-    if (cachedParameterMap == null) {
-      CalculateParameterMap();
-    }
-    int ti = cachedParameterMap[k].Item1;
-    k = cachedParameterMap[k].Item2;
-    return terms[ti].GetGradient(i, j, k);
-  }
-  private void ClearCache() {
-    cachedParameterMap = null;
-  }
-
-  private void CalculateParameterMap() {
-    cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
-    int k = 0;
-    for (int ti = 0; ti < terms.Count; ti++) {
-      for (int ti_k = 0; ti_k < terms[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
-        cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
-      }
-    }
+  public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    return terms.Select(t => t.GetCrossCovariance(x, xt, i, j, columnIndices)).Sum();
   }
 }
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs
r8477 → r8742

     var rand = RandomParameter.ActualValue;
     for (int i = 0; i < r.Length; i++)
-      r[i] = rand.NextDouble() * 2 - 1;
+      r[i] = rand.NextDouble() * 10 - 5;

     HyperparameterParameter.ActualValue = r;
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8477 → r8742

 [Storable]
+private double[] hyperparameterGradients;
+public double[] HyperparameterGradients {
+  get {
+    var copy = new double[hyperparameterGradients.Length];
+    Array.Copy(hyperparameterGradients, copy, copy.Length);
+    return copy;
+  }
+}
+
+[Storable]
 private ICovarianceFunction covarianceFunction;
 public ICovarianceFunction CovarianceFunction {
…
 [Storable]
 private double sqrSigmaNoise;
+public double SigmaNoise {
+  get { return Math.Sqrt(sqrSigmaNoise); }
+}

 [Storable]
…
 l = new double[n, n];

-meanFunction.SetData(x);
-covarianceFunction.SetData(x);
-
 // calculate means and covariances
 double[] m = meanFunction.GetMean(x);
 for (int i = 0; i < n; i++) {
   for (int j = i; j < n; j++) {
-    l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
+    l[j, i] = covarianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;
     if (j == i) l[j, i] += 1.0;
   }
…
   alpha[i] = alpha[i] / sqrSigmaNoise;
 negativeLogLikelihood = 0.5 * Util.ScalarProd(ym, alpha) + diagSum + (n / 2.0) * Math.Log(2.0 * Math.PI * sqrSigmaNoise);
-}

-public double[] GetHyperparameterGradients() {
+
 // derivatives
-int n = x.GetLength(0);
 int nAllowedVariables = x.GetLength(1);

-int info;
 alglib.matinvreport matInvRep;
 double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
…
 if (covGradients.Length > 0) {
   for (int i = 0; i < n; i++) {
+    for (int j = 0; j < i; j++) {
+      var g = covarianceFunction.GetGradient(x, i, j).ToArray();
+      for (int k = 0; k < covGradients.Length; k++) {
+        covGradients[k] += lCopy[i, j] * g[k];
+      }
+    }
+
+    var gDiag = covarianceFunction.GetGradient(x, i, i).ToArray();
     for (int k = 0; k < covGradients.Length; k++) {
-      for (int j = 0; j < i; j++) {
-        covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
-      }
-      covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
+      // diag
+      covGradients[k] += 0.5 * lCopy[i, i] * gDiag[k];
     }
   }
 }

-return
+hyperparameterGradients =
   meanGradients
   .Concat(covGradients)
   .Concat(new double[] { noiseGradient }).ToArray();
 }
…
 }
 public GaussianProcessRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
-  return new GaussianProcessRegressionSolution(this, problemData);
+  return new GaussianProcessRegressionSolution(this, new RegressionProblemData(problemData));
 }
 IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
…
 }
 #endregion

 private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
…
 int newN = newX.GetLength(0);
 int n = x.GetLength(0);
-// var predMean = new double[newN];
-// predVar = new double[newN];
-
-// var kss = new double[newN];
 var Ks = new double[newN, n];
-//double[,] sWKs = new double[n, newN];
-// double[,] v;
-
-// for stddev
-//covarianceFunction.SetParameter(covHyp, newX);
-//kss = covarianceFunction.GetDiagonalCovariances();
-
-covarianceFunction.SetData(x, newX);
-meanFunction.SetData(newX);
 var ms = meanFunction.GetMean(newX);
 for (int i = 0; i < newN; i++) {
   for (int j = 0; j < n; j++) {
-    Ks[i, j] = covarianceFunction.GetCovariance(j, i);
-    //sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
-  }
-}
-
-// for stddev
-// alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);
+    Ks[i, j] = covarianceFunction.GetCrossCovariance(x, newX, j, i);
+  }
+}

 return Enumerable.Range(0, newN)
   .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
-//for (int i = 0; i < newN; i++) {
-//  // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
-//  // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
-//  // predVar[i] = kss[i] - sumV2;
-//}
 }
…
 // for stddev
-covarianceFunction.SetData(newX);
 for (int i = 0; i < newN; i++)
-  kss[i] = covarianceFunction.GetCovariance(i, i);
-
-covarianceFunction.SetData(x, newX);
+  kss[i] = covarianceFunction.GetCovariance(newX, i, i);
+
 for (int i = 0; i < newN; i++) {
   for (int j = 0; j < n; j++) {
-    sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
+    sWKs[j, i] = covarianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
   }
 }

 // for stddev
-int info;
-alglib.densesolverreport denseSolveRep;
-double[,] v;
-
-alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);
+alglib.ablas.rmatrixlefttrsm(n, newN, l, 0, 0, false, false, 0, ref sWKs, 0, 0);

 for (int i = 0; i < newN; i++) {
-  var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
+  var sumV = Util.ScalarProd(Util.GetCol(sWKs, i), Util.GetCol(sWKs, i));
   kss[i] -= sumV;
   if (kss[i] < 0) kss[i] = 0;
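Two details of the rewritten model are worth spelling out. First, the hyperparameter gradients are now computed eagerly during model construction and cached in hyperparameterGradients, instead of being recomputed on every GetHyperparameterGradients() call. Second, replacing the dense solver with the triangular solve alglib.ablas.rmatrixlefttrsm is valid because l already holds the Cholesky factor L with L L^T = K/\sigma^2 + I; the variance computation then follows the standard Gaussian process identity

    \operatorname{Var}[y_*] = k(x_*, x_*) - k_*^\top (K + \sigma^2 I)^{-1} k_*
                            = k(x_*, x_*) - \big\lVert L^{-1} k_* / \sigma \big\rVert^2

which is exactly the per-column squared-norm subtraction in the loop above.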
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs
r8477 → r8742

 using System;
-using System.Collections.Generic;
-using System.Linq;
 using HeuristicLab.Algorithms.GradientDescent;
 using HeuristicLab.Common;
…
 using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
-using HeuristicLab.PluginInfrastructure;
 using HeuristicLab.Problems.DataAnalysis;
…
 #region parameter properties
-public IConstrainedValueParameter<IMeanFunction> MeanFunctionParameter {
-  get { return (IConstrainedValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
+public IValueParameter<IMeanFunction> MeanFunctionParameter {
+  get { return (IValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
 }
-public IConstrainedValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
-  get { return (IConstrainedValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
+public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
+  get { return (IValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
 }
 public IValueParameter<IntValue> MinimizationIterationsParameter {
…
 Problem = new RegressionProblem();

-List<IMeanFunction> meanFunctions = ApplicationManager.Manager.GetInstances<IMeanFunction>().ToList();
-List<ICovarianceFunction> covFunctions = ApplicationManager.Manager.GetInstances<ICovarianceFunction>().ToList();
-
-Parameters.Add(new ConstrainedValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.",
-  new ItemSet<IMeanFunction>(meanFunctions), meanFunctions.OfType<MeanConst>().First()));
-Parameters.Add(new ConstrainedValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.",
-  new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.OfType<CovarianceSEiso>().First()));
+Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
+Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
 Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
 Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs
r8477 → r8742

 ModelParameter.ActualValue = model;
 NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
-HyperparameterGradientsParameter.ActualValue = new RealVector(model.GetHyperparameterGradients());
+HyperparameterGradientsParameter.ActualValue = new RealVector(model.HyperparameterGradients);
 return base.Apply();
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolution.cs
r8477 → r8742

 [StorableClass]
 public sealed class GaussianProcessRegressionSolution : RegressionSolution, IGaussianProcessSolution {
+  private new readonly Dictionary<int, double> evaluationCache;

   public new IGaussianProcessModel Model {
…
   [StorableConstructor]
-  private GaussianProcessRegressionSolution(bool deserializing) : base(deserializing) { }
+  private GaussianProcessRegressionSolution(bool deserializing)
+    : base(deserializing) {
+    evaluationCache = new Dictionary<int, double>();
+  }
   private GaussianProcessRegressionSolution(GaussianProcessRegressionSolution original, Cloner cloner)
     : base(original, cloner) {
+    evaluationCache = new Dictionary<int, double>(original.evaluationCache);
   }
   public GaussianProcessRegressionSolution(IGaussianProcessModel model, IRegressionProblemData problemData)
     : base(model, problemData) {
+
+    evaluationCache = new Dictionary<int, double>(problemData.Dataset.Rows);
     RecalculateResults();
   }
…
   public IEnumerable<double> GetEstimatedVariance(IEnumerable<int> rows) {
-    return Model.GetEstimatedVariance(ProblemData.Dataset, rows);
+    var rowsToEvaluate = rows.Except(evaluationCache.Keys);
+    var rowsEnumerator = rowsToEvaluate.GetEnumerator();
+    var valuesEnumerator = Model.GetEstimatedVariance(ProblemData.Dataset, rowsToEvaluate).GetEnumerator();
+
+    while (rowsEnumerator.MoveNext() & valuesEnumerator.MoveNext()) {
+      evaluationCache.Add(rowsEnumerator.Current, valuesEnumerator.Current);
+    }
+
+    return rows.Select(row => evaluationCache[row]);
+  }
+
+  protected override void OnModelChanged() {
+    evaluationCache.Clear();
+    base.OnModelChanged();
+  }
+  protected override void OnProblemDataChanged() {
+    evaluationCache.Clear();
+    base.OnProblemDataChanged();
   }
 }
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs
r8477 → r8742

 public override IOperation Apply() {
-  var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
-  var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
-  var s = new GaussianProcessRegressionSolution(m, data);
+  if (ModelParameter.ActualValue != null) {
+    var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
+    var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
+    var s = new GaussianProcessRegressionSolution(m, data);

-  SolutionParameter.ActualValue = s;
-  var results = ResultsParameter.ActualValue;
-  if (!results.ContainsKey(SolutionParameterName)) {
-    results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
-    results.Add(new Result(TrainingRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the training partition.", new DoubleValue(s.TrainingRSquared)));
-    results.Add(new Result(TestRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the test partition.", new DoubleValue(s.TestRSquared)));
-  } else {
-    results[SolutionParameterName].Value = s;
-    results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
-    results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
+    SolutionParameter.ActualValue = s;
+    var results = ResultsParameter.ActualValue;
+    if (!results.ContainsKey(SolutionParameterName)) {
+      results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
+      results.Add(new Result(TrainingRSquaredResultName,
+        "The Pearson's R² of the Gaussian process solution on the training partition.",
+        new DoubleValue(s.TrainingRSquared)));
+      results.Add(new Result(TestRSquaredResultName,
+        "The Pearson's R² of the Gaussian process solution on the test partition.",
+        new DoubleValue(s.TestRSquared)));
+    } else {
+      results[SolutionParameterName].Value = s;
+      results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
+      results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
+    }
   }
   return base.Apply();
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs
r8477 → r8742

+using System.Collections.Generic;
 using HeuristicLab.Core;
…
   int GetNumberOfParameters(int numberOfVariables);
   void SetParameter(double[] hyp);
-  void SetData(double[,] x);
-  void SetData(double[,] x, double[,] xt);
-
-  double GetCovariance(int i, int j);
-  double GetGradient(int i, int j, int k);
+  double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
+  IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
+  double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices = null);
 }
 }
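The interface is now stateless with respect to the data: SetData is gone, the data matrix is an argument to every evaluation, and the optional columnIndices restricts evaluation to a subset of input dimensions. A minimal hypothetical implementation sketch of the new contract (not part of this changeset, and omitting the Item base class, cloning, and persistence boilerplate that the concrete HeuristicLab classes above carry):

    // Hypothetical unit covariance, used only to illustrate the contract.
    public sealed class CovarianceOne : ICovarianceFunction {
      public int GetNumberOfParameters(int numberOfVariables) { return 0; }

      public void SetParameter(double[] hyp) {
        if (hyp.Length > 0) throw new ArgumentException("No hyperparameters allowed.", "hyp");
      }

      // covariance between rows i and j of a single data matrix
      public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices = null) {
        return 1.0;
      }

      // one gradient entry per free hyperparameter; none here
      public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices = null) {
        yield break;
      }

      // covariance between row i of the training matrix x and row j of the test matrix xt
      public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices = null) {
        return 1.0;
      }
    }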
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs
r8416 → r8742

   int GetNumberOfParameters(int numberOfVariables);
   void SetParameter(double[] hyp);
-  void SetData(double[,] x);
   double[] GetMean(double[,] x);
   double[] GetGradients(int k, double[,] x);
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanConst.cs
r8477 → r8742

 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
 [StorableClass]
 [Item(Name = "MeanConst", Description = "Constant mean function for Gaussian processes.")]
-public class MeanConst : Item, IMeanFunction {
+public sealed class MeanConst : ParameterizedNamedItem, IMeanFunction {
   [Storable]
   private double c;
-  public double Value { get { return c; } }
+  [Storable]
+  private readonly HyperParameter<DoubleValue> valueParameter;
+  public IValueParameter<DoubleValue> ValueParameter { get { return valueParameter; } }

-  public int GetNumberOfParameters(int numberOfVariables) {
-    return 1;
-  }
   [StorableConstructor]
-  protected MeanConst(bool deserializing) : base(deserializing) { }
-  protected MeanConst(MeanConst original, Cloner cloner)
+  private MeanConst(bool deserializing) : base(deserializing) { }
+  private MeanConst(MeanConst original, Cloner cloner)
     : base(original, cloner) {
     this.c = original.c;
+    this.valueParameter = cloner.Clone(original.valueParameter);
+    RegisterEvents();
   }
   public MeanConst()
     : base() {
+    this.name = ItemName;
+    this.description = ItemDescription;
+
+    this.valueParameter = new HyperParameter<DoubleValue>("Value", "The constant value for the constant mean function.");
+    Parameters.Add(valueParameter);
+    RegisterEvents();
+  }
+
+  public override IDeepCloneable Clone(Cloner cloner) {
+    return new MeanConst(this, cloner);
+  }
+
+  [StorableHook(HookType.AfterDeserialization)]
+  private void AfterDeserialization() {
+    RegisterEvents();
+  }
+
+  private void RegisterEvents() {
+    Util.AttachValueChangeHandler<DoubleValue, double>(valueParameter, () => { c = valueParameter.Value.Value; });
+  }
+
+  public int GetNumberOfParameters(int numberOfVariables) {
+    return valueParameter.Fixed ? 0 : 1;
   }

   public void SetParameter(double[] hyp) {
-    if (hyp.Length != 1) throw new ArgumentException("Only one hyper-parameter allowed for constant mean function.", "hyp");
-    this.c = hyp[0];
-  }
-  public void SetData(double[,] x) {
-    // nothing to do
+    if (!valueParameter.Fixed) {
+      valueParameter.SetValue(new DoubleValue(hyp[0]));
+    } else if (hyp.Length > 0)
+      throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the constant mean function.", "hyp");
   }
…
     return Enumerable.Repeat(1.0, x.GetLength(0)).ToArray();
   }
-
-  public override IDeepCloneable Clone(Cloner cloner) {
-    return new MeanConst(this, cloner);
-  }
 }
 }
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanLinear.cs
r8477 → r8742

 using System;
 using System.Linq;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
 [StorableClass]
 [Item(Name = "MeanLinear", Description = "Linear mean function for Gaussian processes.")]
-public class MeanLinear : Item, IMeanFunction {
+public sealed class MeanLinear : ParameterizedNamedItem, IMeanFunction {
   [Storable]
-  private double[] alpha;
-  public double[] Weights {
-    get {
-      if (alpha == null) return new double[0];
-      var copy = new double[alpha.Length];
-      Array.Copy(alpha, copy, copy.Length);
-      return copy;
-    }
-  }
-  public int GetNumberOfParameters(int numberOfVariables) {
-    return numberOfVariables;
-  }
+  private double[] weights;
+  [Storable]
+  private readonly HyperParameter<DoubleArray> weightsParameter;
+  public IValueParameter<DoubleArray> WeightsParameter { get { return weightsParameter; } }
+
   [StorableConstructor]
-  protected MeanLinear(bool deserializing) : base(deserializing) { }
-  protected MeanLinear(MeanLinear original, Cloner cloner)
+  private MeanLinear(bool deserializing) : base(deserializing) { }
+  private MeanLinear(MeanLinear original, Cloner cloner)
     : base(original, cloner) {
-    if (original.alpha != null) {
-      this.alpha = new double[original.alpha.Length];
-      Array.Copy(original.alpha, alpha, original.alpha.Length);
+    if (original.weights != null) {
+      this.weights = new double[original.weights.Length];
+      Array.Copy(original.weights, weights, original.weights.Length);
     }
+    weightsParameter = cloner.Clone(original.weightsParameter);
+    RegisterEvents();
   }
   public MeanLinear()
     : base() {
+    this.weightsParameter = new HyperParameter<DoubleArray>("Weights", "The weights parameter for the linear mean function.");
+    Parameters.Add(weightsParameter);
+    RegisterEvents();
+  }
+
+  public override IDeepCloneable Clone(Cloner cloner) {
+    return new MeanLinear(this, cloner);
+  }
+
+  [StorableHook(HookType.AfterDeserialization)]
+  private void AfterDeserialization() {
+    RegisterEvents();
+  }
+
+  private void RegisterEvents() {
+    Util.AttachArrayChangeHandler<DoubleArray, double>(weightsParameter, () => {
+      weights = weightsParameter.Value.ToArray();
+    });
+  }
+
+  public int GetNumberOfParameters(int numberOfVariables) {
+    return weightsParameter.Fixed ? 0 : numberOfVariables;
   }

   public void SetParameter(double[] hyp) {
-    this.alpha = new double[hyp.Length];
-    Array.Copy(hyp, alpha, hyp.Length);
-  }
-  public void SetData(double[,] x) {
-    // nothing to do
+    if (!weightsParameter.Fixed) {
+      weightsParameter.SetValue(new DoubleArray(hyp));
+    } else if (hyp.Length != 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the linear mean function.", "hyp");
   }

   public double[] GetMean(double[,] x) {
     // sanity check
-    if (alpha.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function.");
+    if (weights.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function.");
     int cols = x.GetLength(1);
     int n = x.GetLength(0);
     return (from i in Enumerable.Range(0, n)
-            let rowVector = from j in Enumerable.Range(0, cols)
-                            select x[i, j]
-            select Util.ScalarProd(alpha, rowVector))
+            let rowVector = Enumerable.Range(0, cols).Select(j => x[i, j])
+            select Util.ScalarProd(weights, rowVector))
       .ToArray();
   }
…
     int n = x.GetLength(0);
     if (k > cols) throw new ArgumentException();
-    return (from r in Enumerable.Range(0, n)
-            select x[r, k]).ToArray();
-  }
-
-  public override IDeepCloneable Clone(Cloner cloner) {
-    return new MeanLinear(this, cloner);
+    return (Enumerable.Range(0, n).Select(r => x[r, k])).ToArray();
   }
 }
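For reference, GetMean and GetGradients implement the linear mean and its per-weight derivative:

    m(x) = w^\top x = \sum_k w_k x_k, \qquad \frac{\partial m(x)}{\partial w_k} = x_k

so the gradient with respect to weight k is simply column k of the data matrix, as returned above.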
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanSum.cs
r8477 → r8742

 [StorableClass]
 [Item(Name = "MeanSum", Description = "Sum of mean functions for Gaussian processes.")]
-public class MeanSum : Item, IMeanFunction {
+public sealed class MeanSum : Item, IMeanFunction {
   [Storable]
   private ItemList<IMeanFunction> terms;
…
   }

-  public int GetNumberOfParameters(int numberOfVariables) {
-    this.numberOfVariables = numberOfVariables;
-    return terms.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
-  }
   [StorableConstructor]
-  protected MeanSum(bool deserializing) : base(deserializing) { }
-  protected MeanSum(MeanSum original, Cloner cloner)
+  private MeanSum(bool deserializing) : base(deserializing) { }
+  private MeanSum(MeanSum original, Cloner cloner)
     : base(original, cloner) {
     this.terms = cloner.Clone(original.terms);
…
   public MeanSum() {
     this.terms = new ItemList<IMeanFunction>();
+  }
+
+  public override IDeepCloneable Clone(Cloner cloner) {
+    return new MeanSum(this, cloner);
+  }
+
+  public int GetNumberOfParameters(int numberOfVariables) {
+    this.numberOfVariables = numberOfVariables;
+    return terms.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
   }
…
       offset += numberOfParameters;
     }
-  }
-
-  public void SetData(double[,] x) {
-    foreach (var t in terms) t.SetData(x);
   }
…
     return terms[i].GetGradients(k, x);
   }
-
-  public override IDeepCloneable Clone(Cloner cloner) {
-    return new MeanSum(this, cloner);
-  }
 }
 }
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanZero.cs
r8416 → r8742

 [StorableClass]
 [Item(Name = "MeanZero", Description = "Constant zero mean function for Gaussian processes.")]
-public class MeanZero : Item, IMeanFunction {
-  public int GetNumberOfParameters(int numberOfVariables) {
-    return 0;
-  }
+public sealed class MeanZero : Item, IMeanFunction {
   [StorableConstructor]
-  protected MeanZero(bool deserializing) : base(deserializing) { }
-  protected MeanZero(MeanZero original, Cloner cloner)
+  private MeanZero(bool deserializing) : base(deserializing) { }
+  private MeanZero(MeanZero original, Cloner cloner)
     : base(original, cloner) {
   }
…
   }

+  public override IDeepCloneable Clone(Cloner cloner) {
+    return new MeanZero(this, cloner);
+  }
+
+  public int GetNumberOfParameters(int numberOfVariables) {
+    return 0;
+  }
+
   public void SetParameter(double[] hyp) {
     if (hyp.Length > 0) throw new ArgumentException("No hyper-parameters allowed for zero mean function.", "hyp");
-  }
-
-  public void SetData(double[,] x) {
-    // do nothing
   }
…
     return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
   }
-
-  public override IDeepCloneable Clone(Cloner cloner) {
-    return new MeanZero(this, cloner);
-  }
 }
 }
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs
r8477 → r8742

 #endregion

+using System;
 using System.Collections.Generic;
 using System.Linq;
+using HeuristicLab.Core;
+using HeuristicLab.Data;

 namespace HeuristicLab.Algorithms.DataAnalysis {
-  public static class Util {
+  internal static class Util {
     public static double ScalarProd(IEnumerable<double> v, IEnumerable<double> u) {
       return v.Zip(u, (vi, ui) => vi * ui).Sum();
+    }
+
+    public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
+      return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum();
     }
…
     }

-    public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
-      return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum();
+    public static double SqrDist(double[,] x, int i, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
+      return SqrDist(x, i, x, j, scale, columnIndices);
+    }
+
+    public static double SqrDist(double[,] x, int i, double[,] xt, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
+      double ss = 0.0;
+      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
+      foreach (int k in columnIndices) {
+        double d = x[i, k] - xt[j, k];
+        ss += d * d;
+      }
+      return scale * scale * ss;
+    }
+
+    public static double SqrDist(double[,] x, int i, int j, double[] scale, IEnumerable<int> columnIndices = null) {
+      return SqrDist(x, i, x, j, scale);
+    }
+
+    public static double SqrDist(double[,] x, int i, double[,] xt, int j, double[] scale, IEnumerable<int> columnIndices = null) {
+      double ss = 0.0;
+      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
+      foreach (int k in columnIndices) {
+        double d = x[i, k] - xt[j, k];
+        ss += d * d * scale[k] * scale[k];
+      }
+      return ss;
+    }
+    public static double ScalarProd(double[,] x, int i, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
+      return ScalarProd(x, i, x, j, scale, columnIndices);
+    }
+
+    public static double ScalarProd(double[,] x, int i, double[,] xt, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
+      double sum = 0.0;
+      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
+      foreach (int k in columnIndices) {
+        sum += x[i, k] * xt[j, k];
+      }
+      return scale * scale * sum;
+    }
+    public static double ScalarProd(double[,] x, int i, int j, double[] scale, IEnumerable<int> columnIndices = null) {
+      return ScalarProd(x, i, x, j, scale, columnIndices);
+    }
+
+    public static double ScalarProd(double[,] x, int i, double[,] xt, int j, double[] scale, IEnumerable<int> columnIndices = null) {
+      double sum = 0.0;
+      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
+      foreach (int k in columnIndices) {
+        sum += x[i, k] * scale[k] * xt[j, k] * scale[k];
+      }
+      return sum;
     }
…
       return Enumerable.Range(0, rows).Select(r => x[r, c]);
     }
+
+    public static void AttachValueChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
+      where T : ValueTypeValue<U>
+      where U : struct {
+      parameter.ValueChanged += (sender, args) => {
+        if (parameter.Value != null) {
+          parameter.Value.ValueChanged += (s, a) => action();
+          action();
+        }
+      };
+      if (parameter.Value != null) {
+        parameter.Value.ValueChanged += (s, a) => action();
+      }
+    }
+
+    public static void AttachArrayChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
+      where T : ValueTypeArray<U>
+      where U : struct {
+      parameter.ValueChanged += (sender, args) => {
+        if (parameter.Value != null) {
+          parameter.Value.ItemChanged += (s, a) => action();
+          parameter.Value.Reset += (s, a) => action();
+          action();
+        }
+      };
+      if (parameter.Value != null) {
+        parameter.Value.ItemChanged += (s, a) => action();
+        parameter.Value.Reset += (s, a) => action();
+      }
+    }
   }
 }
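AttachValueChangeHandler exists so the mean and covariance functions can cache the raw double behind a DoubleValue parameter: the action is wired both to parameter.ValueChanged (the DoubleValue object is replaced) and to the value's own ValueChanged (the number inside is mutated), and it fires immediately when a new value object is assigned. A minimal consumer sketch mirroring the RegisterEvents methods in the classes above:

    [Storable]
    private double scale;  // cached copy of scaleParameter.Value.Value

    private void RegisterEvents() {
      // keeps the cache in sync whether the DoubleValue is replaced or edited in place
      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter,
        () => { scale = scaleParameter.Value.Value; });
    }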
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj
r8477 → r8742

     <Private>False</Private>
   </Reference>
-  <Reference Include="HeuristicLab.Algorithms.Benchmarks-3.3">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.Benchmarks-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Algorithms.GradientDescent-3.3">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.GradientDescent-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Analysis-3.3">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Analysis-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Collections-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Collections-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Common-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Common-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Common.Resources-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Common.Resources-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Core-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Data-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Data-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Encodings.RealVectorEncoding-3.3">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Encodings.RealVectorEncoding-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Operators-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Optimization-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Optimization-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Parameters-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Parameters-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Persistence-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Persistence-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.PluginInfrastructure-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.PluginInfrastructure-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Classification-3.4">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Problems.DataAnalysis.Symbolic.Classification-3.4.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Problems.Instances-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Random-3.3.dll</HintPath>
-  </Reference>
-  <Reference Include="LibSVM-1.6.3, Version=1.6.3.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
-    <HintPath>..\..\..\..\trunk\sources\bin\LibSVM-1.6.3.dll</HintPath>
+  <Reference Include="HeuristicLab.Algorithms.GradientDescent-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Analysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Collections-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Common-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Common.Resources-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Data-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Encodings.RealVectorEncoding-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Optimization-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Parameters-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Persistence-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.PluginInfrastructure-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Classification-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Problems.Instances-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
+  <Reference Include="LibSVM-3.12, Version=3.12.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
+    <HintPath>..\..\..\..\trunk\sources\bin\LibSVM-3.12.dll</HintPath>
     <Private>False</Private>
   </Reference>
…
   </Compile>
   <Compile Include="FixedDataAnalysisAlgorithm.cs" />
-  <Compile Include="GaussianProcess\CovarianceRQiso.cs" />
+  <Compile Include="GaussianProcess\CovarianceMask.cs" />
+  <Compile Include="GaussianProcess\GaussianProcessClassificationSolutionCreator.cs" />
+  <Compile Include="GaussianProcess\GaussianProcessClassificationModelCreator.cs" />
+  <Compile Include="GaussianProcess\GaussianProcessClassification.cs" />
+  <Compile Include="GaussianProcess\CovarianceProduct.cs" />
+  <Compile Include="GaussianProcess\CovarianceScale.cs" />
+  <Compile Include="GaussianProcess\CovarianceRationalQuadraticArd.cs" />
+  <Compile Include="GaussianProcess\CovarianceRationalQuadraticIso.cs" />
+  <Compile Include="GaussianProcess\CovarianceSquaredExponentialArd.cs" />
+  <Compile Include="GaussianProcess\CovarianceSquaredExponentialIso.cs" />
+  <Compile Include="GaussianProcess\HyperParameter.cs" />
+  <Compile Include="GaussianProcess\CovarianceMaternIso.cs" />
+  <Compile Include="GaussianProcess\CovarianceLinearArd.cs" />
   <Compile Include="GaussianProcess\CovarianceNoise.cs" />
   <Compile Include="GaussianProcess\CovarianceConst.cs" />
-  <Compile Include="GaussianProcess\MeanProd.cs" />
+  <Compile Include="GaussianProcess\MeanProduct.cs" />
   <Compile Include="GaussianProcess\MeanSum.cs" />
-  <Compile Include="GaussianProcess\CovarianceProd.cs" />
   <Compile Include="GaussianProcess\CovarianceSum.cs" />
   <Compile Include="GaussianProcess\CovariancePeriodic.cs" />
…
   <Compile Include="GaussianProcess\MeanConst.cs" />
   <Compile Include="GaussianProcess\IMeanFunction.cs" />
-  <Compile Include="GaussianProcess\CovarianceSEard.cs" />
-  <Compile Include="GaussianProcess\CovarianceSEiso.cs" />
   <Compile Include="GaussianProcess\GaussianProcessModel.cs" />
   <Compile Include="GaussianProcess\GaussianProcessRegression.cs" />
…
     <Name>HeuristicLab.Problems.DataAnalysis-3.4</Name>
   </ProjectReference>
-  <ProjectReference Include="..\..\HeuristicLab.Random\3.3\HeuristicLab.Random-3.3.csproj">
-    <Project>{F4539FB6-4708-40C9-BE64-0A1390AEA197}</Project>
-    <Name>HeuristicLab.Random-3.3</Name>
-    <Private>False</Private>
-  </ProjectReference>
 </ItemGroup>
 <ItemGroup>
…
 -->
 <PropertyGroup>
-  <PreBuildEvent>set Path=%25Path%25;$(ProjectDir);$(SolutionDir)
+  <PreBuildEvent Condition=" '$(OS)' == 'Windows_NT' ">set Path=%25Path%25;$(ProjectDir);$(SolutionDir)
 set ProjectDir=$(ProjectDir)
 set SolutionDir=$(SolutionDir)
…
 call PreBuildEvent.cmd
 </PreBuildEvent>
+  <PreBuildEvent Condition=" '$(OS)' != 'Windows_NT' ">
+export ProjectDir=$(ProjectDir)
+export SolutionDir=$(SolutionDir)
+
+$SolutionDir/PreBuildEvent.sh
+  </PreBuildEvent>
 </PropertyGroup>
 </Project>
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs
r8477 r8742 29 29 public interface IGaussianProcessModel : IRegressionModel { 30 30 double NegativeLogLikelihood { get; } 31 double SigmaNoise { get; } 31 32 IMeanFunction MeanFunction { get; } 32 33 ICovarianceFunction CovarianceFunction { get; } 33 double[] GetHyperparameterGradients();34 double[] HyperparameterGradients { get; } 34 35 35 36 IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows); -
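The interface change above turns the hyperparameter gradients from a method into a property and additionally exposes the noise level (SigmaNoise). A minimal sketch of how a gradient-based optimizer might consume the revised interface; only the interface members come from this changeset, while the helper, its parameters, and the update rule are illustrative assumptions:

    // Hypothetical helper: one plain gradient-descent step on the GP hyperparameters.
    // IGaussianProcessModel members are from the changeset; everything else is assumed.
    static void GradientStep(IGaussianProcessModel model, double[] hyp, double learningRate) {
      double[] grad = model.HyperparameterGradients; // property replaces GetHyperparameterGradients()
      for (int i = 0; i < grad.Length; i++)
        hyp[i] -= learningRate * grad[i];            // descend on the negative log likelihood
    }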
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/ISupportVectorMachineModel.cs
r7259 r8742 20 20 #endregion 21 21 22 using HeuristicLab.Optimization;23 22 using HeuristicLab.Problems.DataAnalysis; 24 using HeuristicLab.Core; 25 using System.Collections.Generic; 23 using LibSVM; 26 24 27 25 namespace HeuristicLab.Algorithms.DataAnalysis { … … 30 28 /// </summary> 31 29 public interface ISupportVectorMachineModel : IDataAnalysisModel, IRegressionModel, IClassificationModel { 32 SVM.Model Model { get; }33 SVM.RangeTransform RangeTransform { get; }30 svm_model Model { get; } 31 RangeTransform RangeTransform { get; } 34 32 Dataset SupportVectors { get; } 35 33 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs
r8430 r8742 111 111 IClassificationProblemData problemData, 112 112 IEnumerable<int> rows) { 113 return new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter); 113 var model = new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter, new AccuracyMaximizationThresholdCalculator()); 114 model.RecalculateModelParameters(problemData, rows); 115 return model; 114 116 } 115 117 } -
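The revised factory method now passes an AccuracyMaximizationThresholdCalculator and recalculates the model parameters on the training rows, so the discriminant thresholds are tuned before the solution is built. As a rough illustration of what threshold-based discriminant classification means (a generic sketch under assumed threshold semantics, not the HeuristicLab implementation), a scalar discriminant score is mapped to a class value by ordered thresholds:

    // Generic, illustrative sketch: thresholds[i] is assumed to be the lower
    // bound of class i, sorted ascending.
    static double Classify(double score, double[] thresholds, double[] classValues) {
      int cls = 0;
      for (int i = 0; i < thresholds.Length; i++)
        if (score >= thresholds[i]) cls = i;  // last lower bound not exceeding the score
      return classValues[cls];
    }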
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitClassificationSolution.cs
r7259 r8742 45 45 public MultinomialLogitClassificationSolution(IClassificationProblemData problemData, MultinomialLogitModel logitModel) 46 46 : base(logitModel, problemData) { 47 RecalculateResults();48 47 } 49 48 … … 51 50 return new MultinomialLogitClassificationSolution(this, cloner); 52 51 } 53 54 protected override void RecalculateResults() {55 CalculateResults();56 }57 52 } 58 53 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs
r7259 r8742 109 109 110 110 public MultinomialLogitClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 111 return new MultinomialLogitClassificationSolution( problemData, this);111 return new MultinomialLogitClassificationSolution(new ClassificationProblemData(problemData), this); 112 112 } 113 113 IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) { -
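The one-line change above is part of a pattern applied throughout this changeset (see also NcaModel, NearestNeighbourModel, NeuralNetworkModel, NeuralNetworkEnsembleModel, RandomForestModel, and SupportVectorMachineModel below): instead of handing the shared problem-data reference to the new solution, the model wraps it in a fresh ClassificationProblemData (or the matching regression/ensemble variant). A minimal sketch of the shape of the change; the motivation suggested here, giving each solution its own copy so it is decoupled from later changes to the original problem data, is an assumption and is not spelled out in the diff:

    // before: the solution shares the caller's problem data instance
    return new MultinomialLogitClassificationSolution(problemData, this);
    // after: the solution gets its own wrapped copy
    return new MultinomialLogitClassificationSolution(new ClassificationProblemData(problemData), this);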
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Matrix.cs
r8471 r8742 70 70 71 71 public Matrix Transpose() { 72 var result = new Matrix(Transpose(values, Columns, Rows), Columns, Rows); 73 return result; 72 return new Matrix(Transpose(values, Columns, Rows), Columns, Rows); 74 73 } 75 74 … … 125 124 } 126 125 127 public double VectorLength() { 128 return Math.Sqrt(SquaredVectorLength()); 129 } 130 131 public double SquaredVectorLength() { 132 if (Rows != 1) throw new ArgumentException("Length only works on vectors."); 126 public double EuclideanNorm() { 127 return Math.Sqrt(SumOfSquares()); 128 } 129 130 public double SumOfSquares() { 133 131 return values.Sum(x => x * x); 134 132 } … … 137 135 if (Rows != 1 || other.Rows != 1) throw new ArgumentException("OuterProduct can only be applied to vectors."); 138 136 return Transpose().Multiply(other); 137 } 138 139 public IEnumerable<double> ColumnSums() { 140 return Transpose().RowSums(); 141 } 142 143 public IEnumerable<double> RowSums() { 144 var sum = 0.0; 145 int counter = 0; 146 foreach (var v in values) { 147 sum += v; 148 counter++; 149 if (counter == Rows) { 150 yield return sum; 151 sum = 0.0; 152 counter = 0; 153 } 154 } 139 155 } 140 156 -
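The new RowSums iterates once over the flattened value buffer and emits a partial sum every Rows elements, and ColumnSums simply reuses it through a transpose. Below is a standalone sketch of that chunked streaming-sum idea; note that whether a chunk corresponds to a row or a column depends on the enumeration order of the internal values sequence, which this diff does not show, so the chunk size here is a parameter rather than a fixed Rows:

    using System.Collections.Generic;

    static class MatrixSketch {
      // Accumulate over a flat sequence and yield one sum per fixed-size chunk,
      // mirroring the yield-based loop in the new RowSums().
      public static IEnumerable<double> ChunkSums(IEnumerable<double> values, int chunkSize) {
        double sum = 0.0;
        int counter = 0;
        foreach (var v in values) {
          sum += v;
          if (++counter == chunkSize) {
            yield return sum;
            sum = 0.0;
            counter = 0;
          }
        }
      }
    }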
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs
r8471 r8742 35 35 36 36 namespace HeuristicLab.Algorithms.DataAnalysis { 37 internal delegate void Reporter(double quality, double[] coefficients );37 internal delegate void Reporter(double quality, double[] coefficients, double[] gradients); 38 38 /// <summary> 39 39 /// Neighborhood Components Analysis 40 40 /// </summary> 41 [Item("Neighborhood Components Analysis (NCA)", "Implementation of Neighborhood Components Analysis based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")] 41 [Item("Neighborhood Components Analysis (NCA)", @"Implementation of Neighborhood Components Analysis 42 based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. 43 Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520 44 with additional regularizations described in Z. Yang, J. Laaksonen. 2007. 45 Regularized Neighborhood Component Analysis. Lecture Notes in Computer Science, 4522. pp. 253-262.")] 42 46 [Creatable("Data Analysis")] 43 47 [StorableClass] … … 59 63 get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; } 60 64 } 65 public IFixedValueParameter<DoubleValue> RegularizationParameter { 66 get { return (IFixedValueParameter<DoubleValue>)Parameters["Regularization"]; } 67 } 61 68 #endregion 62 69 63 70 #region Properties 64 p rivateint K {71 public int K { 65 72 get { return KParameter.Value.Value; } 66 73 set { KParameter.Value.Value = value; } 67 74 } 68 p rivateint Dimensions {75 public int Dimensions { 69 76 get { return DimensionsParameter.Value.Value; } 70 77 set { DimensionsParameter.Value.Value = value; } 71 78 } 72 p rivateint NeighborSamples {79 public int NeighborSamples { 73 80 get { return NeighborSamplesParameter.Value.Value; } 74 81 set { NeighborSamplesParameter.Value.Value = value; } 75 82 } 76 p rivateint Iterations {83 public int Iterations { 77 84 get { return IterationsParameter.Value.Value; } 78 85 set { IterationsParameter.Value.Value = value; } 86 } 87 public double Regularization { 88 get { return RegularizationParameter.Value.Value; } 89 set { RegularizationParameter.Value.Value = value; } 79 90 } 80 91 #endregion … … 85 96 public NcaAlgorithm() 86 97 : base() { 87 Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue( 1)));98 Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(3))); 88 99 Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2))); 89 100 Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate.")); 90 Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(50))); 91 Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. 
The method might still terminate earlier if a local optimum has already been reached.", new IntValue(20))); 101 Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(60))); 102 Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(50))); 103 Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0))); 92 104 93 105 INCAInitializer defaultInitializer = null; … … 105 117 } 106 118 119 [StorableHook(HookType.AfterDeserialization)] 120 private void AfterDeserialization() { 121 if (!Parameters.ContainsKey("Regularization")) { 122 Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0))); 123 } 124 } 125 107 126 public override void Prepare() { 108 127 if (Problem != null) base.Prepare(); … … 113 132 114 133 var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone(); 115 var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None); 116 Results.Add(new Result("ClassificationSolution", "The classification solution.", model.CreateClassificationSolution(clonedProblem))); 117 118 119 public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) { 134 var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Regularization, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None); 135 var solution = model.CreateClassificationSolution(clonedProblem); 136 if (!Results.ContainsKey("ClassificationSolution")) 137 Results.Add(new Result("ClassificationSolution", "The classification solution.", solution)); 138 else Results["ClassificationSolution"].Value = solution; 139 } 140 141 public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) { 120 142 var clonedProblem = (IClassificationProblemData)data.Clone(); 121 var model = Train(clonedProblem, k, dimensions, neighborSamples, iterations, initializer);143 var model = Train(clonedProblem, k, dimensions, neighborSamples, regularization, iterations, initializer); 122 144 return model.CreateClassificationSolution(clonedProblem); 123 145 } 124 146 125 public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {126 return Train(problemData, k, dimensions, neighborSamples, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);127 } 128 129 public static INcaModel 
Train(IClassificationProblemData problemData, int k, int neighborSamples, int iterations, double[,] initalMatrix) {147 public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) { 148 return Train(problemData, k, dimensions, neighborSamples, regularization, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None); 149 } 150 151 public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, double regularization, int iterations, double[,] initalMatrix) { 130 152 var matrix = new double[initalMatrix.Length]; 131 153 for (int i = 0; i < initalMatrix.GetLength(0); i++) 132 154 for (int j = 0; j < initalMatrix.GetLength(1); j++) 133 155 matrix[i * initalMatrix.GetLength(1) + j] = initalMatrix[i, j]; 134 return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, iterations, matrix, null, CancellationToken.None);135 } 136 137 private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {156 return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, regularization, iterations, matrix, null, CancellationToken.None); 157 } 158 159 private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) { 138 160 var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices); 139 161 var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling); … … 146 168 alglib.mincgsetcond(state, 0, 0, 0, iterations); 147 169 alglib.mincgsetxrep(state, true); 170 //alglib.mincgsetgradientcheck(state, 0.01); 148 171 int neighborSampleSize = neighborSamples; 149 Optimize(state, scaledData, classes, dimensions, neighborSampleSize, cancellation, reporter);172 Optimize(state, scaledData, classes, dimensions, neighborSampleSize, regularization, cancellation, reporter); 150 173 alglib.mincgresults(state, out matrix, out rep); 174 if (rep.terminationtype == -7) throw new InvalidOperationException("Gradient verification failed."); 151 175 152 176 var transformationMatrix = new double[attributes, dimensions]; … … 159 183 } 160 184 161 private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {185 private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda, CancellationToken cancellation, Reporter reporter) { 162 186 while (alglib.mincgiteration(state)) { 163 187 if (cancellation.IsCancellationRequested) break; 164 188 if (state.needfg) { 165 Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize );189 Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize, lambda); 166 190 continue; 167 191 } 168 192 if (state.innerobj.xupdated) { 169 193 if (reporter != null) 170 reporter(state.innerobj.f, state.innerobj.x );194 reporter(state.innerobj.f, state.innerobj.x, state.innerobj.g); 171 195 continue; 172 196 } … … 175 199 } 176 200 177 private static void 
Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize ) {201 private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda) { 178 202 var instances = data.GetLength(0); 179 203 var attributes = data.GetLength(1); … … 192 216 } 193 217 var kVector = new Matrix(GetRow(data, k)); 194 transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength()); 195 } 196 var sample = transformedDistances.OrderByDescending(x => x.Value).Take(neighborSampleSize).ToArray(); 197 var normalization = sample.Sum(x => x.Value); 198 if (normalization > 0) { 199 foreach (var s in sample) { 200 if (s.Value <= 0) break; 201 alglib.sparseset(probabilities, i, s.Key, s.Value / normalization); 202 } 218 transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SumOfSquares()); 219 } 220 var normalization = transformedDistances.Sum(x => x.Value); 221 if (normalization <= 0) continue; 222 foreach (var s in transformedDistances.Where(x => x.Value > 0).OrderByDescending(x => x.Value).Take(neighborSampleSize)) { 223 alglib.sparseset(probabilities, i, s.Key, s.Value / normalization); 203 224 } 204 225 } … … 224 245 } 225 246 226 func = -pi.Sum() ;247 func = -pi.Sum() + lambda * AMatrix.SumOfSquares(); 227 248 228 249 r = 0; 229 250 var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose(); 230 251 foreach (var g in newGrad) { 231 grad[r++] = g; 232 } 233 } 234 235 private void ReportQuality(double func, double[] coefficients) { 252 grad[r] = g + lambda * 2 * A[r]; 253 r++; 254 } 255 } 256 257 private void ReportQuality(double func, double[] coefficients, double[] gradients) { 236 258 var instances = Problem.ProblemData.TrainingIndices.Count(); 237 259 DataTable qualities; … … 243 265 qualities.Rows["Quality"].Values.Add(-func / instances); 244 266 267 string[] attributNames = Problem.ProblemData.AllowedInputVariables.ToArray(); 268 if (gradients != null) { 269 DataTable grads; 270 if (!Results.ContainsKey("Gradients")) { 271 grads = new DataTable("Gradients"); 272 for (int i = 0; i < gradients.Length; i++) 273 grads.Rows.Add(new DataRow(attributNames[i / Dimensions] + "-" + (i % Dimensions), string.Empty)); 274 Results.Add(new Result("Gradients", grads)); 275 } else grads = (DataTable)Results["Gradients"].Value; 276 for (int i = 0; i < gradients.Length; i++) 277 grads.Rows[attributNames[i / Dimensions] + "-" + (i % Dimensions)].Values.Add(gradients[i]); 278 } 279 245 280 if (!Results.ContainsKey("Quality")) { 246 281 Results.Add(new Result("Quality", new DoubleValue(-func / instances))); 247 282 } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances; 283 284 var attributes = attributNames.Length; 285 var transformationMatrix = new double[attributes, Dimensions]; 286 var counter = 0; 287 for (var i = 0; i < attributes; i++) 288 for (var j = 0; j < Dimensions; j++) 289 transformationMatrix[i, j] = coefficients[counter++]; 290 291 var scaling = new Scaling(Problem.ProblemData.Dataset, attributNames, Problem.ProblemData.TrainingIndices); 292 var model = new NcaModel(K, transformationMatrix, Problem.ProblemData.Dataset, Problem.ProblemData.TrainingIndices, Problem.ProblemData.TargetVariable, attributNames, scaling, Problem.ProblemData.ClassValues.ToArray()); 293 294 IClassificationSolution solution = 
model.CreateClassificationSolution(Problem.ProblemData); 295 if (!Results.ContainsKey("ClassificationSolution")) { 296 Results.Add(new Result("ClassificationSolution", solution)); 297 } else { 298 Results["ClassificationSolution"].Value = solution; 299 } 248 300 } 249 301 … … 252 304 yield return data[row, i]; 253 305 } 306 254 307 } 255 308 } -
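Reading the reworked Gradient method together with the updated item description, the objective being minimized is regularized NCA: the stochastic-neighbor probabilities use transformed squared distances (row vectors, matching iVector.Multiply(AMatrix) in the code), and the lambda terms in func and grad add a squared Frobenius penalty as in Yang and Laaksonen (2007). In standard notation (the truncation to the neighborSamples largest probabilities is a speed approximation on top of this):

    \[ p_{ik} = \frac{\exp(-\lVert x_i A - x_k A\rVert^2)}{\sum_{l \neq i} \exp(-\lVert x_i A - x_l A\rVert^2)}, \qquad p_i = \sum_{k:\,c_k = c_i} p_{ik} \]
    \[ f(A) = -\sum_i p_i + \lambda\,\lVert A\rVert_F^2, \qquad \nabla_A f = \nabla_A\Big(-\sum_i p_i\Big) + 2\lambda A \]

The last line corresponds directly to func = -pi.Sum() + lambda * AMatrix.SumOfSquares() and grad[r] = g + lambda * 2 * A[r] in the diff.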
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaClassificationSolution.cs
r8471 r8742 42 42 public NcaClassificationSolution(IClassificationProblemData problemData, INcaModel ncaModel) 43 43 : base(ncaModel, problemData) { 44 RecalculateResults();45 44 } 46 45 … … 48 47 return new NcaClassificationSolution(this, cloner); 49 48 } 50 51 protected override void RecalculateResults() {52 CalculateResults();53 }54 49 } 55 50 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaModel.cs
r8471 r8742 82 82 83 83 public INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 84 return new NcaClassificationSolution( problemData, this);84 return new NcaClassificationSolution(new ClassificationProblemData(problemData), this); 85 85 } 86 86 -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassificationSolution.cs
r7259 r8742 45 45 public NearestNeighbourClassificationSolution(IClassificationProblemData problemData, INearestNeighbourModel nnModel) 46 46 : base(nnModel, problemData) { 47 RecalculateResults();48 47 } 49 48 … … 51 50 return new NearestNeighbourClassificationSolution(this, cloner); 52 51 } 53 54 protected override void RecalculateResults() {55 CalculateResults();56 }57 52 } 58 53 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs
r8477 r8742 202 202 203 203 public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) { 204 return new NearestNeighbourRegressionSolution( problemData, this);204 return new NearestNeighbourRegressionSolution(new RegressionProblemData(problemData), this); 205 205 } 206 206 IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) { … … 208 208 } 209 209 public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 210 return new NearestNeighbourClassificationSolution( problemData, this);210 return new NearestNeighbourClassificationSolution(new ClassificationProblemData(problemData), this); 211 211 } 212 212 IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) { -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassificationSolution.cs
r7259 r8742 45 45 public NeuralNetworkClassificationSolution(IClassificationProblemData problemData, INeuralNetworkModel nnModel) 46 46 : base(nnModel, problemData) { 47 RecalculateResults();48 47 } 49 48 … … 51 50 return new NeuralNetworkClassificationSolution(this, cloner); 52 51 } 53 protected override void RecalculateResults() { 54 CalculateResults(); 55 } 52 56 53 } 57 54 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassificationSolution.cs
r7259 r8742 45 45 public NeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, INeuralNetworkEnsembleModel nnModel) 46 46 : base(nnModel, problemData) { 47 RecalculateResults();48 47 } 49 48 … … 51 50 return new NeuralNetworkEnsembleClassificationSolution(this, cloner); 52 51 } 53 54 protected override void RecalculateResults() {55 CalculateResults();56 }57 52 } 58 53 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs
r7694 r8742 130 130 131 131 public INeuralNetworkEnsembleRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) { 132 return new NeuralNetworkEnsembleRegressionSolution( problemData, this);132 return new NeuralNetworkEnsembleRegressionSolution(new RegressionEnsembleProblemData(problemData), this); 133 133 } 134 134 IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) { … … 136 136 } 137 137 public INeuralNetworkEnsembleClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 138 return new NeuralNetworkEnsembleClassificationSolution( problemData, this);138 return new NeuralNetworkEnsembleClassificationSolution(new ClassificationEnsembleProblemData(problemData), this); 139 139 } 140 140 IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) { -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs
r7259 r8742 138 138 139 139 public INeuralNetworkRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) { 140 return new NeuralNetworkRegressionSolution( problemData, this);140 return new NeuralNetworkRegressionSolution(new RegressionProblemData(problemData), this); 141 141 } 142 142 IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) { … … 144 144 } 145 145 public INeuralNetworkClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 146 return new NeuralNetworkClassificationSolution( problemData, this);146 return new NeuralNetworkClassificationSolution(new ClassificationProblemData(problemData), this); 147 147 } 148 148 IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) { -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Plugin.cs.frame
r8477 r8742 46 46 [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Classification", "3.4")] 47 47 [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Regression", "3.4")] 48 [PluginDependency("HeuristicLab.LibSVM", " 1.6.3")]48 [PluginDependency("HeuristicLab.LibSVM", "3.12")] 49 49 [PluginDependency("HeuristicLab.Random", "3.3")] 50 50 public class HeuristicLabAlgorithmsDataAnalysisPlugin : PluginBase { -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestClassificationSolution.cs
r7259 r8742 45 45 public RandomForestClassificationSolution(IClassificationProblemData problemData, IRandomForestModel randomForestModel) 46 46 : base(randomForestModel, problemData) { 47 RecalculateResults();48 47 } 49 48 … … 51 50 return new RandomForestClassificationSolution(this, cloner); 52 51 } 53 54 protected override void RecalculateResults() {55 CalculateResults();56 }57 52 } 58 53 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs
r7259 r8742 132 132 133 133 public IRandomForestRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) { 134 return new RandomForestRegressionSolution( problemData, this);134 return new RandomForestRegressionSolution(new RegressionProblemData(problemData), this); 135 135 } 136 136 IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) { … … 138 138 } 139 139 public IRandomForestClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 140 return new RandomForestClassificationSolution( problemData, this);140 return new RandomForestClassificationSolution(new ClassificationProblemData(problemData), this); 141 141 } 142 142 IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) { -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassification.cs
r8430 r8742 30 30 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 31 31 using HeuristicLab.Problems.DataAnalysis; 32 using LibSVM; 32 33 33 34 namespace HeuristicLab.Algorithms.DataAnalysis { … … 44 45 private const string NuParameterName = "Nu"; 45 46 private const string GammaParameterName = "Gamma"; 47 private const string DegreeParameterName = "Degree"; 46 48 47 49 #region parameter properties … … 60 62 public IValueParameter<DoubleValue> GammaParameter { 61 63 get { return (IValueParameter<DoubleValue>)Parameters[GammaParameterName]; } 64 } 65 public IValueParameter<IntValue> DegreeParameter { 66 get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; } 62 67 } 63 68 #endregion … … 79 84 public DoubleValue Gamma { 80 85 get { return GammaParameter.Value; } 86 } 87 public IntValue Degree { 88 get { return DegreeParameter.Value; } 81 89 } 82 90 #endregion … … 103 111 Parameters.Add(new ValueParameter<DoubleValue>(CostParameterName, "The value of the C (cost) parameter of C-SVC.", new DoubleValue(1.0))); 104 112 Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0))); 113 Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3))); 105 114 } 106 115 [StorableHook(HookType.AfterDeserialization)] 107 private void AfterDeserialization() { } 116 private void AfterDeserialization() { 117 #region backwards compatibility (change with 3.4) 118 if (!Parameters.ContainsKey(DegreeParameterName)) 119 Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3))); 120 #endregion 121 } 108 122 109 123 public override IDeepCloneable Clone(Cloner cloner) { … … 118 132 int nSv; 119 133 var solution = CreateSupportVectorClassificationSolution(problemData, selectedInputVariables, 120 SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, 134 SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Degree.Value, 121 135 out trainingAccuracy, out testAccuracy, out nSv); 122 136 123 137 Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution)); 124 138 Results.Add(new Result("Training accuracy", "The accuracy of the SVR solution on the training partition.", new DoubleValue(trainingAccuracy))); 125 Results.Add(new Result("Test R²", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));139 Results.Add(new Result("Test accuracy", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy))); 126 140 Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv))); 127 141 } 128 142 129 143 public static SupportVectorClassificationSolution CreateSupportVectorClassificationSolution(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables, 130 string svmType, string kernelType, double cost, double nu, double gamma, 144 string svmType, string kernelType, double cost, double nu, double gamma, int degree, 131 145 out double trainingAccuracy, out double testAccuracy, out int nSv) { 132 146 Dataset dataset = problemData.Dataset; … … 135 149 136 150 //extract SVM parameters from scope and set them 137 SVM.Parameter parameter = new SVM.Parameter();138 parameter. 
SvmType = (SVM.SvmType)Enum.Parse(typeof(SVM.SvmType), svmType, true);139 parameter. KernelType = (SVM.KernelType)Enum.Parse(typeof(SVM.KernelType), kernelType, true);151 svm_parameter parameter = new svm_parameter(); 152 parameter.svm_type = GetSvmType(svmType); 153 parameter.kernel_type = GetKernelType(kernelType); 140 154 parameter.C = cost; 141 parameter.Nu = nu; 142 parameter.Gamma = gamma; 143 parameter.CacheSize = 500; 144 parameter.Probability = false; 145 155 parameter.nu = nu; 156 parameter.gamma = gamma; 157 parameter.cache_size = 500; 158 parameter.probability = 0; 159 parameter.eps = 0.001; 160 parameter.degree = degree; 161 parameter.shrinking = 1; 162 parameter.coef0 = 0; 163 164 165 var weightLabels = new List<int>(); 166 var weights = new List<double>(); 146 167 foreach (double c in problemData.ClassValues) { 147 168 double wSum = 0.0; … … 151 172 } 152 173 } 153 parameter.Weights.Add((int)c, wSum); 174 weightLabels.Add((int)c); 175 weights.Add(wSum); 154 176 } 155 156 157 SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows); 158 SVM.RangeTransform rangeTransform = SVM.RangeTransform.Compute(problem); 159 SVM.Problem scaledProblem = SVM.Scaling.Scale(rangeTransform, problem); 160 var svmModel = SVM.Training.Train(scaledProblem, parameter); 177 parameter.weight_label = weightLabels.ToArray(); 178 parameter.weight = weights.ToArray(); 179 180 181 svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows); 182 RangeTransform rangeTransform = RangeTransform.Compute(problem); 183 svm_problem scaledProblem = rangeTransform.Scale(problem); 184 var svmModel = svm.svm_train(scaledProblem, parameter); 161 185 var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues); 162 186 var solution = new SupportVectorClassificationSolution(model, (IClassificationProblemData)problemData.Clone()); 163 187 164 nSv = svmModel.S upportVectorCount;188 nSv = svmModel.SV.Length; 165 189 trainingAccuracy = solution.TrainingAccuracy; 166 190 testAccuracy = solution.TestAccuracy; 167 191 168 192 return solution; 193 } 194 195 private static int GetSvmType(string svmType) { 196 if (svmType == "NU_SVC") return svm_parameter.NU_SVC; 197 if (svmType == "C_SVC") return svm_parameter.C_SVC; 198 throw new ArgumentException("Unknown SVM type"); 199 } 200 201 private static int GetKernelType(string kernelType) { 202 if (kernelType == "LINEAR") return svm_parameter.LINEAR; 203 if (kernelType == "POLY") return svm_parameter.POLY; 204 if (kernelType == "SIGMOID") return svm_parameter.SIGMOID; 205 if (kernelType == "RBF") return svm_parameter.RBF; 206 throw new ArgumentException("Unknown kernel type"); 169 207 } 170 208 #endregion -
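With the move to the LibSVM 3.12 port, SVM and kernel types are now plain string-to-constant mappings (GetSvmType/GetKernelType) and a Degree parameter was added for the polynomial kernel. A hypothetical call of the updated static API; problemData is assumed to exist, and the literals are examples of the strings the mapping functions accept ("NU_SVC"/"C_SVC" and "LINEAR"/"POLY"/"SIGMOID"/"RBF"):

    // Hypothetical usage sketch of the changed signature (note the added degree argument).
    double trainAcc, testAcc;
    int nSv;
    var solution = SupportVectorClassification.CreateSupportVectorClassificationSolution(
      problemData, problemData.AllowedInputVariables,
      "NU_SVC", "RBF",
      1.0, 0.5, 1.0, 3,   // cost, nu, gamma, degree (degree only affects "POLY")
      out trainAcc, out testAcc, out nSv);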
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassificationSolution.cs
r7259 r8742 45 45 public SupportVectorClassificationSolution(SupportVectorMachineModel model, IClassificationProblemData problemData) 46 46 : base(model, problemData) { 47 RecalculateResults();48 47 } 49 48 … … 51 50 return new SupportVectorClassificationSolution(this, cloner); 52 51 } 53 54 protected override void RecalculateResults() {55 CalculateResults();56 }57 52 } 58 53 } -
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineModel.cs
r8430 r8742 29 29 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 30 30 using HeuristicLab.Problems.DataAnalysis; 31 using SVM;31 using LibSVM; 32 32 33 33 namespace HeuristicLab.Algorithms.DataAnalysis { … … 39 39 public sealed class SupportVectorMachineModel : NamedItem, ISupportVectorMachineModel { 40 40 41 private SVM.Model model;41 private svm_model model; 42 42 /// <summary> 43 43 /// Gets or sets the SVM model. 44 44 /// </summary> 45 public SVM.Model Model {45 public svm_model Model { 46 46 get { return model; } 47 47 set { … … 57 57 /// Gets or sets the range transformation for the model. 58 58 /// </summary> 59 private SVM.RangeTransform rangeTransform;60 public SVM.RangeTransform RangeTransform {59 private RangeTransform rangeTransform; 60 public RangeTransform RangeTransform { 61 61 get { return rangeTransform; } 62 62 set { … … 71 71 public Dataset SupportVectors { 72 72 get { 73 var data = new double[Model. SupportVectorCount, allowedInputVariables.Count()];74 for (int i = 0; i < Model. SupportVectorCount; i++) {75 var sv = Model.S upportVectors[i];73 var data = new double[Model.sv_coef.Length, allowedInputVariables.Count()]; 74 for (int i = 0; i < Model.sv_coef.Length; i++) { 75 var sv = Model.SV[i]; 76 76 for (int j = 0; j < sv.Length; j++) { 77 data[i, j] = sv[j]. Value;77 data[i, j] = sv[j].value; 78 78 } 79 79 } … … 101 101 this.classValues = (double[])original.classValues.Clone(); 102 102 } 103 public SupportVectorMachineModel( SVM.Model model, SVM.RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> classValues)103 public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> classValues) 104 104 : this(model, rangeTransform, targetVariable, allowedInputVariables) { 105 105 this.classValues = classValues.ToArray(); 106 106 } 107 public SupportVectorMachineModel( SVM.Model model, SVM.RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables)107 public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables) 108 108 : base() { 109 109 this.name = ItemName; … … 124 124 } 125 125 public SupportVectorRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) { 126 return new SupportVectorRegressionSolution(this, problemData);126 return new SupportVectorRegressionSolution(this, new RegressionProblemData(problemData)); 127 127 } 128 128 IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) { … … 153 153 154 154 public SupportVectorClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 155 return new SupportVectorClassificationSolution(this, problemData);155 return new SupportVectorClassificationSolution(this, new ClassificationProblemData(problemData)); 156 156 } 157 157 IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) { … … 161 161 private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) { 162 162 // calculate predictions for the currently requested rows 163 SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);164 SVM.Problem scaledProblem = SVM.Scaling.Scale(RangeTransform,problem);165 166 for (int i = 
0; i < scaledProblem.Count; i++) {167 yield return SVM.Prediction.Predict(Model, scaledProblem.X[i]);163 svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows); 164 svm_problem scaledProblem = rangeTransform.Scale(problem); 165 166 for (int i = 0; i < problem.l; i++) { 167 yield return svm.svm_predict(Model, scaledProblem.x[i]); 168 168 } 169 169 } … … 183 183 get { 184 184 using (MemoryStream stream = new MemoryStream()) { 185 SVM.Model.Write(stream, Model);185 svm.svm_save_model(new StreamWriter(stream), Model); 186 186 stream.Seek(0, System.IO.SeekOrigin.Begin); 187 187 StreamReader reader = new StreamReader(stream); … … 191 191 set { 192 192 using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(value))) { 193 model = SVM.Model.Read(stream);193 model = svm.svm_load_model(new StreamReader(stream)); 194 194 } 195 195 } … … 199 199 get { 200 200 using (MemoryStream stream = new MemoryStream()) { 201 SVM.RangeTransform.Write(stream, RangeTransform);201 RangeTransform.Write(stream, RangeTransform); 202 202 stream.Seek(0, System.IO.SeekOrigin.Begin); 203 203 StreamReader reader = new StreamReader(stream); … … 207 207 set { 208 208 using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(value))) { 209 RangeTransform = SVM.RangeTransform.Read(stream);209 RangeTransform = RangeTransform.Read(stream); 210 210 } 211 211 } -
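One detail worth noting in the rewritten model: the RangeTransform computed on the training problem is stored with the model and reapplied in GetEstimatedValuesHelper before svm.svm_predict is called, so training and prediction see identically scaled inputs. A condensed sketch of that pairing, using only calls that appear in this changeset (the helper method itself and its parameters are illustrative):

    // Sketch: fit the transform on training rows, reuse the same transform for prediction.
    static double[] TrainAndPredict(Dataset dataset, string target, IEnumerable<string> inputs,
                                    IEnumerable<int> trainRows, IEnumerable<int> testRows,
                                    svm_parameter parameter) {
      svm_problem train = SupportVectorMachineUtil.CreateSvmProblem(dataset, target, inputs, trainRows);
      RangeTransform rangeTransform = RangeTransform.Compute(train);   // fitted on training rows only
      svm_model model = svm.svm_train(rangeTransform.Scale(train), parameter);

      svm_problem test = rangeTransform.Scale(                         // same transform at prediction time
        SupportVectorMachineUtil.CreateSvmProblem(dataset, target, inputs, testRows));
      var predictions = new double[test.l];
      for (int i = 0; i < test.l; i++)
        predictions[i] = svm.svm_predict(model, test.x[i]);
      return predictions;
    }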
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineUtil.cs
r7259 r8742 23 23 using System.Linq; 24 24 using HeuristicLab.Problems.DataAnalysis; 25 using LibSVM; 25 26 26 27 namespace HeuristicLab.Algorithms.DataAnalysis { … … 32 33 /// <param name="rowIndices">The rows of the dataset that should be contained in the resulting SVM-problem</param> 33 34 /// <returns>A problem data type that can be used to train a support vector machine.</returns> 34 public static SVM.Problem CreateSvmProblem(Dataset dataset, string targetVariable, IEnumerable<string> inputVariables, IEnumerable<int> rowIndices) {35 public static svm_problem CreateSvmProblem(Dataset dataset, string targetVariable, IEnumerable<string> inputVariables, IEnumerable<int> rowIndices) { 35 36 double[] targetVector = 36 37 dataset.GetDoubleValues(targetVariable, rowIndices).ToArray(); 37 38 38 SVM.Node[][] nodes = new SVM.Node[targetVector.Length][];39 List< SVM.Node> tempRow;39 svm_node[][] nodes = new svm_node[targetVector.Length][]; 40 List<svm_node> tempRow; 40 41 int maxNodeIndex = 0; 41 42 int svmProblemRowIndex = 0; 42 43 List<string> inputVariablesList = inputVariables.ToList(); 43 44 foreach (int row in rowIndices) { 44 tempRow = new List< SVM.Node>();45 tempRow = new List<svm_node>(); 45 46 int colIndex = 1; // make sure the smallest node index for SVM = 1 46 47 foreach (var inputVariable in inputVariablesList) { … … 49 50 // => don't add NaN values in the dataset to the sparse SVM matrix representation 50 51 if (!double.IsNaN(value)) { 51 tempRow.Add(new SVM.Node(colIndex, value)); // nodes must be sorted in ascending ordered by column index52 tempRow.Add(new svm_node() { index = colIndex, value = value }); // nodes must be sorted in ascending ordered by column index 52 53 if (colIndex > maxNodeIndex) maxNodeIndex = colIndex; 53 54 } … … 57 58 } 58 59 59 return new SVM.Problem(targetVector.Length, targetVector, nodes, maxNodeIndex);60 return new svm_problem() { l = targetVector.Length, y = targetVector, x = nodes }; 60 61 } 61 62 } -
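CreateSvmProblem builds LibSVM's sparse row representation: node indices start at 1, must be ascending, and columns whose value is NaN are omitted entirely instead of being stored. As a tiny worked example (values invented for illustration), a dataset row with inputs (1.5, NaN, 3.0) ends up as just two nodes:

    // The NaN column is skipped, so index 2 never appears in the sparse row.
    svm_node[] row = new svm_node[] {
      new svm_node() { index = 1, value = 1.5 },
      new svm_node() { index = 3, value = 3.0 }
    };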
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorRegression.cs
r8430 r8742 30 30 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 31 31 using HeuristicLab.Problems.DataAnalysis; 32 using LibSVM; 32 33 33 34 namespace HeuristicLab.Algorithms.DataAnalysis { … … 45 46 private const string GammaParameterName = "Gamma"; 46 47 private const string EpsilonParameterName = "Epsilon"; 48 private const string DegreeParameterName = "Degree"; 47 49 48 50 #region parameter properties … … 64 66 public IValueParameter<DoubleValue> EpsilonParameter { 65 67 get { return (IValueParameter<DoubleValue>)Parameters[EpsilonParameterName]; } 68 } 69 public IValueParameter<IntValue> DegreeParameter { 70 get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; } 66 71 } 67 72 #endregion … … 86 91 public DoubleValue Epsilon { 87 92 get { return EpsilonParameter.Value; } 93 } 94 public IntValue Degree { 95 get { return DegreeParameter.Value; } 88 96 } 89 97 #endregion … … 111 119 Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0))); 112 120 Parameters.Add(new ValueParameter<DoubleValue>(EpsilonParameterName, "The value of the epsilon parameter for epsilon-SVR.", new DoubleValue(0.1))); 121 Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3))); 113 122 } 114 123 [StorableHook(HookType.AfterDeserialization)] 115 private void AfterDeserialization() { } 124 private void AfterDeserialization() { 125 #region backwards compatibility (change with 3.4) 126 if (!Parameters.ContainsKey(DegreeParameterName)) 127 Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3))); 128 #endregion 129 } 116 130 117 131 public override IDeepCloneable Clone(Cloner cloner) { … … 126 140 int nSv; 127 141 var solution = CreateSupportVectorRegressionSolution(problemData, selectedInputVariables, SvmType.Value, 128 KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, 142 KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value, 129 143 out trainR2, out testR2, out nSv); 130 144 … … 136 150 137 151 public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables, 138 string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, 152 string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree, 139 153 out double trainingR2, out double testR2, out int nSv) { 140 154 Dataset dataset = problemData.Dataset; … … 143 157 144 158 //extract SVM parameters from scope and set them 145 SVM.Parameter parameter = new SVM.Parameter();146 parameter. SvmType = (SVM.SvmType)Enum.Parse(typeof(SVM.SvmType), svmType, true);147 parameter. 
KernelType = (SVM.KernelType)Enum.Parse(typeof(SVM.KernelType), kernelType, true);159 svm_parameter parameter = new svm_parameter(); 160 parameter.svm_type = GetSvmType(svmType); 161 parameter.kernel_type = GetKernelType(kernelType); 148 162 parameter.C = cost; 149 parameter.Nu = nu; 150 parameter.Gamma = gamma; 151 parameter.P = epsilon; 152 parameter.CacheSize = 500; 153 parameter.Probability = false; 154 155 156 SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows); 157 SVM.RangeTransform rangeTransform = SVM.RangeTransform.Compute(problem); 158 SVM.Problem scaledProblem = SVM.Scaling.Scale(rangeTransform, problem); 159 var svmModel = SVM.Training.Train(scaledProblem, parameter); 160 nSv = svmModel.SupportVectorCount; 163 parameter.nu = nu; 164 parameter.gamma = gamma; 165 parameter.p = epsilon; 166 parameter.cache_size = 500; 167 parameter.probability = 0; 168 parameter.eps = 0.001; 169 parameter.degree = degree; 170 parameter.shrinking = 1; 171 parameter.coef0 = 0; 172 173 174 175 svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows); 176 RangeTransform rangeTransform = RangeTransform.Compute(problem); 177 svm_problem scaledProblem = rangeTransform.Scale(problem); 178 var svmModel = svm.svm_train(scaledProblem, parameter); 179 nSv = svmModel.SV.Length; 161 180 var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables); 162 181 var solution = new SupportVectorRegressionSolution(model, (IRegressionProblemData)problemData.Clone()); … … 165 184 return solution; 166 185 } 186 187 private static int GetSvmType(string svmType) { 188 if (svmType == "NU_SVR") return svm_parameter.NU_SVR; 189 if (svmType == "EPSILON_SVR") return svm_parameter.EPSILON_SVR; 190 throw new ArgumentException("Unknown SVM type"); 191 } 192 193 private static int GetKernelType(string kernelType) { 194 if (kernelType == "LINEAR") return svm_parameter.LINEAR; 195 if (kernelType == "POLY") return svm_parameter.POLY; 196 if (kernelType == "SIGMOID") return svm_parameter.SIGMOID; 197 if (kernelType == "RBF") return svm_parameter.RBF; 198 throw new ArgumentException("Unknown kernel type"); 199 } 167 200 #endregion 168 201 }
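The regression variant mirrors the classification changes: the epsilon parameter is passed through to svm_parameter.p (the width of the epsilon-insensitive tube in epsilon-SVR), and the new degree parameter again feeds the polynomial kernel. A hypothetical call analogous to the classification sketch above; problemData is assumed to exist, and GetSvmType here accepts "NU_SVR" or "EPSILON_SVR":

    // Hypothetical usage sketch of the changed regression signature.
    double trainR2, testR2;
    int nSv;
    var solution = SupportVectorRegression.CreateSupportVectorRegressionSolution(
      problemData, problemData.AllowedInputVariables,
      "EPSILON_SVR", "RBF",
      1.0, 0.5, 1.0, 0.1, 3,   // cost, nu, gamma, epsilon, degree
      out trainR2, out testR2, out nSv);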