Changeset 10553 for branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess
- Timestamp: 03/05/14 17:30:38 (11 years ago)
- Location: branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess
- Files: 27 edited, 5 copied
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceConst.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added using System.Linq.
- Added private bool HasFixedScaleParameter { get { return ScaleParameter.Value != null; } } and use it in GetNumberOfParameters and GetParameterValues instead of the inline null checks on ScaleParameter.Value.
- GetParameterizedCovarianceFunction: when the scale is fixed, cov.CovarianceGradient now returns Enumerable.Empty<double>(); otherwise it still delegates to GetGradient(x, i, j, scale, columnIndices).
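The pattern introduced here recurs in every covariance function touched by this changeset. The following self-contained sketch (illustrative names, not code from the changeset) shows the idea: a hyperparameter with a fixed value contributes neither an entry to the optimized parameter vector nor an entry to the gradient enumeration.

using System;
using System.Collections.Generic;
using System.Linq;

// Sketch of a constant covariance whose scale is either free (optimized) or fixed by the user.
class ConstCovarianceSketch {
  private readonly double? fixedScale;  // null means the scale is a free parameter

  public ConstCovarianceSketch(double? fixedScale) { this.fixedScale = fixedScale; }

  public int GetNumberOfParameters() { return fixedScale.HasValue ? 0 : 1; }

  public Func<int, int, double> GetCovariance(double[] p,
      out Func<int, int, IEnumerable<double>> gradient) {
    // free parameters are encoded on a log scale, as in the HeuristicLab implementation
    double scale = fixedScale ?? Math.Exp(2 * p[0]);
    if (fixedScale.HasValue)
      gradient = (i, j) => Enumerable.Empty<double>();          // fixed: no gradient entries
    else
      gradient = (i, j) => Enumerable.Repeat(2.0 * scale, 1);   // d k / d log(sigma) = 2 * sigma^2
    return (i, j) => scale;
  }
}

With a fixed scale, GetNumberOfParameters() is 0 and the optimizer never sees the parameter; with a free scale, the single entry in p and the single gradient entry stay aligned.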
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinear.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinearArd.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added private bool HasFixedInverseLengthParameter { get { return InverseLengthParameter.Value != null; } }; GetNumberOfParameters now returns 0 when the inverse length is fixed and numberOfVariables otherwise, and GetParameterValues checks the same property.
- GetParameterizedCovarianceFunction: captures fixedInverseLength and sets cov.CovarianceGradient to Enumerable.Empty<double>() when the inverse length is fixed, otherwise to GetGradient(x, i, j, inverseLength, columnIndices) as before.
- GetGradient: removed the fallback "if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));"; the column indices are now always supplied by the caller.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMask.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- GetParameterizedCovarianceFunction no longer throws InvalidOperationException ("Stacking of masking covariance functions is not supported.") when columnIndices is given; it now forwards selectedDimensions.Intersect(columnIndices) to the wrapped covariance function, so masking covariance functions can be stacked.
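A minimal illustration (names are mine, not from the changeset) of the behavioral change: the mask used to reject an incoming column selection and now narrows it instead.

using System.Collections.Generic;
using System.Linq;

static class MaskSketch {
  // r8982 threw when the caller already restricted the columns;
  // r10553 forwards only the dimensions selected by both the mask and the caller.
  public static IEnumerable<int> EffectiveColumns(IEnumerable<int> selectedDimensions,
                                                  IEnumerable<int> columnIndices) {
    return selectedDimensions.Intersect(columnIndices);
  }
}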
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMaternIso.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedScaleParameter and HasFixedInverseLengthParameter properties and use them in GetNumberOfParameters and GetParameterValues instead of the inline null checks.
- GetParameterizedCovarianceFunction captures the two flags and passes them to GetGradient; GetGradient now takes fixedInverseLength and fixedScale arguments and yields scale * dm(d, dist) only when the inverse length is free and 2 * scale * m(d, dist) only when the scale is free.
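The helpers m(d, dist) and dm(d, dist) are not part of this diff; assuming the GPML-style formulation this implementation follows, the covariance being evaluated is (with r the distance over the selected columns, l the length scale, sigma^2 the scale and degree d in {1, 3, 5}):

t = \sqrt{d}\, r / l, \qquad k_d(r) = \sigma^2 f_d(t)\, e^{-t}, \qquad f_1(t) = 1, \quad f_3(t) = 1 + t, \quad f_5(t) = 1 + t + t^2/3

The gradient entries yielded above are then sigma^2 * dm(d, t) for the length scale and 2 * sigma^2 * m(d, t) for the scale, emitted only for parameters that are actually free.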
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceNoise.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedScaleParameter property and use it in GetNumberOfParameters and GetParameterValues.
- CrossCovariance changed from a constant 0.0 to "Util.SqrDist(x, i, xt, j, 1.0, columnIndices) < 1e-9 ? scale : 0.0", i.e. a test point that coincides with a training point now receives the noise variance.
- CovarianceGradient returns Enumerable.Empty<double>() when the scale is fixed; otherwise it still returns Enumerable.Repeat(i == j ? 2.0 * scale : 0.0, 1).
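The cross-covariance change is easiest to see in isolation. A small sketch of the new behavior, with a plain squared-distance argument standing in for Util.SqrDist:

// Noise covariance between a training point and a test point: previously always 0.0,
// now the noise variance is returned when the two points coincide (squared distance
// below a small tolerance), mirroring the i == j case of the training covariance.
static double NoiseCrossCovariance(double sqrDist, double scale) {
  const double tolerance = 1e-9;
  return sqrDist < tolerance ? scale : 0.0;
}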
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovariancePeriodic.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedScaleParameter, HasFixedInverseLengthParameter and HasFixedPeriodParameter properties and use them in GetNumberOfParameters and GetParameterValues.
- GetParameterizedCovarianceFunction captures the three flags and passes them to GetGradient.
- GetGradient: the local variable v is renamed to k, the period derivative is rewritten in the equivalent form 2.0 * k * scale * Math.Exp(-2 * r * r) * Math.Sin(2 * k) * inverseLength * inverseLength (with r = Math.Sin(k) * inverseLength), and the inverse length, period and scale derivatives are each yielded only when the corresponding parameter is free.
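For reference (my summary, not part of the diff): with u = pi * d / p, d the distance over the selected columns, l the length scale, p the period and sigma^2 the scale, the periodic covariance and the log-parameter derivatives yielded by the rewritten GetGradient are

k = \sigma^2 \exp\!\left(-\tfrac{2\sin^2 u}{l^2}\right), \qquad
\frac{\partial k}{\partial \log l} = \frac{4\sigma^2 \sin^2 u}{l^2} e^{-2\sin^2 u / l^2}, \qquad
\frac{\partial k}{\partial \log p} = \frac{2\sigma^2\, u \sin 2u}{l^2} e^{-2\sin^2 u / l^2}, \qquad
\frac{\partial k}{\partial \log \sigma_f} = 2\sigma^2 e^{-2\sin^2 u / l^2}

The new period term is algebraically equal to the old one (4 sin u cos u = 2 sin 2u); the substantive change is that each term is emitted only when its parameter is free.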
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceProduct.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticArd.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedScaleParameter, HasFixedInverseLengthParameter and HasFixedShapeParameter properties and use them in GetNumberOfParameters.
- GetParameterValues: the inverse length values are now read first; their count is p.Length minus one for each free scalar parameter (scale, shape) instead of the former p.Skip(2), and scale and shape are read afterwards via the new properties.
- GetParameterizedCovarianceFunction passes the three fixed-parameter flags to GetGradient; GetGradient no longer defaults columnIndices to Enumerable.Range(0, x.GetLength(1)) when null and yields the per-dimension inverse-length derivatives, the scale derivative and the shape derivative only for parameters that are free.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticIso.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedScaleParameter, HasFixedInverseLengthParameter and HasFixedShapeParameter properties and use them in GetNumberOfParameters and GetParameterValues; the inverse length is now read from p before scale and shape.
- Bug fix in Covariance: the kernel value is now scale * Math.Pow(1 + 0.5 * d / shape, -shape) instead of the erroneous shape * Math.Pow(...), matching CrossCovariance.
- GetGradient takes fixedInverseLength, fixedScale and fixedShape flags and yields the inverse length, scale and shape derivatives only for free parameters.
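The fix in Covariance is easiest to check against the rational quadratic kernel itself (reference formula, not shown in the diff), with D the length-scaled squared distance, alpha the shape and sigma^2 the scale; the leading factor must be the scale, which is what the corrected line returns:

k(x, x') = \sigma^2 \left(1 + \frac{D}{2\alpha}\right)^{-\alpha}, \qquad D = \frac{\lVert x - x' \rVert^2}{l^2}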
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceScale.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedScaleParameter property and use it in GetNumberOfParameters and GetParameterValues.
- GetGradient takes a fixedScale flag and yields the outer scale derivative 2 * scale * cov.Covariance(x, i, j) only when the scale is free; the scaled gradients of the wrapped covariance function are forwarded as before.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialArd.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedInverseLengthParameter and HasFixedScaleParameter properties and use them in GetNumberOfParameters.
- GetParameterValues: the inverse length values are now read first, taking p.Length minus one entry if the scale is free (instead of the former p.Skip(1)); the scale is read afterwards via the new property.
- GetGradient is annotated with "order of returned gradients must match the order in GetParameterValues!", no longer defaults columnIndices to Enumerable.Range(0, x.GetLength(1)) when null, and yields the per-dimension inverse-length derivatives and the scale derivative only for free parameters.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialIso.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- Added HasFixedInverseLengthParameter and HasFixedScaleParameter properties and use them in GetNumberOfParameters and GetParameterValues.
- GetGradient is annotated with "order of returned gradients must match the order in GetParameterValues!", takes fixedInverseLength and fixedScale flags, and yields sf2 * g * d (inverse length) and 2.0 * sf2 * g (scale) only for free parameters.
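The new comment states a contract that is easy to show in isolation. A standalone sketch (not the original method) of the gradient order used by the squared exponential (iso) kernel after this change:

using System;
using System.Collections.Generic;

static class SquaredExponentialIsoSketch {
  // GetParameterValues reads the free parameters in the order [inverseLength, scale];
  // the gradient must be yielded in exactly that order, skipping fixed parameters on
  // both sides. sqrDist is the already length-scaled squared distance.
  public static IEnumerable<double> Gradient(double sqrDist, double scale,
                                             bool fixedInverseLength, bool fixedScale) {
    double g = Math.Exp(-sqrDist / 2.0);
    if (!fixedInverseLength) yield return scale * g * sqrDist; // d k / d log(length)
    if (!fixedScale) yield return 2.0 * scale * g;             // d k / d log(sigma_f)
  }
}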
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSum.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassification.cs
r8623 → r10553
- Copyright header updated: 2002-2012 → 2002-2013; added using System.Linq and using HeuristicLab.PluginInfrastructure.
- The class now derives from GaussianProcessBase instead of EngineAlgorithm and constructs the base class with new ClassificationProblem().
- The MeanFunction, CovarianceFunction, Iterations, ApproximateGradients, Seed and SetSeedRandomly parameter names, parameter properties and convenience properties are removed (they are provided by the base class); a ModelParameterName constant ("Model") is added.
- New parameters: a ConstrainedValueParameter<IGaussianProcessClassificationModelCreator> ("The operator to create the Gaussian process model.") whose valid values are all IGaussianProcessClassificationModelCreator instances discovered via ApplicationManager, with GaussianProcessClassificationModelCreator as default, and a hidden FixedValueParameter<GaussianProcessClassificationSolutionCreator> ("The solution creator for the algorithm"). A new comment explains that GP regression and classification only differ in the model and solution creators, so a common base class with operator parameters is used; different model creators can be implemented, but the solution creator is generic and derived solution creators are not allowed.
- The hand-wired LM-BFGS operator graph (RandomCreator, GaussianProcessHyperparameterInitializer, LbfgsInitializer, LbfgsMakeStep, ConditionalBranch, model creator, LbfgsUpdateResults, LbfgsAnalyzer, final model creator and analyzer, GaussianProcessClassificationSolutionCreator, and all of their ActualName assignments) is removed from the constructor.
- Instead, ParameterizedModelCreators()/ParameterizedModelCreator() set the ProblemData, MeanFunction and CovarianceFunction ActualNames and fix the Model, Hyperparameter, HyperparameterGradients and NegativeLogLikelihood parameter names for every valid model creator; ParameterizeSolutionCreator() wires the solution creator to the Model and ProblemData parameters; and RegisterEventHandlers() (also called from the cloning constructor and after deserialization) re-parameterizes the selected creator whenever GaussianProcessModelCreatorParameter.ValueChanged fires.
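The replacement of the hand-wired operator graph follows a discover/choose/re-wire pattern. A self-contained sketch with purely illustrative names (not the HeuristicLab API):

using System;
using System.Collections.Generic;
using System.Linq;

interface IModelCreator { string ProblemDataName { get; set; } }
class DefaultModelCreator : IModelCreator { public string ProblemDataName { get; set; } }

class AlgorithmSketch {
  private readonly List<IModelCreator> validValues;
  private IModelCreator selected;

  public AlgorithmSketch(IEnumerable<IModelCreator> discoveredCreators) {
    // discover all available creators (HeuristicLab uses ApplicationManager for this),
    // pick a well-known default, and wire every candidate once
    validValues = discoveredCreators.ToList();
    selected = validValues.First(c => c is DefaultModelCreator);
    foreach (var creator in validValues) Parameterize(creator);
  }

  public void Select(IModelCreator creator) {
    // corresponds to the ValueChanged event handler: re-apply the wiring when the
    // user switches to another model creator
    selected = creator;
    Parameterize(selected);
  }

  private void Parameterize(IModelCreator creator) {
    creator.ProblemDataName = "ProblemData"; // parameter names fixed by the algorithm
  }
}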
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationModelCreator.cs
r8623 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- The class now additionally implements IGaussianProcessClassificationModelCreator.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationSolutionCreator.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs
r8732 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r8982 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
- In the prediction code (stddev computation), the parameterized covariance function is now created with Enumerable.Range(0, x.GetLength(1)), i.e. the column count of the training matrix, instead of newX.GetLength(1).
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModelCreator.cs
r8401 → r10553
- Copyright header updated: 2002-2012 → 2002-2013.
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs
r8615 r10553 2 2 #region License Information 3 3 /* HeuristicLab 4 * Copyright (C) 2002-201 2Heuristic and Evolutionary Algorithms Laboratory (HEAL)4 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 5 5 * 6 6 * This file is part of HeuristicLab. … … 22 22 23 23 using System; 24 using System.Linq; 24 25 using HeuristicLab.Algorithms.GradientDescent; 25 26 using HeuristicLab.Common; … … 30 31 using HeuristicLab.Parameters; 31 32 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable; 33 using HeuristicLab.PluginInfrastructure; 32 34 using HeuristicLab.Problems.DataAnalysis; 33 35 … … 39 41 [Creatable("Data Analysis")] 40 42 [StorableClass] 41 public sealed class GaussianProcessRegression : EngineAlgorithm, IStorableContent {43 public sealed class GaussianProcessRegression : GaussianProcessBase, IStorableContent { 42 44 public string Filename { get; set; } 43 45 … … 48 50 } 49 51 50 private const string MeanFunctionParameterName = "MeanFunction"; 51 private const string CovarianceFunctionParameterName = "CovarianceFunction"; 52 private const string MinimizationIterationsParameterName = "Iterations"; 53 private const string ApproximateGradientsParameterName = "ApproximateGradients"; 54 private const string SeedParameterName = "Seed"; 55 private const string SetSeedRandomlyParameterName = "SetSeedRandomly"; 52 private const string ModelParameterName = "Model"; 56 53 57 54 #region parameter properties 58 public I ValueParameter<IMeanFunction> MeanFunctionParameter {59 get { return (I ValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }55 public IConstrainedValueParameter<IGaussianProcessRegressionModelCreator> GaussianProcessModelCreatorParameter { 56 get { return (IConstrainedValueParameter<IGaussianProcessRegressionModelCreator>)Parameters[ModelCreatorParameterName]; } 60 57 } 61 public I ValueParameter<ICovarianceFunction> CovarianceFunctionParameter {62 get { return (I ValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }58 public IFixedValueParameter<GaussianProcessRegressionSolutionCreator> GaussianProcessSolutionCreatorParameter { 59 get { return (IFixedValueParameter<GaussianProcessRegressionSolutionCreator>)Parameters[SolutionCreatorParameterName]; } 63 60 } 64 public IValueParameter<IntValue> MinimizationIterationsParameter {65 get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }66 }67 public IValueParameter<IntValue> SeedParameter {68 get { return (IValueParameter<IntValue>)Parameters[SeedParameterName]; }69 }70 public IValueParameter<BoolValue> SetSeedRandomlyParameter {71 get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }72 }73 #endregion74 #region properties75 public IMeanFunction MeanFunction {76 set { MeanFunctionParameter.Value = value; }77 get { return MeanFunctionParameter.Value; }78 }79 public ICovarianceFunction CovarianceFunction {80 set { CovarianceFunctionParameter.Value = value; }81 get { return CovarianceFunctionParameter.Value; }82 }83 public int MinimizationIterations {84 set { MinimizationIterationsParameter.Value.Value = value; }85 get { return MinimizationIterationsParameter.Value.Value; }86 }87 public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }88 public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }89 61 #endregion 90 62 … … 93 65 private 
GaussianProcessRegression(GaussianProcessRegression original, Cloner cloner) 94 66 : base(original, cloner) { 67 RegisterEventHandlers(); 95 68 } 96 69 public GaussianProcessRegression() 97 : base( ) {70 : base(new RegressionProblem()) { 98 71 this.name = ItemName; 99 72 this.description = ItemDescription; 100 73 101 Problem = new RegressionProblem(); 74 var modelCreators = ApplicationManager.Manager.GetInstances<IGaussianProcessRegressionModelCreator>(); 75 var defaultModelCreator = modelCreators.First(c => c is GaussianProcessRegressionModelCreator); 102 76 103 Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst())); 104 Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso())); 105 Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20))); 106 Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0))); 107 Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true))); 77 // GP regression and classification algorithms only differ in the model and solution creators, 78 // thus we use a common base class and use operator parameters to implement the specific versions. 79 // Different model creators can be implemented, 80 // but the solution creator is implemented in a generic fashion already and we don't allow derived solution creators 81 Parameters.Add(new ConstrainedValueParameter<IGaussianProcessRegressionModelCreator>(ModelCreatorParameterName, "The operator to create the Gaussian process model.", 82 new ItemSet<IGaussianProcessRegressionModelCreator>(modelCreators), defaultModelCreator)); 83 // this parameter is not intended to be changed, 84 Parameters.Add(new FixedValueParameter<GaussianProcessRegressionSolutionCreator>(SolutionCreatorParameterName, "The solution creator for the algorithm", 85 new GaussianProcessRegressionSolutionCreator())); 86 Parameters[SolutionCreatorParameterName].Hidden = true; 108 87 109 Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false))); 110 Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed 111 112 var randomCreator = new HeuristicLab.Random.RandomCreator(); 113 var gpInitializer = new GaussianProcessHyperparameterInitializer(); 114 var bfgsInitializer = new LbfgsInitializer(); 115 var makeStep = new LbfgsMakeStep(); 116 var branch = new ConditionalBranch(); 117 var modelCreator = new GaussianProcessRegressionModelCreator(); 118 var updateResults = new LbfgsUpdateResults(); 119 var analyzer = new LbfgsAnalyzer(); 120 var finalModelCreator = new GaussianProcessRegressionModelCreator(); 121 var finalAnalyzer = new LbfgsAnalyzer(); 122 var solutionCreator = new GaussianProcessRegressionSolutionCreator(); 123 124 OperatorGraph.InitialOperator = randomCreator; 125 randomCreator.SeedParameter.ActualName = SeedParameterName; 126 randomCreator.SeedParameter.Value = null; 127 randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName; 128 randomCreator.SetSeedRandomlyParameter.Value = null; 129 
randomCreator.Successor = gpInitializer; 130 131 gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName; 132 gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName; 133 gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name; 134 gpInitializer.HyperparameterParameter.ActualName = modelCreator.HyperparameterParameter.Name; 135 gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name; 136 gpInitializer.Successor = bfgsInitializer; 137 138 bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName; 139 bfgsInitializer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name; 140 bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName; 141 bfgsInitializer.Successor = makeStep; 142 143 makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name; 144 makeStep.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name; 145 makeStep.Successor = branch; 146 147 branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name; 148 branch.FalseBranch = modelCreator; 149 branch.TrueBranch = finalModelCreator; 150 151 modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name; 152 modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName; 153 modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName; 154 modelCreator.Successor = updateResults; 155 156 updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name; 157 updateResults.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name; 158 updateResults.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name; 159 updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName; 160 updateResults.Successor = analyzer; 161 162 analyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name; 163 analyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name; 164 analyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name; 165 analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name; 166 analyzer.PointsTableParameter.ActualName = "Hyperparameter table"; 167 analyzer.QualityGradientsTableParameter.ActualName = "Gradients table"; 168 analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table"; 169 analyzer.Successor = makeStep; 170 171 finalModelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name; 172 finalModelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName; 173 finalModelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName; 174 finalModelCreator.HyperparameterParameter.ActualName = bfgsInitializer.PointParameter.ActualName; 175 finalModelCreator.Successor = finalAnalyzer; 176 177 finalAnalyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name; 178 finalAnalyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name; 179 finalAnalyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name; 180 finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName; 181 finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName; 182 
finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName; 183 finalAnalyzer.Successor = solutionCreator; 184 185 solutionCreator.ModelParameter.ActualName = finalModelCreator.ModelParameter.Name; 186 solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name; 88 ParameterizedModelCreators(); 89 ParameterizeSolutionCreator(GaussianProcessSolutionCreatorParameter.Value); 90 RegisterEventHandlers(); 187 91 } 188 92 93 189 94 [StorableHook(HookType.AfterDeserialization)] 190 private void AfterDeserialization() { } 95 private void AfterDeserialization() { 96 RegisterEventHandlers(); 97 } 191 98 192 99 public override IDeepCloneable Clone(Cloner cloner) { 193 100 return new GaussianProcessRegression(this, cloner); 194 101 } 102 103 #region events 104 private void RegisterEventHandlers() { 105 GaussianProcessModelCreatorParameter.ValueChanged += ModelCreatorParameter_ValueChanged; 106 } 107 108 private void ModelCreatorParameter_ValueChanged(object sender, EventArgs e) { 109 ParameterizedModelCreator(GaussianProcessModelCreatorParameter.Value); 110 } 111 #endregion 112 113 private void ParameterizedModelCreators() { 114 foreach (var creator in GaussianProcessModelCreatorParameter.ValidValues) { 115 ParameterizedModelCreator(creator); 116 } 117 } 118 119 private void ParameterizedModelCreator(IGaussianProcessRegressionModelCreator modelCreator) { 120 modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name; 121 modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName; 122 modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName; 123 124 // parameter names fixed by the algorithm 125 modelCreator.ModelParameter.ActualName = ModelParameterName; 126 modelCreator.HyperparameterParameter.ActualName = HyperparameterParameterName; 127 modelCreator.HyperparameterGradientsParameter.ActualName = HyperparameterGradientsParameterName; 128 modelCreator.NegativeLogLikelihoodParameter.ActualName = NegativeLogLikelihoodParameterName; 129 } 130 131 private void ParameterizeSolutionCreator(GaussianProcessRegressionSolutionCreator solutionCreator) { 132 solutionCreator.ModelParameter.ActualName = ModelParameterName; 133 solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name; 134 } 195 135 } 196 136 } -
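The comment added to GaussianProcessRegression.cs above explains the design of this changeset: regression and classification share a common base algorithm, the available model creators are discovered as plugin instances and offered through a constrained choice parameter with a default, and the selected creator is re-parameterized whenever the choice changes. The following is a minimal, self-contained C# sketch of that pattern only; every type and member name in it (ConstrainedChoice, IModelCreator, Parameterize, ...) is a hypothetical stand-in rather than HeuristicLab API, and the plugin-discovery step is replaced by a hard-coded list.

// Minimal sketch of the creator-selection pattern (hypothetical names, not HeuristicLab API).
using System;
using System.Collections.Generic;
using System.Linq;

public interface IModelCreator {
  string ProblemDataParameterName { get; set; }   // where the creator reads its data from
  string ModelParameterName { get; set; }         // where the creator publishes its model
}

public sealed class ExactModelCreator : IModelCreator {
  public string ProblemDataParameterName { get; set; }
  public string ModelParameterName { get; set; }
}

public sealed class ApproximateModelCreator : IModelCreator {
  public string ProblemDataParameterName { get; set; }
  public string ModelParameterName { get; set; }
}

// A constrained choice: only values from a fixed set are valid, and listeners are
// notified when the selection changes (mirrors ConstrainedValueParameter + ValueChanged).
public sealed class ConstrainedChoice<T> where T : class {
  private T current;
  public IReadOnlyList<T> ValidValues { get; }
  public event EventHandler ValueChanged;

  public ConstrainedChoice(IEnumerable<T> validValues, T defaultValue) {
    ValidValues = validValues.ToList();
    current = defaultValue;
  }

  public T Value {
    get { return current; }
    set {
      if (!ValidValues.Contains(value)) throw new ArgumentException("value is not in the valid set");
      current = value;
      ValueChanged?.Invoke(this, EventArgs.Empty);
    }
  }
}

public sealed class GaussianProcessAlgorithmSketch {
  public ConstrainedChoice<IModelCreator> ModelCreator { get; }

  public GaussianProcessAlgorithmSketch() {
    // In the changeset this list comes from ApplicationManager.Manager.GetInstances<...>();
    // here it is hard-coded so the sketch stays self-contained.
    var creators = new IModelCreator[] { new ExactModelCreator(), new ApproximateModelCreator() };
    ModelCreator = new ConstrainedChoice<IModelCreator>(creators, creators[0]);

    foreach (var c in creators) Parameterize(c);                              // wire every valid value up front
    ModelCreator.ValueChanged += (s, e) => Parameterize(ModelCreator.Value);  // and re-wire on selection change
  }

  private void Parameterize(IModelCreator creator) {
    // The algorithm fixes the parameter names it expects, so any creator plugs into the same operator graph.
    creator.ProblemDataParameterName = "ProblemData";
    creator.ModelParameterName = "Model";
  }
}

On the solution side the changeset keeps a single, hidden FixedValueParameter instead of a choice, which achieves the same decoupling without offering alternatives to the user.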
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs
r8484 r10553 1 1 #region License Information 2 2 /* HeuristicLab 3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 4 4 * 5 5 * This file is part of HeuristicLab. … … 34 34 [Item(Name = "GaussianProcessRegressionModelCreator", 35 35 Description = "Creates a Gaussian process model for regression given the data, the hyperparameters, a mean function, and a covariance function.")] 36 public sealed class GaussianProcessRegressionModelCreator : GaussianProcessModelCreator { 36 public sealed class GaussianProcessRegressionModelCreator : GaussianProcessModelCreator, IGaussianProcessRegressionModelCreator { 37 37 private const string ProblemDataParameterName = "ProblemData"; 38 38 -
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolution.cs
r8837 r10553 1 1 #region License Information 2 2 /* HeuristicLab 3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 4 4 * 5 5 * This file is part of HeuristicLab. -
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs
r8982 r10553 1 1 #region License Information 2 2 /* HeuristicLab 3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 4 4 * 5 5 * This file is part of HeuristicLab. -
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs
r8982 r10553 1 1 #region License Information 2 2 /* HeuristicLab 3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 4 4 * 5 5 * This file is part of HeuristicLab. -
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs
r8982 r10553 1 1 #region License Information 2 2 /* HeuristicLab 3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 4 4 * 5 5 * This file is part of HeuristicLab. -
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs
r8982 r10553 1 1 #region License Information 2 2 /* HeuristicLab 3 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL) 4 4 * 5 5 * This file is part of HeuristicLab.