
Changeset 8982


Timestamp: 12/01/12 19:02:47 (11 years ago)
Author: gkronber
Message:

#1902: Removed the HyperParameter class and changed the implementations of the covariance and mean functions to drop parameter value caching and the associated change-event handlers. Instead, the actual covariance and mean functions can now be created as Func delegates from a template and the specified parameter values. The mean and covariance function instances configured in the GUI are therefore templates in which the structure and the fixed parameters can be specified.
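
A minimal sketch of how the new template/Func split is intended to be used, based on the CovarianceConst changes below (the data matrix, the parameter values, and the null columnIndices argument are illustrative assumptions, not part of the changeset):

  // a covariance function configured in the GUI is now only a template
  var template = new CovarianceConst();
  double[,] x = { { 1.0, 2.0 }, { 3.0, 4.0 } };              // made-up data matrix (rows x input dimensions)
  int n = template.GetNumberOfParameters(x.GetLength(1));    // 1 while "Scale" is left empty (free)

  // free parameters come from the hyperparameter optimizer on a log scale (see Math.Exp(2 * p[c]) below)
  double[] p = new double[n];

  // create the actual covariance function as plain delegates, without caching or event handlers
  ParameterizedCovarianceFunction cov = template.GetParameterizedCovarianceFunction(p, null);
  double k = cov.Covariance(x, 0, 1);                        // constant covariance == scale
  IEnumerable<double> grad = cov.CovarianceGradient(x, 0, 1);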

Location: trunk/sources
Files: 1 deleted, 28 edited

Legend:

+  line added in r8982
-  line removed in r8982
…  unchanged lines omitted
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceConst.cs

r8929 → r8982

 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

…
     Description = "Constant covariance function for Gaussian processes.")]
   public sealed class CovarianceConst : ParameterizedNamedItem, ICovarianceFunction {
-
-    [Storable]
-    private double scale;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> scaleParameter;
     public IValueParameter<DoubleValue> ScaleParameter {
-      get { return scaleParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     }

…
     private CovarianceConst(CovarianceConst original, Cloner cloner)
       : base(original, cloner) {
-      this.scaleParameter = cloner.Clone(original.scaleParameter);
-      this.scale = original.scale;
-
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the constant covariance function.");
-      Parameters.Add(scaleParameter);
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale of the constant covariance function."));
     }
-
-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
-    }
-
-    // caching
-    private void RegisterEvents() {
-      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
-    }
-

     public override IDeepCloneable Clone(Cloner cloner) {
…

     public int GetNumberOfParameters(int numberOfVariables) {
-      return scaleParameter.Fixed ? 0 : 1;
+      return ScaleParameter.Value != null ? 0 : 1;
     }

-    public void SetParameter(double[] hyp) {
-      if (!scaleParameter.Fixed && hyp.Length == 1) {
-        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
-      } else {
-        throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceConst", "hyp");
-      }
+    public void SetParameter(double[] p) {
+      double scale;
+      GetParameterValues(p, out scale);
+      ScaleParameter.Value = new DoubleValue(scale);
     }

-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      return scale;
+    private void GetParameterValues(double[] p, out double scale) {
+      int c = 0;
+      // gather parameter values
+      if (ScaleParameter.Value != null) {
+        scale = ScaleParameter.Value.Value;
+      } else {
+        scale = Math.Exp(2 * p[c]);
+        c++;
+      }
+      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceConst", "p");
     }

-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      yield return 2.0 * scale;
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      double scale;
+      GetParameterValues(p, out scale);
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => scale;
+      cov.CrossCovariance = (x, xt, i, j) => scale;
+      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, columnIndices);
+      return cov;
     }

-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      return scale;
+    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double scale, IEnumerable<int> columnIndices) {
+      yield return 2.0 * scale;
     }
   }
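
The diff above also shows the fixed-versus-free convention for template parameters; a small hypothetical illustration (values made up, not part of the changeset): an empty OptionalValueParameter marks a free parameter that the optimizer supplies on a log scale, while a set value fixes it and removes it from the parameter vector.

  var free = new CovarianceConst();
  free.GetNumberOfParameters(10);                          // 1: "Scale" is empty, i.e. a free parameter
  free.SetParameter(new[] { 0.5 });                        // stores Math.Exp(2 * 0.5) in the Scale parameter

  var fixedTemplate = new CovarianceConst();
  fixedTemplate.ScaleParameter.Value = new DoubleValue(2.0);   // fix the scale in the template
  fixedTemplate.GetNumberOfParameters(10);                 // 0: no free parameters remain
  var cov = fixedTemplate.GetParameterizedCovarianceFunction(new double[0], null);
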
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinear.cs

r8931 → r8982

     }

-    public void SetParameter(double[] hyp) {
-      if (hyp.Length > 0) throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
+    public void SetParameter(double[] p) {
+      if (p.Length > 0) throw new ArgumentException("No parameters are allowed for the linear covariance function.");
     }

-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      return Util.ScalarProd(x, i, j, 1, columnIndices);
-    }
-
-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      yield break;
-    }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      return Util.ScalarProd(x, i, xt, j, 1.0 , columnIndices);
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      if (p.Length > 0) throw new ArgumentException("No parameters are allowed for the linear covariance function.");
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => Util.ScalarProd(x, i, j, 1, columnIndices);
+      cov.CrossCovariance = (x, xt, i, j) =>  Util.ScalarProd(x, i, xt, j, 1.0 , columnIndices);
+      cov.CovarianceGradient = (x, i, j) => Enumerable.Empty<double>();
+      return cov;
     }
   }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinearArd.cs

r8933 → r8982

 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

…
     Description = "Linear covariance function with automatic relevance determination for Gaussian processes.")]
   public sealed class CovarianceLinearArd : ParameterizedNamedItem, ICovarianceFunction {
-    [Storable]
-    private double[] inverseLength;
-    [Storable]
-    private readonly HyperParameter<DoubleArray> inverseLengthParameter;
     public IValueParameter<DoubleArray> InverseLengthParameter {
-      get { return inverseLengthParameter; }
+      get { return (IValueParameter<DoubleArray>)Parameters["InverseLength"]; }
     }

…
     private CovarianceLinearArd(CovarianceLinearArd original, Cloner cloner)
       : base(original, cloner) {
-      inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
-      if (original.inverseLength != null) {
-        this.inverseLength = new double[original.inverseLength.Length];
-        Array.Copy(original.inverseLength, inverseLength, inverseLength.Length);
-      }
-
-      RegisterEvents();
     }
     public CovarianceLinearArd()
…
       Description = ItemDescription;

-      inverseLengthParameter = new HyperParameter<DoubleArray>("InverseLength",
-                                                               "The inverse length parameter for ARD.");
-      Parameters.Add(inverseLengthParameter);
-      RegisterEvents();
-    }
-
-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<DoubleArray>("InverseLength",
+                                                             "The inverse length parameter for ARD."));
     }

…
     }

-    // caching
-    private void RegisterEvents() {
-      Util.AttachArrayChangeHandler<DoubleArray, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.ToArray(); });
-    }
-
-
     public int GetNumberOfParameters(int numberOfVariables) {
-      if (!inverseLengthParameter.Fixed)
+      if (InverseLengthParameter.Value == null)
         return numberOfVariables;
       else
…
     }

-    public void SetParameter(double[] hyp) {
-      if (!inverseLengthParameter.Fixed && hyp.Length > 0) {
-        inverseLengthParameter.SetValue(new DoubleArray(hyp.Select(e => 1.0 / Math.Exp(e)).ToArray()));
-      } else throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceLinearArd", "hyp");
+    public void SetParameter(double[] p) {
+      double[] inverseLength;
+      GetParameterValues(p, out inverseLength);
+      InverseLengthParameter.Value = new DoubleArray(inverseLength);
     }

-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      return Util.ScalarProd(x, i, j, inverseLength, columnIndices);
+    private void GetParameterValues(double[] p, out double[] inverseLength) {
+      // gather parameter values
+      if (InverseLengthParameter.Value != null) {
+        inverseLength = InverseLengthParameter.Value.ToArray();
+      } else {
+        inverseLength = p.Select(e => 1.0 / Math.Exp(e)).ToArray();
+      }
     }

-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      double[] inverseLength;
+      GetParameterValues(p, out inverseLength);
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => Util.ScalarProd(x, i, j, inverseLength, columnIndices);
+      cov.CrossCovariance = (x, xt, i, j) => Util.ScalarProd(x, i, xt, j, inverseLength, columnIndices);
+      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, inverseLength, columnIndices);
+      return cov;
+    }
+
+    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double[] inverseLength, IEnumerable<int> columnIndices) {
       if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));

…
       }
     }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      return Util.ScalarProd(x, i, xt, j, inverseLength, columnIndices);
-    }
   }
 }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMask.cs

r8933 → r8982

 using System.Collections.Generic;
 using System.Linq;
+using System.Linq.Expressions;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
…
     Description = "Masking covariance function for dimension selection can be used to apply a covariance function only on certain input dimensions.")]
   public sealed class CovarianceMask : ParameterizedNamedItem, ICovarianceFunction {
-    [Storable]
-    private int[] selectedDimensions;
-    [Storable]
-    private readonly ValueParameter<IntArray> selectedDimensionsParameter;
     public IValueParameter<IntArray> SelectedDimensionsParameter {
-      get { return selectedDimensionsParameter; }
+      get { return (IValueParameter<IntArray>)Parameters["SelectedDimensions"]; }
     }
-
-    [Storable]
-    private ICovarianceFunction cov;
-    [Storable]
-    private readonly ValueParameter<ICovarianceFunction> covParameter;
     public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
-      get { return covParameter; }
+      get { return (IValueParameter<ICovarianceFunction>)Parameters["CovarianceFunction"]; }
     }

…
     private CovarianceMask(CovarianceMask original, Cloner cloner)
       : base(original, cloner) {
-      this.selectedDimensionsParameter = cloner.Clone(original.selectedDimensionsParameter);
-      if (original.selectedDimensions != null) {
-        this.selectedDimensions = (int[])original.selectedDimensions.Clone();
-      }
-
-      this.covParameter = cloner.Clone(original.covParameter);
-      this.cov = cloner.Clone(original.cov);
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      this.selectedDimensionsParameter = new ValueParameter<IntArray>("SelectedDimensions", "The dimensions on which the specified covariance function should be applied to.");
-      this.covParameter = new ValueParameter<ICovarianceFunction>("CovarianceFunction", "The covariance function that should be scaled.", new CovarianceSquaredExponentialIso());
-      cov = covParameter.Value;
-
-      Parameters.Add(selectedDimensionsParameter);
-      Parameters.Add(covParameter);
-
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<IntArray>("SelectedDimensions", "The dimensions on which the specified covariance function should be applied to."));
+      Parameters.Add(new ValueParameter<ICovarianceFunction>("CovarianceFunction", "The covariance function that should be scaled.", new CovarianceSquaredExponentialIso()));
     }

…
     }

-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+    public int GetNumberOfParameters(int numberOfVariables) {
+      if (SelectedDimensionsParameter.Value == null) return CovarianceFunctionParameter.Value.GetNumberOfParameters(numberOfVariables);
+      else return CovarianceFunctionParameter.Value.GetNumberOfParameters(SelectedDimensionsParameter.Value.Length);
     }

-    private void RegisterEvents() {
-      Util.AttachArrayChangeHandler<IntArray, int>(selectedDimensionsParameter, () => {
-        selectedDimensions = selectedDimensionsParameter.Value
-          .OrderBy(x => x)
-          .Distinct()
-          .ToArray();
-        if (selectedDimensions.Length == 0) selectedDimensions = null;
-      });
-      covParameter.ValueChanged += (sender, args) => { cov = covParameter.Value; };
+    public void SetParameter(double[] p) {
+      CovarianceFunctionParameter.Value.SetParameter(p);
     }

-    public int GetNumberOfParameters(int numberOfVariables) {
-      if (selectedDimensions == null) return cov.GetNumberOfParameters(numberOfVariables);
-      else return cov.GetNumberOfParameters(selectedDimensions.Length);
-    }
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      if (columnIndices != null)
+        throw new InvalidOperationException("Stacking of masking covariance functions is not supported.");
+      var cov = CovarianceFunctionParameter.Value;
+      var selectedDimensions = SelectedDimensionsParameter.Value;

-    public void SetParameter(double[] hyp) {
-      cov.SetParameter(hyp);
-    }
-
-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      // cov mask overwrites the previously selected columnIndices
-      // -> stacking of CovarianceMask is not supported
-      if (columnIndices != null && columnIndices.Count() != x.GetLength(1))
-        throw new InvalidOperationException("Stacking of masking covariance functions is not supported.");
-
-      return cov.GetCovariance(x, i, j, selectedDimensions);
-    }
-
-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      if (columnIndices != null && columnIndices.Count() != x.GetLength(1))
-        throw new InvalidOperationException("Stacking of masking covariance functions is not supported.");
-
-      return cov.GetGradient(x, i, j, selectedDimensions);
-    }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      if (columnIndices != null && columnIndices.Count() != x.GetLength(1))
-        throw new InvalidOperationException("Stacking of masking covariance functions is not supported.");
-
-      return cov.GetCrossCovariance(x, xt, i, j, selectedDimensions);
+      return cov.GetParameterizedCovarianceFunction(p, selectedDimensions);
     }
   }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMaternIso.cs

r8929 → r8982

     Description = "Matern covariance function for Gaussian processes.")]
   public sealed class CovarianceMaternIso : ParameterizedNamedItem, ICovarianceFunction {
-    [Storable]
-    private double inverseLength;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> inverseLengthParameter;
     public IValueParameter<DoubleValue> InverseLengthParameter {
-      get { return inverseLengthParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; }
     }

-    [Storable]
-    private double sf2;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> scaleParameter;
     public IValueParameter<DoubleValue> ScaleParameter {
-      get { return scaleParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     }

-    [Storable]
-    private int d;
-    [Storable]
-    private readonly ConstrainedValueParameter<IntValue> dParameter;
     public IConstrainedValueParameter<IntValue> DParameter {
-      get { return dParameter; }
+      get { return (IConstrainedValueParameter<IntValue>)Parameters["D"]; }
     }

…
     private CovarianceMaternIso(CovarianceMaternIso original, Cloner cloner)
       : base(original, cloner) {
-      this.scaleParameter = cloner.Clone(original.scaleParameter);
-      this.sf2 = original.sf2;
-      this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
-      this.inverseLength = original.inverseLength;
-      this.dParameter = cloner.Clone(original.dParameter);
-      this.d = original.d;
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric Matern covariance function.");
-      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the isometric Matern covariance function.");
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric Matern covariance function."));
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the isometric Matern covariance function."));
       var validDValues = new ItemSet<IntValue>();
       validDValues.Add((IntValue)new IntValue(1).AsReadOnly());
       validDValues.Add((IntValue)new IntValue(3).AsReadOnly());
       validDValues.Add((IntValue)new IntValue(5).AsReadOnly());
-      dParameter = new ConstrainedValueParameter<IntValue>("D", "The d parameter (allowed values: 1, 3, or 5) of the isometric Matern covariance function.", validDValues, validDValues.First());
-      d = dParameter.Value.Value;
-
-      Parameters.Add(inverseLengthParameter);
-      Parameters.Add(scaleParameter);
-      Parameters.Add(dParameter);
-
-      RegisterEvents();
-    }
-
-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+      Parameters.Add(new ConstrainedValueParameter<IntValue>("D", "The d parameter (allowed values: 1, 3, or 5) of the isometric Matern covariance function.", validDValues, validDValues.First()));
     }

…
     }

-    // caching
-    private void RegisterEvents() {
-      Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
-      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
-      Util.AttachValueChangeHandler<IntValue, int>(dParameter, () => { d = dParameter.Value.Value; });
+    public int GetNumberOfParameters(int numberOfVariables) {
+      return
+        (InverseLengthParameter.Value != null ? 0 : 1) +
+        (ScaleParameter.Value != null ? 0 : 1);
     }

-    public int GetNumberOfParameters(int numberOfVariables) {
-      return
-        (inverseLengthParameter.Fixed ? 0 : 1) +
-        (scaleParameter.Fixed ? 0 : 1);
+    public void SetParameter(double[] p) {
+      double inverseLength, scale;
+      GetParameterValues(p, out scale, out inverseLength);
+      InverseLengthParameter.Value = new DoubleValue(inverseLength);
+      ScaleParameter.Value = new DoubleValue(scale);
     }

-    public void SetParameter(double[] hyp) {
-      int i = 0;
-      if (!inverseLengthParameter.Fixed) {
-        inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
-        i++;
+    private void GetParameterValues(double[] p, out double scale, out double inverseLength) {
+      // gather parameter values
+      int c = 0;
+      if (InverseLengthParameter.Value != null) {
+        inverseLength = InverseLengthParameter.Value.Value;
+      } else {
+        inverseLength = 1.0 / Math.Exp(p[c]);
+        c++;
       }
-      if (!scaleParameter.Fixed) {
-        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
-        i++;
+
+      if (ScaleParameter.Value != null) {
+        scale = ScaleParameter.Value.Value;
+      } else {
+        scale = Math.Exp(2 * p[c]);
+        c++;
       }
-      if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceMaternIso", "hyp");
+      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceMaternIso", "p");
     }

+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      double inverseLength, scale;
+      int d = DParameter.Value.Value;
+      GetParameterValues(p, out scale, out inverseLength);
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => {
+        double dist = i == j
+                       ? 0.0
+                       : Math.Sqrt(Util.SqrDist(x, i, j, Math.Sqrt(d) * inverseLength, columnIndices));
+        return scale * m(d, dist);
+      };
+      cov.CrossCovariance = (x, xt, i, j) => {
+        double dist = Math.Sqrt(Util.SqrDist(x, i, xt, j, Math.Sqrt(d) * inverseLength, columnIndices));
+        return scale * m(d, dist);
+      };
+      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, d, scale, inverseLength, columnIndices);
+      return cov;
+    }

-    private double m(double t) {
+    private static double m(int d, double t) {
       double f;
       switch (d) {
…
     }

-    private double dm(double t) {
+    private static double dm(int d, double t) {
       double df;
       switch (d) {
…
     }

-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      double dist = i == j
-                   ? 0.0
-                   : Math.Sqrt(Util.SqrDist(x, i, j, Math.Sqrt(d) * inverseLength, columnIndices));
-      return sf2 * m(dist);
-    }

-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, int d, double scale, double inverseLength, IEnumerable<int> columnIndices) {
       double dist = i == j
                    ? 0.0
                    : Math.Sqrt(Util.SqrDist(x, i, j, Math.Sqrt(d) * inverseLength, columnIndices));

-      yield return sf2 * dm(dist);
-      yield return 2 * sf2 * m(dist);
-    }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      double dist = Math.Sqrt(Util.SqrDist(x, i, xt, j, Math.Sqrt(d) * inverseLength, columnIndices));
-      return sf2 * m(dist);
+      yield return scale * dm(d, dist);
+      yield return 2 * scale * m(d, dist);
     }
   }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceNoise.cs

r8929 → r8982

 using System;
 using System.Collections.Generic;
+using System.Linq;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

…
     Description = "Noise covariance function for Gaussian processes.")]
   public sealed class CovarianceNoise : ParameterizedNamedItem, ICovarianceFunction {
-
-
-    [Storable]
-    private double sf2;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> scaleParameter;
     public IValueParameter<DoubleValue> ScaleParameter {
-      get { return scaleParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     }

…
     private CovarianceNoise(CovarianceNoise original, Cloner cloner)
       : base(original, cloner) {
-      this.scaleParameter = cloner.Clone(original.scaleParameter);
-      this.sf2 = original.sf2;
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of noise.");
-      Parameters.Add(this.scaleParameter);
-
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale of noise."));
     }

…
     }

-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+    public int GetNumberOfParameters(int numberOfVariables) {
+      return ScaleParameter.Value != null ? 0 : 1;
     }

-    private void RegisterEvents() {
-      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
+    public void SetParameter(double[] p) {
+      double scale;
+      GetParameterValues(p, out scale);
+      ScaleParameter.Value = new DoubleValue(scale);
     }

-    public int GetNumberOfParameters(int numberOfVariables) {
-      return scaleParameter.Fixed ? 0 : 1;
+    private void GetParameterValues(double[] p, out double scale) {
+      int c = 0;
+      // gather parameter values
+      if (ScaleParameter.Value != null) {
+        scale = ScaleParameter.Value.Value;
+      } else {
+        scale = Math.Exp(2 * p[c]);
+        c++;
+      }
+      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceNoise", "p");
     }

-    public void SetParameter(double[] hyp) {
-      if (!scaleParameter.Fixed) {
-        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
-      } else {
-        if (hyp.Length > 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceNoise", "hyp");
-      }
-    }
-
-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      return i == j ? sf2 : 0.0;
-    }
-
-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      yield return i == j ? 2 * sf2 : 0.0;
-    }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      return 0.0;
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      double scale;
+      GetParameterValues(p, out scale);
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => i == j ? scale : 0.0;
+      cov.CrossCovariance = (x, xt, i, j) => 0.0;
+      cov.CovarianceGradient = (x, i, j) => Enumerable.Repeat(i == j ? 2.0 * scale : 0.0, 1);
+      return cov;
     }
   }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovariancePeriodic.cs

r8929 → r8982

 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

…
   public sealed class CovariancePeriodic : ParameterizedNamedItem, ICovarianceFunction {

-    [Storable]
-    private double scale;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> scaleParameter;
     public IValueParameter<DoubleValue> ScaleParameter {
-      get { return scaleParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     }

-    [Storable]
-    private double inverseLength;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> inverseLengthParameter;
     public IValueParameter<DoubleValue> InverseLengthParameter {
-      get { return inverseLengthParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; }
     }

-    [Storable]
-    private double period;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> periodParameter;
     public IValueParameter<DoubleValue> PeriodParameter {
-      get { return periodParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Period"]; }
     }

…
     private CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
       : base(original, cloner) {
-      this.scaleParameter = cloner.Clone(original.scaleParameter);
-      this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
-      this.periodParameter = cloner.Clone(original.periodParameter);
-      this.scale = original.scale;
-      this.inverseLength = original.inverseLength;
-      this.period = original.period;
-
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the periodic covariance function.");
-      inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter for the periodic covariance function.");
-      periodParameter = new HyperParameter<DoubleValue>("Period", "The period parameter for the periodic covariance function.");
-      Parameters.Add(scaleParameter);
-      Parameters.Add(inverseLengthParameter);
-      Parameters.Add(periodParameter);
-
-      RegisterEvents();
-    }
-
-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale of the periodic covariance function."));
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter for the periodic covariance function."));
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Period", "The period parameter for the periodic covariance function."));
     }

…
     }

-    // caching
-    private void RegisterEvents() {
-      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
-      Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
-      Util.AttachValueChangeHandler<DoubleValue, double>(periodParameter, () => { period = periodParameter.Value.Value; });
+    public int GetNumberOfParameters(int numberOfVariables) {
+      return (ScaleParameter.Value != null ? 0 : 1) +
+       (PeriodParameter.Value != null ? 0 : 1) +
+       (InverseLengthParameter.Value != null ? 0 : 1);
     }

-    public int GetNumberOfParameters(int numberOfVariables) {
-      return
-        (new[] { scaleParameter, inverseLengthParameter, periodParameter }).Count(p => !p.Fixed);
+    public void SetParameter(double[] p) {
+      double scale, inverseLength, period;
+      GetParameterValues(p, out scale, out period, out inverseLength);
+      ScaleParameter.Value = new DoubleValue(scale);
+      PeriodParameter.Value = new DoubleValue(period);
+      InverseLengthParameter.Value = new DoubleValue(inverseLength);
     }

-    public void SetParameter(double[] hyp) {
-      int i = 0;
-      if (!inverseLengthParameter.Fixed) {
-        inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
-        i++;
+
+    private void GetParameterValues(double[] p, out double scale, out double period, out double inverseLength) {
+      // gather parameter values
+      int c = 0;
+      if (InverseLengthParameter.Value != null) {
+        inverseLength = InverseLengthParameter.Value.Value;
+      } else {
+        inverseLength = 1.0 / Math.Exp(p[c]);
+        c++;
       }
-      if (!periodParameter.Fixed) {
-        periodParameter.SetValue(new DoubleValue(Math.Exp(hyp[i])));
-        i++;
+      if (PeriodParameter.Value != null) {
+        period = PeriodParameter.Value.Value;
+      } else {
+        period = Math.Exp(p[c]);
+        c++;
       }
-      if (!scaleParameter.Fixed) {
-        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
-        i++;
+      if (ScaleParameter.Value != null) {
+        scale = ScaleParameter.Value.Value;
+      } else {
+        scale = Math.Exp(2 * p[c]);
+        c++;
       }
-      if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovariancePeriod", "hyp");
+      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovariancePeriodic", "p");
     }

-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      double k = i == j ? 0.0 : GetDistance(x, x, i, j, columnIndices);
-      k = Math.PI * k / period;
-      k = Math.Sin(k) * inverseLength;
-      k = k * k;
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      double inverseLength, period, scale;
+      GetParameterValues(p, out scale, out period, out inverseLength);
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => {
+        double k = i == j ? 0.0 : GetDistance(x, x, i, j, columnIndices);
+        k = Math.PI * k / period;
+        k = Math.Sin(k) * inverseLength;
+        k = k * k;

-      return scale * Math.Exp(-2.0 * k);
+        return scale * Math.Exp(-2.0 * k);
+      };
+      cov.CrossCovariance = (x, xt, i, j) => {
+        double k = GetDistance(x, xt, i, j, columnIndices);
+        k = Math.PI * k / period;
+        k = Math.Sin(k) * inverseLength;
+        k = k * k;
+
+        return scale * Math.Exp(-2.0 * k);
+      };
+      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, period, inverseLength);
+      return cov;
     }

-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+
+    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double period, double inverseLength) {
       double v = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j, columnIndices) / period;
       double gradient = Math.Sin(v) * inverseLength;
…
     }

-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      double k = GetDistance(x, xt, i, j, columnIndices);
-      k = Math.PI * k / period;
-      k = Math.Sin(k) * inverseLength;
-      k = k * k;
-
-      return scale * Math.Exp(-2.0 * k);
-    }
-
-    private double GetDistance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
+    private static double GetDistance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
       return Math.Sqrt(Util.SqrDist(x, i, xt, j, 1, columnIndices));
     }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceProduct.cs

r8929 → r8982

 using System.Collections.Generic;
 using System.Linq;
+using System.Linq.Expressions;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
…
     }

-    public void SetParameter(double[] hyp) {
-      if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
+    public void SetParameter(double[] p) {
       int offset = 0;
-      foreach (var t in factors) {
-        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
-        t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
+      foreach (var f in factors) {
+        var numberOfParameters = f.GetNumberOfParameters(numberOfVariables);
+        f.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray());
         offset += numberOfParameters;
       }
     }

-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      return factors.Select(f => f.GetCovariance(x, i, j, columnIndices)).Aggregate((a, b) => a * b);
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
+      var functions = new List<ParameterizedCovarianceFunction>();
+      foreach (var f in factors) {
+        int numberOfParameters = f.GetNumberOfParameters(numberOfVariables);
+        functions.Add(f.GetParameterizedCovarianceFunction(p.Take(numberOfParameters).ToArray(), columnIndices));
+        p = p.Skip(numberOfParameters).ToArray();
+      }
+
+
+      var product = new ParameterizedCovarianceFunction();
+      product.Covariance = (x, i, j) => functions.Select(e => e.Covariance(x, i, j)).Aggregate((a, b) => a * b);
+      product.CrossCovariance = (x, xt, i, j) => functions.Select(e => e.CrossCovariance(x, xt, i, j)).Aggregate((a, b) => a * b);
+      product.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, functions);
+      return product;
     }

-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      var covariances = factors.Select(f => f.GetCovariance(x, i, j, columnIndices)).ToArray();
-      for (int ii = 0; ii < factors.Count; ii++) {
-        foreach (var g in factors[ii].GetGradient(x, i, j, columnIndices)) {
+    public static IEnumerable<double> GetGradient(double[,] x, int i, int j, List<ParameterizedCovarianceFunction> factorFunctions) {
+      var covariances = factorFunctions.Select(f => f.Covariance(x, i, j)).ToArray();
+      for (int ii = 0; ii < factorFunctions.Count; ii++) {
+        foreach (var g in factorFunctions[ii].CovarianceGradient(x, i, j)) {
           double res = g;
           for (int jj = 0; jj < covariances.Length; jj++)
…
       }
     }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      return factors.Select(f => f.GetCrossCovariance(x, xt, i, j, columnIndices)).Aggregate((a, b) => a * b);
-    }
   }
 }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticArd.cs

r8933 → r8982

 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

…
     Description = "Rational quadratic covariance function with automatic relevance determination for Gaussian processes.")]
   public sealed class CovarianceRationalQuadraticArd : ParameterizedNamedItem, ICovarianceFunction {
-    [Storable]
-    private double sf2;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> scaleParameter;
     public IValueParameter<DoubleValue> ScaleParameter {
-      get { return scaleParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     }

-    [Storable]
-    private double[] inverseLength;
-    [Storable]
-    private readonly HyperParameter<DoubleArray> inverseLengthParameter;
     public IValueParameter<DoubleArray> InverseLengthParameter {
-      get { return inverseLengthParameter; }
+      get { return (IValueParameter<DoubleArray>)Parameters["InverseLength"]; }
     }

-    [Storable]
-    private double shape;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> shapeParameter;
     public IValueParameter<DoubleValue> ShapeParameter {
-      get { return shapeParameter; }
+      get { return (IValueParameter<DoubleValue>)Parameters["Shape"]; }
     }

…
     private CovarianceRationalQuadraticArd(CovarianceRationalQuadraticArd original, Cloner cloner)
       : base(original, cloner) {
-      this.scaleParameter = cloner.Clone(original.scaleParameter);
-      this.sf2 = original.sf2;
-
-      this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
-      if (original.inverseLength != null) {
-        this.inverseLength = new double[original.inverseLength.Length];
-        Array.Copy(original.inverseLength, inverseLength, inverseLength.Length);
-      }
-
-      this.shapeParameter = cloner.Clone(original.shapeParameter);
-      this.shape = original.shape;
-
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the rational quadratic covariance function with ARD.");
-      this.inverseLengthParameter = new HyperParameter<DoubleArray>("InverseLength", "The inverse length parameter for automatic relevance determination.");
-      this.shapeParameter = new HyperParameter<DoubleValue>("Shape", "The shape parameter (alpha) of the rational quadratic covariance function with ARD.");
-
-      Parameters.Add(scaleParameter);
-      Parameters.Add(inverseLengthParameter);
-      Parameters.Add(shapeParameter);
-
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the rational quadratic covariance function with ARD."));
+      Parameters.Add(new OptionalValueParameter<DoubleArray>("InverseLength", "The inverse length parameter for automatic relevance determination."));
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Shape", "The shape parameter (alpha) of the rational quadratic covariance function with ARD."));
     }

…
     }

-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+    public int GetNumberOfParameters(int numberOfVariables) {
+      return
+        (ScaleParameter.Value != null ? 0 : 1) +
+        (ShapeParameter.Value != null ? 0 : 1) +
+        (InverseLengthParameter.Value != null ? 0 : numberOfVariables);
     }

-    private void RegisterEvents() {
-      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
-      Util.AttachValueChangeHandler<DoubleValue, double>(shapeParameter, () => { shape = shapeParameter.Value.Value; });
-      Util.AttachArrayChangeHandler<DoubleArray, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.ToArray(); });
+    public void SetParameter(double[] p) {
+      double scale, shape;
+      double[] inverseLength;
+      GetParameterValues(p, out scale, out shape, out inverseLength);
+      ScaleParameter.Value = new DoubleValue(scale);
+      ShapeParameter.Value = new DoubleValue(shape);
+      InverseLengthParameter.Value = new DoubleArray(inverseLength);
     }

-    public int GetNumberOfParameters(int numberOfVariables) {
-      return
-        (scaleParameter.Fixed ? 0 : 1) +
-        (shapeParameter.Fixed ? 0 : 1) +
-        (inverseLengthParameter.Fixed ? 0 : numberOfVariables);
+    private void GetParameterValues(double[] p, out double scale, out double shape, out double[] inverseLength) {
+      int c = 0;
+      // gather parameter values
+      if (ScaleParameter.Value != null) {
+        scale = ScaleParameter.Value.Value;
+      } else {
+        scale = Math.Exp(2 * p[c]);
+        c++;
+      }
+      if (ShapeParameter.Value != null) {
+        shape = ShapeParameter.Value.Value;
+      } else {
+        shape = Math.Exp(p[c]);
+        c++;
+      }
+      if (InverseLengthParameter.Value != null) {
+        inverseLength = InverseLengthParameter.Value.ToArray();
+      } else {
+        inverseLength = p.Skip(2).Select(e => 1.0 / Math.Exp(e)).ToArray();
+        c += inverseLength.Length;
+      }
+      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceRationalQuadraticArd", "p");
     }

-    public void SetParameter(double[] hyp) {
-      int i = 0;
-      if (!scaleParameter.Fixed) {
-        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
-        i++;
-      }
-      if (!shapeParameter.Fixed) {
-        shapeParameter.SetValue(new DoubleValue(Math.Exp(hyp[i])));
-        i++;
-      }
-      if (!inverseLengthParameter.Fixed) {
-        inverseLengthParameter.SetValue(new DoubleArray(hyp.Skip(i).Select(e => 1.0 / Math.Exp(e)).ToArray()));
-        i += hyp.Skip(i).Count();
-      }
-      if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceRationalQuadraticArd", "hyp");
+    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
+      double scale, shape;
+      double[] inverseLength;
+      GetParameterValues(p, out scale, out shape, out inverseLength);
+      // create functions
+      var cov = new ParameterizedCovarianceFunction();
+      cov.Covariance = (x, i, j) => {
+        double d = i == j
+                    ? 0.0
+                    : Util.SqrDist(x, i, j, inverseLength, columnIndices);
+        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
+      };
+      cov.CrossCovariance = (x, xt, i, j) => {
+        double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
+        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
+      };
+      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, shape, inverseLength);
+      return cov;
     }

-
-    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
-      double d = i == j
-                   ? 0.0
-                   : Util.SqrDist(x, i, j, inverseLength, columnIndices);
-      return sf2 * Math.Pow(1 + 0.5 * d / shape, -shape);
-    }
-
-    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
+    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double shape, double[] inverseLength) {
       if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
       double d = i == j
…
       int k = 0;
       foreach (var columnIndex in columnIndices) {
-        yield return sf2 * Math.Pow(b, -shape - 1) * Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
+        yield return scale * Math.Pow(b, -shape - 1) * Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
         k++;
       }
-      yield return 2 * sf2 * Math.Pow(b, -shape);
-      yield return sf2 * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
-    }
-
-    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
-      double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
-      return sf2 * Math.Pow(1 + 0.5 * d / shape, -shape);
+      yield return 2 * scale * Math.Pow(b, -shape);
+      yield return scale * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
     }
   }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticIso.cs

r8929 → r8982

 using HeuristicLab.Core;
 using HeuristicLab.Data;
+using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

…
     Description = "Isotropic rational quadratic covariance function for Gaussian processes.")]
   public sealed class CovarianceRationalQuadraticIso : ParameterizedNamedItem, ICovarianceFunction {
-    [Storable]
-    private double sf2;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> scaleParameter;
-    public IValueParameter<DoubleValue> ScaleParameter { get { return scaleParameter; } }
+    public IValueParameter<DoubleValue> ScaleParameter {
+      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
+    }

-    [Storable]
-    private double inverseLength;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> inverseLengthParameter;
-    public IValueParameter<DoubleValue> InverseLengthParameter { get { return inverseLengthParameter; } }
+    public IValueParameter<DoubleValue> InverseLengthParameter {
+      get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; }
+    }

-    [Storable]
-    private double shape;
-    [Storable]
-    private readonly HyperParameter<DoubleValue> shapeParameter;
-    public IValueParameter<DoubleValue> ShapeParameter { get { return shapeParameter; } }
-
+    public IValueParameter<DoubleValue> ShapeParameter {
+      get { return (IValueParameter<DoubleValue>)Parameters["Shape"]; }
+    }
     [StorableConstructor]
     private CovarianceRationalQuadraticIso(bool deserializing)
…
     private CovarianceRationalQuadraticIso(CovarianceRationalQuadraticIso original, Cloner cloner)
       : base(original, cloner) {
-      this.sf2 = original.sf2;
-      this.scaleParameter = cloner.Clone(original.scaleParameter);
-
-      this.inverseLength = original.inverseLength;
-      this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
-
-      this.shape = original.shape;
-      this.shapeParameter = cloner.Clone(original.shapeParameter);
-
-      RegisterEvents();
     }

…
       Description = ItemDescription;

-      this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the isometric rational quadratic covariance function.");
-      this.inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric rational quadratic covariance function.");
-      this.shapeParameter = new HyperParameter<DoubleValue>("Shape", "The shape parameter (alpha) of the isometric rational quadratic covariance function.");
-
-      Parameters.Add(scaleParameter);
-      Parameters.Add(inverseLengthParameter);
-      Parameters.Add(shapeParameter);
-
-      RegisterEvents();
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the isometric rational quadratic covariance function."));
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric rational quadratic covariance function."));
+      Parameters.Add(new OptionalValueParameter<DoubleValue>("Shape", "The shape parameter (alpha) of the isometric rational quadratic covariance function."));
     }

…
     }

-    [StorableHook(HookType.AfterDeserialization)]
-    private void AfterDeserialization() {
-      RegisterEvents();
+    public int GetNumberOfParameters(int numberOfVariables) {
+      return (ScaleParameter.Value != null ? 0 : 1) +
+        (ShapeParameter.Value != null ? 0 : 1) +
+        (InverseLengthParameter.Value != null ? 0 : 1);
     }

-    private void RegisterEvents() {
-      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
-      Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
-      Util.AttachValueChangeHandler<DoubleValue, double>(shapeParameter, () => { shape = shapeParameter.Value.Value; });
+    public void SetParameter(double[] p) {
+      double scale, shape, inverseLength;
+      GetParameterValues(p, out scale, out shape, out inverseLength);
+      ScaleParameter.Value = new DoubleValue(scale);
+      ShapeParameter.Value = new DoubleValue(shape);
+      InverseLengthParameter.Value = new DoubleValue(inverseLength);
     }

-    public int GetNumberOfParameters(int numberOfVariables) {
-      return
-        (scaleParameter.Fixed ? 0 : 1) +
    105         (inverseLengthParameter.Fixed ? 0 : 1) +
    106         (shapeParameter.Fixed ? 0 : 1);
     83    private void GetParameterValues(double[] p, out double scale, out double shape, out double inverseLength) {
     84      int c = 0;
     85      // gather parameter values
     86      if (ScaleParameter.Value != null) {
     87        scale = ScaleParameter.Value.Value;
     88      } else {
     89        scale = Math.Exp(2 * p[c]);
     90        c++;
     91      }
     92      if (ShapeParameter.Value != null) {
     93        shape = ShapeParameter.Value.Value;
     94      } else {
     95        shape = Math.Exp(p[c]);
     96        c++;
     97      }
     98      if (InverseLengthParameter.Value != null) {
     99        inverseLength = InverseLengthParameter.Value.Value;
     100      } else {
     101        inverseLength = 1.0 / Math.Exp(p[c]);
     102        c++;
     103      }
     104      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceRationalQuadraticIso", "p");
    107105    }
    108106
    109     public void SetParameter(double[] hyp) {
    110       int i = 0;
    111       if (!scaleParameter.Fixed) {
    112         scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
    113         i++;
    114       }
    115       if (!shapeParameter.Fixed) {
    116         shapeParameter.SetValue(new DoubleValue(Math.Exp(hyp[i])));
    117         i++;
    118       }
    119       if (!inverseLengthParameter.Fixed) {
    120         inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
    121         i++;
    122       }
    123       if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceRationalQuadraticIso", "hyp");
     107    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
     108      double scale, shape, inverseLength;
     109      GetParameterValues(p, out scale, out shape, out inverseLength);
     110      // create functions
     111      var cov = new ParameterizedCovarianceFunction();
     112      cov.Covariance = (x, i, j) => {
     113        double d = i == j
     114                    ? 0.0
     115                    : Util.SqrDist(x, i, j, inverseLength, columnIndices);
      116        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
     117      };
     118      cov.CrossCovariance = (x, xt, i, j) => {
     119        double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
     120        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
     121      };
     122      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, shape, inverseLength);
     123      return cov;
    124124    }
    125125
    126 
    127     public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    128       double d = i == j
    129                    ? 0.0
    130                    : Util.SqrDist(x, i, j, inverseLength, columnIndices);
    131       return sf2 * Math.Pow(1 + 0.5 * d / shape, -shape);
    132     }
    133 
    134     public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
     126    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double shape, double inverseLength) {
    135127      double d = i == j
    136128                   ? 0.0
     
    138130
    139131      double b = 1 + 0.5 * d / shape;
    140       yield return sf2 * Math.Pow(b, -shape - 1) * d;
    141       yield return 2 * sf2 * Math.Pow(b, -shape);
    142       yield return sf2 * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
    143     }
    144 
    145     public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    146       double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
    147       return sf2 * Math.Pow(1 + 0.5 * d / shape, -shape);
     132      yield return scale * Math.Pow(b, -shape - 1) * d;
     133      yield return 2 * scale * Math.Pow(b, -shape);
     134      yield return scale * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
    148135    }
    149136  }
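With all three parameters left free in the template, GetParameterValues above decodes the parameter vector in the order scale, shape, inverse length using exp-transforms. A minimal sketch of encoding concrete values (the numeric values are illustrative assumptions):

    // scale = e^(2*p[0]), shape = e^(p[1]), inverseLength = e^(-p[2])
    var rq = new CovarianceRationalQuadraticIso();
    double sf2 = 1.5, alpha = 2.0, lengthScale = 0.5;
    var p = new[] { 0.5 * Math.Log(sf2), Math.Log(alpha), Math.Log(lengthScale) };
    var cov = rq.GetParameterizedCovarianceFunction(p, Enumerable.Range(0, 2));
    // rq.SetParameter(p) would additionally write the decoded values back into the GUI parameters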
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceScale.cs

    r8929 r8982  
    3434    Description = "Scale covariance function for Gaussian processes.")]
    3535  public sealed class CovarianceScale : ParameterizedNamedItem, ICovarianceFunction {
    36     [Storable]
    37     private double sf2;
    38     [Storable]
    39     private readonly HyperParameter<DoubleValue> scaleParameter;
    4036    public IValueParameter<DoubleValue> ScaleParameter {
    41       get { return scaleParameter; }
     37      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    4238    }
    4339
    44     [Storable]
    45     private ICovarianceFunction cov;
    46     [Storable]
    47     private readonly ValueParameter<ICovarianceFunction> covParameter;
    4840    public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    49       get { return covParameter; }
     41      get { return (IValueParameter<ICovarianceFunction>)Parameters["CovarianceFunction"]; }
    5042    }
    5143
     
    5749    private CovarianceScale(CovarianceScale original, Cloner cloner)
    5850      : base(original, cloner) {
    59       this.scaleParameter = cloner.Clone(original.scaleParameter);
    60       this.sf2 = original.sf2;
    61 
    62       this.covParameter = cloner.Clone(original.covParameter);
    63       this.cov = cloner.Clone(original.cov);
    64       RegisterEvents();
    6551    }
    6652
     
    7056      Description = ItemDescription;
    7157
    72       this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter.");
    73       this.covParameter = new ValueParameter<ICovarianceFunction>("CovarianceFunction", "The covariance function that should be scaled.", new CovarianceSquaredExponentialIso());
    74       cov = covParameter.Value;
    75 
    76       Parameters.Add(this.scaleParameter);
    77       Parameters.Add(covParameter);
    78 
    79       RegisterEvents();
     58      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter."));
     59      Parameters.Add(new ValueParameter<ICovarianceFunction>("CovarianceFunction", "The covariance function that should be scaled.", new CovarianceSquaredExponentialIso()));
    8060    }
    8161
     
    8464    }
    8565
    86     [StorableHook(HookType.AfterDeserialization)]
    87     private void AfterDeserialization() {
    88       RegisterEvents();
     66    public int GetNumberOfParameters(int numberOfVariables) {
     67      return (ScaleParameter.Value != null ? 0 : 1) + CovarianceFunctionParameter.Value.GetNumberOfParameters(numberOfVariables);
    8968    }
    9069
    91     private void RegisterEvents() {
    92       Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
    93       covParameter.ValueChanged += (sender, args) => { cov = covParameter.Value; };
     70    public void SetParameter(double[] p) {
     71      double scale;
     72      GetParameterValues(p, out scale);
     73      ScaleParameter.Value = new DoubleValue(scale);
     74      CovarianceFunctionParameter.Value.SetParameter(p.Skip(1).ToArray());
    9475    }
    9576
    96     public int GetNumberOfParameters(int numberOfVariables) {
    97       return (scaleParameter.Fixed ? 0 : 1) + cov.GetNumberOfParameters(numberOfVariables);
     77    private void GetParameterValues(double[] p, out double scale) {
     78      // gather parameter values
     79      if (ScaleParameter.Value != null) {
     80        scale = ScaleParameter.Value.Value;
     81      } else {
     82        scale = Math.Exp(2 * p[0]);
     83      }
    9884    }
    9985
    100     public void SetParameter(double[] hyp) {
    101       int i = 0;
    102       if (!scaleParameter.Fixed) {
    103         scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
    104         i++;
    105       }
    106       cov.SetParameter(hyp.Skip(i).ToArray());
     86    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
     87      double scale;
     88      GetParameterValues(p, out scale);
     89      var subCov = CovarianceFunctionParameter.Value.GetParameterizedCovarianceFunction(p.Skip(1).ToArray(), columnIndices);
     90      // create functions
     91      var cov = new ParameterizedCovarianceFunction();
     92      cov.Covariance = (x, i, j) => scale * subCov.Covariance(x, i, j);
     93      cov.CrossCovariance = (x, xt, i, j) => scale * subCov.CrossCovariance(x, xt, i, j);
     94      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, subCov);
     95      return cov;
    10796    }
    10897
    109     public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    110       return sf2 * cov.GetCovariance(x, i, j, columnIndices);
    111     }
    112 
    113     public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    114       yield return 2 * sf2 * cov.GetCovariance(x, i, j, columnIndices);
    115       foreach (var g in cov.GetGradient(x, i, j, columnIndices))
    116         yield return sf2 * g;
    117     }
    118 
    119     public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    120       return sf2 * cov.GetCrossCovariance(x, xt, i, j, columnIndices);
     98    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, ParameterizedCovarianceFunction cov) {
     99      yield return 2 * scale * cov.Covariance(x, i, j);
     100      foreach (var g in cov.CovarianceGradient(x, i, j))
     101        yield return scale * g;
    121102    }
    122103  }
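The scale template consumes its own (free) scale entry and forwards the remaining parameters to the wrapped covariance function. A minimal sketch with the default inner function and all parameters free (values are illustrative):

    var scaled = new CovarianceScale();                       // inner function defaults to CovarianceSquaredExponentialIso
    int n = scaled.GetNumberOfParameters(2);                  // 1 (outer scale) + 2 (inner inverse length and scale)
    var cov = scaled.GetParameterizedCovarianceFunction(new double[n], Enumerable.Range(0, 2));
    // parameter layout: p[0] -> outer scale, p[1..] -> forwarded to the inner covariance function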
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialArd.cs

    r8933 r8982  
    2626using HeuristicLab.Core;
    2727using HeuristicLab.Data;
     28using HeuristicLab.Parameters;
    2829using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2930
     
    3233  [Item(Name = "CovarianceSquaredExponentialArd", Description = "Squared exponential covariance function with automatic relevance determination for Gaussian processes.")]
    3334  public sealed class CovarianceSquaredExponentialArd : ParameterizedNamedItem, ICovarianceFunction {
    34     [Storable]
    35     private double sf2;
    36     [Storable]
    37     private readonly HyperParameter<DoubleValue> scaleParameter;
    38     public IValueParameter<DoubleValue> ScaleParameter { get { return scaleParameter; } }
     35    public IValueParameter<DoubleValue> ScaleParameter {
     36      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     37    }
    3938
    40     [Storable]
    41     private double[] inverseLength;
    42     [Storable]
    43     private readonly HyperParameter<DoubleArray> inverseLengthParameter;
    44     public IValueParameter<DoubleArray> InverseLengthParameter { get { return inverseLengthParameter; } }
     39    public IValueParameter<DoubleArray> InverseLengthParameter {
     40      get { return (IValueParameter<DoubleArray>)Parameters["InverseLength"]; }
     41    }
    4542
    4643    [StorableConstructor]
     
    4845    private CovarianceSquaredExponentialArd(CovarianceSquaredExponentialArd original, Cloner cloner)
    4946      : base(original, cloner) {
    50       this.sf2 = original.sf2;
    51       this.scaleParameter = cloner.Clone(original.scaleParameter);
    52 
    53       if (original.inverseLength != null) {
    54         this.inverseLength = new double[original.inverseLength.Length];
    55         Array.Copy(original.inverseLength, this.inverseLength, this.inverseLength.Length);
    56       }
    57       this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
    58 
    59       RegisterEvents();
    6047    }
    6148    public CovarianceSquaredExponentialArd()
     
    6451      Description = ItemDescription;
    6552
    66       this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the squared exponential covariance function with ARD.");
    67       this.inverseLengthParameter = new HyperParameter<DoubleArray>("InverseLength", "The inverse length parameter for automatic relevance determination.");
    68 
    69       Parameters.Add(scaleParameter);
    70       Parameters.Add(inverseLengthParameter);
    71 
    72       RegisterEvents();
     53      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the squared exponential covariance function with ARD."));
     54      Parameters.Add(new OptionalValueParameter<DoubleArray>("InverseLength", "The inverse length parameter for automatic relevance determination."));
    7355    }
    7456
     
    7759    }
    7860
    79     [StorableHook(HookType.AfterDeserialization)]
    80     private void AfterDeserialization() {
    81       RegisterEvents();
     61    public int GetNumberOfParameters(int numberOfVariables) {
     62      return
     63        (ScaleParameter.Value != null ? 0 : 1) +
     64        (InverseLengthParameter.Value != null ? 0 : numberOfVariables);
    8265    }
    8366
    84     private void RegisterEvents() {
    85       Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
    86       Util.AttachArrayChangeHandler<DoubleArray, double>(inverseLengthParameter, () => {
    87         inverseLength =
    88           inverseLengthParameter.Value.ToArray();
    89       });
     67    public void SetParameter(double[] p) {
     68      double scale;
     69      double[] inverseLength;
     70      GetParameterValues(p, out scale, out inverseLength);
     71      ScaleParameter.Value = new DoubleValue(scale);
     72      InverseLengthParameter.Value = new DoubleArray(inverseLength);
    9073    }
    9174
    92     public int GetNumberOfParameters(int numberOfVariables) {
    93       return
    94         (scaleParameter.Fixed ? 0 : 1) +
    95         (inverseLengthParameter.Fixed ? 0 : numberOfVariables);
     75    private void GetParameterValues(double[] p, out double scale, out double[] inverseLength) {
     76      int c = 0;
     77      // gather parameter values
     78      if (ScaleParameter.Value != null) {
     79        scale = ScaleParameter.Value.Value;
     80      } else {
     81        scale = Math.Exp(2 * p[c]);
     82        c++;
     83      }
     84      if (InverseLengthParameter.Value != null) {
     85        inverseLength = InverseLengthParameter.Value.ToArray();
     86      } else {
      87        inverseLength = p.Skip(c).Select(e => 1.0 / Math.Exp(e)).ToArray();
     88        c += inverseLength.Length;
     89      }
     90      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialArd", "p");
     91    }
     92
     93    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
     94      double scale;
     95      double[] inverseLength;
     96      GetParameterValues(p, out scale, out inverseLength);
     97      // create functions
     98      var cov = new ParameterizedCovarianceFunction();
     99      cov.Covariance = (x, i, j) => {
     100        double d = i == j
     101                 ? 0.0
     102                 : Util.SqrDist(x, i, j, inverseLength, columnIndices);
     103        return scale * Math.Exp(-d / 2.0);
     104      };
     105      cov.CrossCovariance = (x, xt, i, j) => {
     106        double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
     107        return scale * Math.Exp(-d / 2.0);
     108      };
     109      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, inverseLength);
     110      return cov;
    96111    }
    97112
    98113
    99     public void SetParameter(double[] hyp) {
    100       int i = 0;
    101       if (!scaleParameter.Fixed) {
    102         scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
    103         i++;
    104       }
    105       if (!inverseLengthParameter.Fixed) {
    106         inverseLengthParameter.SetValue(new DoubleArray(hyp.Skip(i).Select(e => 1.0 / Math.Exp(e)).ToArray()));
    107         i += hyp.Skip(i).Count();
    108       }
    109       if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialArd", "hyp");
    110     }
    111 
    112     public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    113       double d = i == j
    114                    ? 0.0
    115                    : Util.SqrDist(x, i, j, inverseLength, columnIndices);
    116       return sf2 * Math.Exp(-d / 2.0);
    117     }
    118 
    119     public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
     114    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double[] inverseLength) {
    120115      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
    121116      double d = i == j
     
    125120      foreach (var columnIndex in columnIndices) {
    126121        double sqrDist = Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
    127         yield return sf2 * Math.Exp(-d / 2.0) * sqrDist;
     122        yield return scale * Math.Exp(-d / 2.0) * sqrDist;
    128123        k++;
    129124      }
    130125
    131       yield return 2.0 * sf2 * Math.Exp(-d / 2.0);
    132     }
    133 
    134     public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    135       double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
    136       return sf2 * Math.Exp(-d / 2.0);
     126      yield return 2.0 * scale * Math.Exp(-d / 2.0);
    137127    }
    138128  }
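For the ARD variant the number of free parameters depends on the number of input variables, and fixing the inverse length scales in the GUI template removes them from the parameter vector. A minimal sketch (values are illustrative):

    var ard = new CovarianceSquaredExponentialArd();
    int nFree = ard.GetNumberOfParameters(3);                                    // 1 (scale) + 3 (one length scale per variable)
    ard.InverseLengthParameter.Value = new DoubleArray(new[] { 2.0, 2.0, 2.0 }); // fix the length scales in the template
    int nRemaining = ard.GetNumberOfParameters(3);                               // only the scale parameter is still free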
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialIso.cs

    r8929 r8982  
    2222using System;
    2323using System.Collections.Generic;
     24using System.Linq.Expressions;
    2425using HeuristicLab.Common;
    2526using HeuristicLab.Core;
    2627using HeuristicLab.Data;
     28using HeuristicLab.Parameters;
    2729using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2830
     
    3234    Description = "Isotropic squared exponential covariance function for Gaussian processes.")]
    3335  public sealed class CovarianceSquaredExponentialIso : ParameterizedNamedItem, ICovarianceFunction {
    34     [Storable]
    35     private double sf2;
    36     [Storable]
    37     private readonly HyperParameter<DoubleValue> scaleParameter;
    38     public IValueParameter<DoubleValue> ScaleParameter { get { return scaleParameter; } }
     36    public IValueParameter<DoubleValue> ScaleParameter {
     37      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
     38    }
    3939
    40     [Storable]
    41     private double inverseLength;
    42     [Storable]
    43     private readonly HyperParameter<DoubleValue> inverseLengthParameter;
    44     public IValueParameter<DoubleValue> InverseLengthParameter { get { return inverseLengthParameter; } }
     40    public IValueParameter<DoubleValue> InverseLengthParameter {
     41      get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; }
     42    }
    4543
    4644    [StorableConstructor]
     
    5149    private CovarianceSquaredExponentialIso(CovarianceSquaredExponentialIso original, Cloner cloner)
    5250      : base(original, cloner) {
    53       this.sf2 = original.sf2;
    54       this.scaleParameter = cloner.Clone(original.scaleParameter);
    55 
    56       this.inverseLength = original.inverseLength;
    57       this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
    58 
    59       RegisterEvents();
    6051    }
    6152
     
    6556      Description = ItemDescription;
    6657
    67       this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale parameter of the isometric squared exponential covariance function.");
    68       this.inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric squared exponential covariance function.");
    69 
    70       Parameters.Add(scaleParameter);
    71       Parameters.Add(inverseLengthParameter);
    72 
    73       RegisterEvents();
     58      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the isometric squared exponential covariance function."));
     59      Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric squared exponential covariance function."));
    7460    }
    7561
     
    7864    }
    7965
    80     [StorableHook(HookType.AfterDeserialization)]
    81     private void AfterDeserialization() {
    82       RegisterEvents();
     66    public int GetNumberOfParameters(int numberOfVariables) {
     67      return
     68        (ScaleParameter.Value != null ? 0 : 1) +
     69        (InverseLengthParameter.Value != null ? 0 : 1);
    8370    }
    8471
    85     private void RegisterEvents() {
    86       Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
    87       Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
    88     }
    89 
    90     public int GetNumberOfParameters(int numberOfVariables) {
    91       return
    92         (scaleParameter.Fixed ? 0 : 1) +
    93         (inverseLengthParameter.Fixed ? 0 : 1);
    94     }
    95 
    96     public void SetParameter(double[] hyp) {
    97       int i = 0;
    98       if (!inverseLengthParameter.Fixed) {
    99         inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
    100         i++;
    101       }
    102       if (!scaleParameter.Fixed) {
    103         scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
    104         i++;
    105       }
    106       if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialIso", "hyp");
     72    public void SetParameter(double[] p) {
     73      double scale, inverseLength;
     74      GetParameterValues(p, out scale, out inverseLength);
     75      ScaleParameter.Value = new DoubleValue(scale);
     76      InverseLengthParameter.Value = new DoubleValue(inverseLength);
    10777    }
    10878
    10979
    110     public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    111       double d = i == j
    112                    ? 0.0
    113                    : Util.SqrDist(x, i, j, inverseLength, columnIndices);
    114       return sf2 * Math.Exp(-d / 2.0);
     80    private void GetParameterValues(double[] p, out double scale, out double inverseLength) {
     81      // gather parameter values
     82      int c = 0;
     83      if (InverseLengthParameter.Value != null) {
     84        inverseLength = InverseLengthParameter.Value.Value;
     85      } else {
     86        inverseLength = 1.0 / Math.Exp(p[c]);
     87        c++;
     88      }
     89
     90      if (ScaleParameter.Value != null) {
     91        scale = ScaleParameter.Value.Value;
     92      } else {
     93        scale = Math.Exp(2 * p[c]);
     94        c++;
     95      }
     96      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialIso", "p");
    11597    }
    11698
    117     public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
     99    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
     100      double inverseLength, scale;
     101      GetParameterValues(p, out scale, out inverseLength);
     102      // create functions
     103      var cov = new ParameterizedCovarianceFunction();
     104      cov.Covariance = (x, i, j) => {
     105        double d = i == j
     106                ? 0.0
     107                : Util.SqrDist(x, i, j, inverseLength, columnIndices);
     108        return scale * Math.Exp(-d / 2.0);
     109      };
     110      cov.CrossCovariance = (x, xt, i, j) => {
     111        double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
     112        return scale * Math.Exp(-d / 2.0);
     113      };
     114      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, inverseLength, columnIndices);
     115      return cov;
     116    }
     117
     118    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double sf2, double inverseLength, IEnumerable<int> columnIndices) {
    118119      double d = i == j
    119120                   ? 0.0
     
    123124      yield return 2.0 * sf2 * g;
    124125    }
    125 
    126     public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    127       double d = Util.SqrDist(x, i, xt, j, inverseLength, columnIndices);
    128       return sf2 * Math.Exp(-d / 2.0);
    129     }
    130126  }
    131127}
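GetParameterValues above reads the inverse length first and the scale second, so with both parameters free p = { ln(l), 0.5*ln(sf2) } for length scale l and scale sf2. Fixing a parameter in the template shortens p accordingly. A minimal sketch (values are illustrative):

    var se = new CovarianceSquaredExponentialIso();
    double l = 1.0, sf2 = 2.0;
    var covFree = se.GetParameterizedCovarianceFunction(new[] { Math.Log(l), 0.5 * Math.Log(sf2) }, Enumerable.Range(0, 2));
    se.ScaleParameter.Value = new DoubleValue(sf2);           // fix the scale in the GUI template
    var covFixed = se.GetParameterizedCovarianceFunction(new[] { Math.Log(l) }, Enumerable.Range(0, 2));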
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSum.cs

    r8929 r8982  
    2323using System.Collections.Generic;
    2424using System.Linq;
     25using System.Linq.Expressions;
    2526using HeuristicLab.Common;
    2627using HeuristicLab.Core;
     
    6667    }
    6768
    68     public void SetParameter(double[] hyp) {
    69       if (terms.Count == 0) throw new ArgumentException("At least one term is needed for sum covariance function.");
     69    public void SetParameter(double[] p) {
    7070      int offset = 0;
    7171      foreach (var t in terms) {
    7272        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
    73         t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
     73        t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray());
    7474        offset += numberOfParameters;
    7575      }
    7676    }
    7777
    78     public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    79       return terms.Select(t => t.GetCovariance(x, i, j, columnIndices)).Sum();
    80     }
     78    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
      79      if (terms.Count == 0) throw new ArgumentException("At least one term is necessary for the sum covariance function.");
     80      var functions = new List<ParameterizedCovarianceFunction>();
     81      foreach (var t in terms) {
     82        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
     83        functions.Add(t.GetParameterizedCovarianceFunction(p.Take(numberOfParameters).ToArray(), columnIndices));
     84        p = p.Skip(numberOfParameters).ToArray();
     85      }
    8186
    82     public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    83       return terms.Select(t => t.GetGradient(x, i, j, columnIndices)).Aggregate(Enumerable.Concat);
    84     }
    85 
    86     public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    87       return terms.Select(t => t.GetCrossCovariance(x, xt, i, j, columnIndices)).Sum();
     87      var sum = new ParameterizedCovarianceFunction();
     88      sum.Covariance = (x, i, j) => functions.Select(e => e.Covariance(x, i, j)).Sum();
     89      sum.CrossCovariance = (x, xt, i, j) => functions.Select(e => e.CrossCovariance(x, xt, i, j)).Sum();
     90      sum.CovarianceGradient = (x, i, j) => functions.Select(e => e.CovarianceGradient(x, i, j)).Aggregate(Enumerable.Concat);
     91      return sum;
    8892    }
    8993  }
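The sum template consumes the parameter vector term by term, in the order of its terms, and CovarianceGradient concatenates the per-term gradients in the same order. A minimal sketch (assuming the public Terms collection of CovarianceSum, which is not shown in this hunk; values are illustrative):

    var sum = new CovarianceSum();
    sum.Terms.Add(new CovarianceSquaredExponentialIso());     // 2 free parameters
    sum.Terms.Add(new CovarianceRationalQuadraticIso());      // 3 free parameters
    int n = sum.GetNumberOfParameters(2);                     // 5 in total; called first so the composite knows numberOfVariables (assumption)
    var cov = sum.GetParameterizedCovarianceFunction(new double[n], Enumerable.Range(0, 2));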
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationSolutionCreator.cs

    r8679 r8982  
    7777      if (ModelParameter.ActualValue != null) {
    7878        var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
     79        m.FixParameters();
    7980        var data = (IClassificationProblemData)ProblemDataParameter.ActualValue.Clone();
    8081        var model = new DiscriminantFunctionClassificationModel(m, new NormalDistributionCutPointsThresholdCalculator());
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r8623 r8982  
    8181
    8282    [Storable]
     83    private double[] meanParameter;
     84    [Storable]
     85    private double[] covarianceParameter;
     86
     87    [Storable]
    8388    private double[,] l;
    8489
     
    99104      this.targetVariable = original.targetVariable;
    100105      this.sqrSigmaNoise = original.sqrSigmaNoise;
     106      if (original.meanParameter != null) {
     107        this.meanParameter = (double[])original.meanParameter.Clone();
     108      }
     109      if (original.covarianceParameter != null) {
     110        this.covarianceParameter = (double[])original.covarianceParameter.Clone();
     111      }
    101112
    102113      // shallow copies of arrays because they cannot be modified
     
    118129
    119130      int nVariables = this.allowedInputVariables.Length;
    120       this.meanFunction.SetParameter(hyp
     131      meanParameter = hyp
    121132        .Take(this.meanFunction.GetNumberOfParameters(nVariables))
    122         .ToArray());
    123       this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
    124         .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
    125         .ToArray());
     133        .ToArray();
     134
     135      covarianceParameter = hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
     136                                             .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
     137                                             .ToArray();
    126138      sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
    127139
     
    138150
    139151      // calculate means and covariances
    140       double[] m = meanFunction.GetMean(x);
     152      var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, x.GetLength(1)));
     153      double[] m = Enumerable.Range(0, x.GetLength(0))
     154        .Select(r => mean.Mean(x, r))
     155        .ToArray();
     156
     157      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
    141158      for (int i = 0; i < n; i++) {
    142159        for (int j = i; j < n; j++) {
    143           l[j, i] = covarianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;
     160          l[j, i] = cov.Covariance(x, i, j) / sqrSigmaNoise;
    144161          if (j == i) l[j, i] += 1.0;
    145162        }
    146163      }
     164
    147165
    148166      // cholesky decomposition
     
    181199
    182200      double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
    183       for (int i = 0; i < meanGradients.Length; i++) {
    184         var meanGrad = meanFunction.GetGradients(i, x);
    185         meanGradients[i] = -Util.ScalarProd(meanGrad, alpha);
     201      for (int k = 0; k < meanGradients.Length; k++) {
     202        var meanGrad = Enumerable.Range(0, alpha.Length)
     203        .Select(r => mean.Gradient(x, r, k));
     204        meanGradients[k] = -Util.ScalarProd(meanGrad, alpha);
    186205      }
    187206
     
    190209        for (int i = 0; i < n; i++) {
    191210          for (int j = 0; j < i; j++) {
    192             var g = covarianceFunction.GetGradient(x, i, j).ToArray();
     211            var g = cov.CovarianceGradient(x, i, j).ToArray();
    193212            for (int k = 0; k < covGradients.Length; k++) {
    194213              covGradients[k] += lCopy[i, j] * g[k];
     
    196215          }
    197216
    198           var gDiag = covarianceFunction.GetGradient(x, i, i).ToArray();
     217          var gDiag = cov.CovarianceGradient(x, i, i).ToArray();
    199218          for (int k = 0; k < covGradients.Length; k++) {
    200219            // diag
     
    216235    }
    217236
     237    // is called by the solution creator to set all parameter values of the covariance and mean function
     238    // to the optimized values (necessary to make the values visible in the GUI)
     239    public void FixParameters() {
     240      covarianceFunction.SetParameter(covarianceParameter);
     241      meanFunction.SetParameter(meanParameter);
     242      covarianceParameter = new double[0];
     243      meanParameter = new double[0];
     244    }
     245
    218246    #region IRegressionModel Members
    219247    public IEnumerable<double> GetEstimatedValues(Dataset dataset, IEnumerable<int> rows) {
     
    234262      int n = x.GetLength(0);
    235263      var Ks = new double[newN, n];
    236       var ms = meanFunction.GetMean(newX);
     264      var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, newX.GetLength(1)));
     265      var ms = Enumerable.Range(0, newX.GetLength(0))
     266      .Select(r => mean.Mean(newX, r))
     267      .ToArray();
     268      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));
    237269      for (int i = 0; i < newN; i++) {
    238270        for (int j = 0; j < n; j++) {
    239           Ks[i, j] = covarianceFunction.GetCrossCovariance(x, newX, j, i);
     271          Ks[i, j] = cov.CrossCovariance(x, newX, j, i);
    240272        }
    241273      }
     
    252284      var kss = new double[newN];
    253285      double[,] sWKs = new double[n, newN];
     286      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));
    254287
    255288      // for stddev
    256289      for (int i = 0; i < newN; i++)
    257         kss[i] = covarianceFunction.GetCovariance(newX, i, i);
     290        kss[i] = cov.Covariance(newX, i, i);
    258291
    259292      for (int i = 0; i < newN; i++) {
    260293        for (int j = 0; j < n; j++) {
    261           sWKs[j, i] = covarianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
     294          sWKs[j, i] = cov.CrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
    262295        }
    263296      }
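The model now stores the raw parameter vectors (meanParameter, covarianceParameter) and only materializes the mean and covariance closures when needed; FixParameters later writes the optimized values back into the templates so they become visible in the GUI. The hyperparameter vector is laid out as [free mean parameters | free covariance parameters | ln(sigma_noise)]. A minimal sketch of assembling such a vector (concrete classes and values are illustrative):

    var meanFunction = new MeanConst();
    var covarianceFunction = new CovarianceSquaredExponentialIso();
    int nVars = 2;
    int nMean = meanFunction.GetNumberOfParameters(nVars);        // 1
    int nCov = covarianceFunction.GetNumberOfParameters(nVars);   // 2
    double sigmaNoise = 0.1;
    double[] hyp = new double[nMean + nCov + 1];
    hyp[hyp.Length - 1] = Math.Log(sigmaNoise);                   // sqrSigmaNoise = exp(2 * hyp.Last())
    // the constructor hunk above takes the first nMean entries for the mean function,
    // the next nCov entries for the covariance function, and the noise term last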
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs

    r8494 r8982  
    7777      if (ModelParameter.ActualValue != null) {
    7878        var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
     79        m.FixParameters();
    7980        var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
    8081        var s = new GaussianProcessRegressionSolution(m, data);
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs

    r8678 r8982  
    2020#endregion
    2121
     22using System;
    2223using System.Collections.Generic;
     24using System.Linq.Expressions;
    2325using HeuristicLab.Core;
    2426
    2527namespace HeuristicLab.Algorithms.DataAnalysis {
     28
     29  public delegate double CovarianceFunctionDelegate(double[,] x, int i, int j);
     30  public delegate double CrossCovarianceFunctionDelegate(double[,] x, double[,] xt, int i, int j);
     31  public delegate IEnumerable<double> CovarianceGradientFunctionDelegate(double[,] x, int i, int j);
     32
     33  public class ParameterizedCovarianceFunction {
     34    public CovarianceFunctionDelegate Covariance { get; set; }
     35    public CrossCovarianceFunctionDelegate CrossCovariance { get; set; }
     36    public CovarianceGradientFunctionDelegate CovarianceGradient { get; set; }
     37  }
     38
    2639  public interface ICovarianceFunction : IItem {
    2740    int GetNumberOfParameters(int numberOfVariables);
    28     void SetParameter(double[] hyp);
    29     double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
    30     IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
    31     double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices = null);
     41    void SetParameter(double[] p);
     42    ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices);
    3243  }
    3344}
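Because the returned delegates close over explicit parameter values, a gradient can be checked against finite differences without touching any cached state. A minimal sketch (not part of the changeset; assumes using System; using System.Linq;) that is handy for verifying that an implementation's gradient ordering matches the packing of p:

    static double GradientError(ICovarianceFunction template, double[] p, double[,] x, int k, double eps = 1e-6) {
      var cols = Enumerable.Range(0, x.GetLength(1));
      var pHi = (double[])p.Clone(); pHi[k] += eps;
      var pLo = (double[])p.Clone(); pLo[k] -= eps;
      double numeric = (template.GetParameterizedCovarianceFunction(pHi, cols).Covariance(x, 0, 1)
                      - template.GetParameterizedCovarianceFunction(pLo, cols).Covariance(x, 0, 1)) / (2 * eps);
      double analytic = template.GetParameterizedCovarianceFunction(p, cols).CovarianceGradient(x, 0, 1).ElementAt(k);
      return Math.Abs(numeric - analytic);                    // should be close to zero for every free parameter index k
    }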
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs

    r8612 r8982  
    1919 */
    2020#endregion
     21
     22using System;
     23using System.Collections.Generic;
    2124using HeuristicLab.Core;
    2225
    2326namespace HeuristicLab.Algorithms.DataAnalysis {
     27  public delegate double MeanFunctionDelegate(double[,] x, int row);
     28  public delegate double MeanGradientDelegate(double[,] x, int row, int k);
     29
     30  public class ParameterizedMeanFunction {
     31    public MeanFunctionDelegate Mean { get; set; }
     32    public MeanGradientDelegate Gradient { get; set; }
     33  }
     34
    2435  public interface IMeanFunction : IItem {
    2536    int GetNumberOfParameters(int numberOfVariables);
    26     void SetParameter(double[] hyp);
    27     double[] GetMean(double[,] x);
    28     double[] GetGradients(int k, double[,] x);
     37    void SetParameter(double[] p);
     38    ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices);
    2939  }
    3040}
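The mean counterpart returns per-row closures instead of whole arrays. A minimal sketch using MeanConst from this changeset (the values are illustrative):

    IMeanFunction meanTemplate = new MeanConst();             // one free parameter: the constant c
    var mean = meanTemplate.GetParameterizedMeanFunction(new[] { 1.5 }, Enumerable.Range(0, 2));
    double[,] x = { { 0.0, 1.0 }, { 2.0, 3.0 } };
    double m0 = mean.Mean(x, 0);                              // 1.5
    double g0 = mean.Gradient(x, 0, 0);                       // 1.0, i.e. d mean / d c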
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanConst.cs

    r8929 r8982  
    2121
    2222using System;
     23using System.Collections.Generic;
    2324using System.Linq;
    2425using HeuristicLab.Common;
    2526using HeuristicLab.Core;
    2627using HeuristicLab.Data;
     28using HeuristicLab.Parameters;
    2729using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2830
     
    3133  [Item(Name = "MeanConst", Description = "Constant mean function for Gaussian processes.")]
    3234  public sealed class MeanConst : ParameterizedNamedItem, IMeanFunction {
    33     [Storable]
    34     private double c;
    35     [Storable]
    36     private readonly HyperParameter<DoubleValue> valueParameter;
    37     public IValueParameter<DoubleValue> ValueParameter { get { return valueParameter; } }
     35    public IValueParameter<DoubleValue> ValueParameter {
     36      get { return (IValueParameter<DoubleValue>)Parameters["Value"]; }
     37    }
    3838
    3939    [StorableConstructor]
     
    4141    private MeanConst(MeanConst original, Cloner cloner)
    4242      : base(original, cloner) {
    43       this.c = original.c;
    44       this.valueParameter = cloner.Clone(original.valueParameter);
    45       RegisterEvents();
    4643    }
    4744    public MeanConst()
     
    5047      this.description = ItemDescription;
    5148
    52       this.valueParameter = new HyperParameter<DoubleValue>("Value", "The constant value for the constant mean function.");
    53       Parameters.Add(valueParameter);
    54       RegisterEvents();
     49      Parameters.Add(new OptionalValueParameter<DoubleValue>("Value", "The constant value for the constant mean function."));
    5550    }
    5651
     
    5954    }
    6055
    61     [StorableHook(HookType.AfterDeserialization)]
    62     private void AfterDeserialization() {
    63       RegisterEvents();
     56    public int GetNumberOfParameters(int numberOfVariables) {
     57      return ValueParameter.Value != null ? 0 : 1;
    6458    }
    6559
    66     private void RegisterEvents() {
    67       Util.AttachValueChangeHandler<DoubleValue, double>(valueParameter, () => { c = valueParameter.Value.Value; });
     60    public void SetParameter(double[] p) {
     61      double c;
     62      GetParameters(p, out c);
     63      ValueParameter.Value = new DoubleValue(c);
    6864    }
    6965
    70     public int GetNumberOfParameters(int numberOfVariables) {
    71       return valueParameter.Fixed ? 0 : 1;
     66    private void GetParameters(double[] p, out double c) {
     67      if (ValueParameter.Value == null) {
     68        c = p[0];
     69      } else {
     70        if (p.Length > 0)
     71          throw new ArgumentException(
     72            "The length of the parameter vector does not match the number of free parameters for the constant mean function.",
     73            "p");
     74        c = ValueParameter.Value.Value;
     75      }
    7276    }
    7377
    74     public void SetParameter(double[] hyp) {
    75       if (!valueParameter.Fixed) {
    76         valueParameter.SetValue(new DoubleValue(hyp[0]));
    77       } else if (hyp.Length > 0)
    78         throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the constant mean function.", "hyp");
    79     }
    80 
    81     public double[] GetMean(double[,] x) {
    82       return Enumerable.Repeat(c, x.GetLength(0)).ToArray();
    83     }
    84 
    85     public double[] GetGradients(int k, double[,] x) {
    86       if (k > 0) throw new ArgumentException();
    87       return Enumerable.Repeat(1.0, x.GetLength(0)).ToArray();
     78    public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
     79      double c;
     80      GetParameters(p, out c);
     81      var mf = new ParameterizedMeanFunction();
     82      mf.Mean = (x, i) => c;
     83      mf.Gradient = (x, i, k) => {
     84        if (k > 0) throw new ArgumentException();
     85        return 1.0;
     86      };
     87      return mf;
    8888    }
    8989  }
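Fixing the constant in the GUI template removes it from the free parameters, and the template then insists on an empty parameter vector. A minimal sketch (values are illustrative):

    var mc = new MeanConst();
    mc.ValueParameter.Value = new DoubleValue(2.5);           // fix the constant
    int n = mc.GetNumberOfParameters(3);                      // 0
    var mean = mc.GetParameterizedMeanFunction(new double[0], Enumerable.Range(0, 3));
    // mean.Mean(x, row) == 2.5 for every row; a non-empty p would throw an ArgumentException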
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanLinear.cs

    r8929 r8982  
    2121
    2222using System;
     23using System.Collections.Generic;
    2324using System.Linq;
    2425using HeuristicLab.Common;
    2526using HeuristicLab.Core;
    2627using HeuristicLab.Data;
     28using HeuristicLab.Parameters;
    2729using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2830
     
    3133  [Item(Name = "MeanLinear", Description = "Linear mean function for Gaussian processes.")]
    3234  public sealed class MeanLinear : ParameterizedNamedItem, IMeanFunction {
    33     [Storable]
    34     private double[] weights;
    35     [Storable]
    36     private readonly HyperParameter<DoubleArray> weightsParameter;
    37     public IValueParameter<DoubleArray> WeightsParameter { get { return weightsParameter; } }
     35    public IValueParameter<DoubleArray> WeightsParameter {
     36      get { return (IValueParameter<DoubleArray>)Parameters["Weights"]; }
     37    }
    3838
    3939    [StorableConstructor]
     
    4141    private MeanLinear(MeanLinear original, Cloner cloner)
    4242      : base(original, cloner) {
    43       if (original.weights != null) {
    44         this.weights = new double[original.weights.Length];
    45         Array.Copy(original.weights, weights, original.weights.Length);
    46       }
    47       weightsParameter = cloner.Clone(original.weightsParameter);
    48       RegisterEvents();
    4943    }
    5044    public MeanLinear()
    5145      : base() {
    52       this.weightsParameter = new HyperParameter<DoubleArray>("Weights", "The weights parameter for the linear mean function.");
    53       Parameters.Add(weightsParameter);
    54       RegisterEvents();
     46      Parameters.Add(new OptionalValueParameter<DoubleArray>("Weights", "The weights parameter for the linear mean function."));
    5547    }
    5648
     
    5951    }
    6052
    61     [StorableHook(HookType.AfterDeserialization)]
    62     private void AfterDeserialization() {
    63       RegisterEvents();
     53    public int GetNumberOfParameters(int numberOfVariables) {
     54      return WeightsParameter.Value != null ? 0 : numberOfVariables;
    6455    }
    6556
    66     private void RegisterEvents() {
    67       Util.AttachArrayChangeHandler<DoubleArray, double>(weightsParameter, () => {
    68         weights = weightsParameter.Value.ToArray();
    69       });
     57    public void SetParameter(double[] p) {
     58      double[] weights;
     59      GetParameter(p, out weights);
     60      WeightsParameter.Value = new DoubleArray(weights);
    7061    }
    7162
    72     public int GetNumberOfParameters(int numberOfVariables) {
    73       return weightsParameter.Fixed ? 0 : numberOfVariables;
     63    public void GetParameter(double[] p, out double[] weights) {
     64      if (WeightsParameter.Value == null) {
     65        weights = p;
     66      } else {
     67        if (p.Length != 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the linear mean function.", "p");
     68        weights = WeightsParameter.Value.ToArray();
     69      }
    7470    }
    7571
    76     public void SetParameter(double[] hyp) {
    77       if (!weightsParameter.Fixed) {
    78         weightsParameter.SetValue(new DoubleArray(hyp));
    79       } else if (hyp.Length != 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the linear mean function.", "hyp");
    80     }
    81 
    82     public double[] GetMean(double[,] x) {
    83       // sanity check
    84       if (weights.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function.");
    85       int cols = x.GetLength(1);
    86       int n = x.GetLength(0);
    87       return (from i in Enumerable.Range(0, n)
    88               let rowVector = Enumerable.Range(0, cols).Select(j => x[i, j])
    89               select Util.ScalarProd(weights, rowVector))
    90         .ToArray();
    91     }
    92 
    93     public double[] GetGradients(int k, double[,] x) {
    94       int cols = x.GetLength(1);
    95       int n = x.GetLength(0);
    96       if (k > cols) throw new ArgumentException();
    97       return (Enumerable.Range(0, n).Select(r => x[r, k])).ToArray();
     72    public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
     73      double[] weights;
     74      int[] columns = columnIndices.ToArray();
     75      GetParameter(p, out weights);
     76      var mf = new ParameterizedMeanFunction();
     77      mf.Mean = (x, i) => {
     78        // sanity check
      79        if (weights.Length != columns.Length) throw new ArgumentException("The number of parameters must match the number of variables for the linear mean function.");
     80        return Util.ScalarProd(weights, Util.GetRow(x, i, columns));
     81      };
     82      mf.Gradient = (x, i, k) => {
     83        if (k > columns.Length) throw new ArgumentException();
     84        return x[i, columns[k]];
     85      };
     86      return mf;
    9887    }
    9988  }
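For the linear mean the free parameters are the weights themselves (one per selected column, no log transform), and the gradient with respect to weight k is simply x[i, columns[k]]. A minimal sketch (values are illustrative):

    var ml = new MeanLinear();
    double[,] x = { { 1.0, 2.0 }, { 3.0, 4.0 } };
    var mean = ml.GetParameterizedMeanFunction(new[] { 0.5, -1.0 }, Enumerable.Range(0, 2));
    double m0 = mean.Mean(x, 0);                              // 0.5 * 1.0 + (-1.0) * 2.0 = -1.5
    double g1 = mean.Gradient(x, 0, 1);                       // x[0, 1] = 2.0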
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanProduct.cs

    r8929 r8982  
    1919 */
    2020#endregion
     21
     22using System.Collections.Generic;
    2123using System.Linq;
    2224using HeuristicLab.Common;
     
    6163    }
    6264
    63     public void SetParameter(double[] hyp) {
     65    public void SetParameter(double[] p) {
    6466      int offset = 0;
    6567      foreach (var t in factors) {
    6668        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
    67         t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
     69        t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray());
    6870        offset += numberOfParameters;
    6971      }
    7072    }
    7173
    72     public double[] GetMean(double[,] x) {
    73       var res = factors.First().GetMean(x);
    74       foreach (var t in factors.Skip(1)) {
    75         var a = t.GetMean(x);
    76         for (int i = 0; i < res.Length; i++) res[i] *= a[i];
    77       }
    78       return res;
    79     }
    8074
    81     public double[] GetGradients(int k, double[,] x) {
    82       double[] res = Enumerable.Repeat(1.0, x.GetLength(0)).ToArray();
    83       // find index of factor for the given k
    84       int j = 0;
    85       while (k >= factors[j].GetNumberOfParameters(numberOfVariables)) {
    86         k -= factors[j].GetNumberOfParameters(numberOfVariables);
    87         j++;
    88       }
    89       for (int i = 0; i < factors.Count; i++) {
    90         var f = factors[i];
    91         if (i == j) {
    92           // multiply gradient
    93           var g = f.GetGradients(k, x);
    94           for (int ii = 0; ii < res.Length; ii++) res[ii] *= g[ii];
    95         } else {
    96           // multiply mean
    97           var m = f.GetMean(x);
    98           for (int ii = 0; ii < res.Length; ii++) res[ii] *= m[ii];
     75    public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
     76      var factorMf = new List<ParameterizedMeanFunction>();
     77      int totalNumberOfParameters = GetNumberOfParameters(numberOfVariables);
     78      int[] factorIndexMap = new int[totalNumberOfParameters]; // maps k-th hyperparameter to the correct mean-term
     79      int[] hyperParameterIndexMap = new int[totalNumberOfParameters]; // maps k-th hyperparameter to the l-th hyperparameter of the correct mean-term
     80      int c = 0;
     81      // get the parameterized mean function for each term
     82      for (int factorIndex = 0; factorIndex < factors.Count; factorIndex++) {
     83        var numberOfParameters = factors[factorIndex].GetNumberOfParameters(numberOfVariables);
     84        factorMf.Add(factors[factorIndex].GetParameterizedMeanFunction(p.Take(numberOfParameters).ToArray(), columnIndices));
     85        p = p.Skip(numberOfParameters).ToArray();
     86
     87        for (int hyperParameterIndex = 0; hyperParameterIndex < numberOfParameters; hyperParameterIndex++) {
     88          factorIndexMap[c] = factorIndex;
     89          hyperParameterIndexMap[c] = hyperParameterIndex;
     90          c++;
    9991        }
    10092      }
    101       return res;
     93
     94      var mf = new ParameterizedMeanFunction();
     95      mf.Mean = (x, i) => factorMf.Select(t => t.Mean(x, i)).Aggregate((a, b) => a * b);
     96      mf.Gradient = (x, i, k) => {
     97        double result = 1.0;
     98        int hyperParameterFactorIndex = factorIndexMap[k];
     99        for (int factorIndex = 0; factorIndex < factors.Count; factorIndex++) {
     100          if (factorIndex == hyperParameterFactorIndex) {
     101            // multiply gradient
     102            result *= factorMf[factorIndex].Gradient(x, i, hyperParameterIndexMap[k]);
     103          } else {
     104            // multiply mean
     105            result *= factorMf[factorIndex].Mean(x, i);
     106          }
     107        }
     108        return result;
     109      };
     110      return mf;
    102111    }
    103112  }
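The Gradient closure above applies the product rule: the global parameter index k is translated by the two index maps into a factor index j(k) and a local parameter index l(k), and the result is the gradient of factor j(k) with respect to its l(k)-th parameter multiplied by the means of all other factors at the same row.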
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanSum.cs

    r8929 r8982  
    1919 */
    2020#endregion
     21
     22using System.Collections.Generic;
    2123using System.Linq;
    2224using HeuristicLab.Common;
     
    5759    }
    5860
    59     public void SetParameter(double[] hyp) {
     61    public void SetParameter(double[] p) {
    6062      int offset = 0;
    6163      foreach (var t in terms) {
    6264        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
    63         t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
     65        t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray());
    6466        offset += numberOfParameters;
    6567      }
    6668    }
    6769
    68     public double[] GetMean(double[,] x) {
    69       var res = terms.First().GetMean(x);
    70       foreach (var t in terms.Skip(1)) {
    71         var a = t.GetMean(x);
    72         for (int i = 0; i < res.Length; i++) res[i] += a[i];
     70    public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
     71      var termMf = new List<ParameterizedMeanFunction>();
     72      int totalNumberOfParameters = GetNumberOfParameters(numberOfVariables);
     73      int[] termIndexMap = new int[totalNumberOfParameters]; // maps k-th parameter to the correct mean-term
     74      int[] hyperParameterIndexMap = new int[totalNumberOfParameters]; // maps k-th parameter to the l-th parameter of the correct mean-term
     75      int c = 0;
     76      // get the parameterized mean function for each term
     77      for (int termIndex = 0; termIndex < terms.Count; termIndex++) {
     78        var numberOfParameters = terms[termIndex].GetNumberOfParameters(numberOfVariables);
     79        termMf.Add(terms[termIndex].GetParameterizedMeanFunction(p.Take(numberOfParameters).ToArray(), columnIndices));
     80        p = p.Skip(numberOfParameters).ToArray();
     81
     82        for (int hyperParameterIndex = 0; hyperParameterIndex < numberOfParameters; hyperParameterIndex++) {
     83          termIndexMap[c] = termIndex;
     84          hyperParameterIndexMap[c] = hyperParameterIndex;
     85          c++;
     86        }
    7387      }
    74       return res;
    75     }
    7688
    77     public double[] GetGradients(int k, double[,] x) {
    78       int i = 0;
    79       while (k >= terms[i].GetNumberOfParameters(numberOfVariables)) {
    80         k -= terms[i].GetNumberOfParameters(numberOfVariables);
    81         i++;
    82       }
    83       return terms[i].GetGradients(k, x);
     89      var mf = new ParameterizedMeanFunction();
     90      mf.Mean = (x, i) => termMf.Select(t => t.Mean(x, i)).Sum();
     91      mf.Gradient = (x, i, k) => {
     92        return termMf[termIndexMap[k]].Gradient(x, i, hyperParameterIndexMap[k]);
     93      };
     94      return mf;
    8495    }
    8596  }
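
    MeanSum (like MeanProduct above) splits the flat parameter vector p across its terms and records, for every global parameter index k, which term owns it and at which local position. A minimal standalone sketch of that index mapping, with made-up per-term parameter counts (e.g. MeanConst: 1, MeanLinear over three inputs: 3, MeanZero: 0):

      using System;
      using System.Linq;

      static class ParameterIndexMapSketch {
        static void Main() {
          // hypothetical parameter counts per term
          int[] parametersPerTerm = { 1, 3, 0 };
          int total = parametersPerTerm.Sum();

          var termIndexMap = new int[total];           // maps global parameter index k to the owning term
          var localParameterIndexMap = new int[total]; // maps k to the parameter index within that term
          int c = 0;
          for (int t = 0; t < parametersPerTerm.Length; t++)
            for (int l = 0; l < parametersPerTerm[t]; l++) {
              termIndexMap[c] = t;
              localParameterIndexMap[c] = l;
              c++;
            }

          for (int k = 0; k < total; k++)
            Console.WriteLine("parameter {0} -> term {1}, local index {2}",
                              k, termIndexMap[k], localParameterIndexMap[k]);
        }
      }

    With these two arrays the sum's Gradient delegate can forward a global parameter index directly to the right term, while the product's Gradient delegate uses them to decide which factor contributes its gradient and which contribute their means.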
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanFunctions/MeanZero.cs

    r8929 r8982  
    2020#endregion
    2121using System;
     22using System.Collections.Generic;
    2223using System.Linq;
    2324using HeuristicLab.Common;
     
    4546    }
    4647
    47     public void SetParameter(double[] hyp) {
    48       if (hyp.Length > 0) throw new ArgumentException("No hyper-parameters allowed for zero mean function.", "hyp");
     48    public void SetParameter(double[] p) {
     49      if (p.Length > 0) throw new ArgumentException("No parameters allowed for zero mean function.", "p");
    4950    }
    5051
    51     public double[] GetMean(double[,] x) {
    52       return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
    53     }
    54 
    55     public double[] GetGradients(int k, double[,] x) {
    56       if (k > 0) throw new ArgumentException();
    57       return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
     52    public ParameterizedMeanFunction GetParameterizedMeanFunction(double[] p, IEnumerable<int> columnIndices) {
     53      if (p.Length > 0) throw new ArgumentException("No parameters allowed for zero mean function.", "p");
     54      var mf = new ParameterizedMeanFunction();
     55      mf.Mean = (x, i) => 0.0;
     56      mf.Gradient = (x, i, k) => {
     57        if (k > 0)
     58          throw new ArgumentException();
     59        return 0.0;
     60      };
     61      return mf;
    5862    }
    5963  }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs

    r8933 r8982  
    106106    public static IEnumerable<double> GetRow(double[,] x, int r) {
    107107      int cols = x.GetLength(1);
    108       return Enumerable.Range(0, cols).Select(c => x[r, c]);
     108      return GetRow(x, r, Enumerable.Range(0, cols));
     109    }
     110    public static IEnumerable<double> GetRow(double[,] x, int r, IEnumerable<int> columnIndices) {
     111      return columnIndices.Select(c => x[r, c]);
    109112    }
    110113    public static IEnumerable<double> GetCol(double[,] x, int c) {
     
    112115      return Enumerable.Range(0, rows).Select(r => x[r, c]);
    113116    }
    114 
    115 
    116     public static void AttachValueChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
    117       where T : ValueTypeValue<U>
    118       where U : struct {
    119       parameter.ValueChanged += (sender, args) => {
    120         if (parameter.Value != null) {
    121           parameter.Value.ValueChanged += (s, a) => action();
    122           action();
    123         }
    124       };
    125       if (parameter.Value != null) {
    126         parameter.Value.ValueChanged += (s, a) => action();
    127       }
    128     }
    129 
    130     public static void AttachArrayChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
    131       where T : ValueTypeArray<U>
    132       where U : struct {
    133       parameter.ValueChanged += (sender, args) => {
    134         if (parameter.Value != null) {
    135           parameter.Value.ItemChanged += (s, a) => action();
    136           parameter.Value.Reset += (s, a) => action();
    137           action();
    138         }
    139       };
    140       if (parameter.Value != null) {
    141         parameter.Value.ItemChanged += (s, a) => action();
    142         parameter.Value.Reset += (s, a) => action();
    143       }
    144     }
    145117  }
    146118}
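
    The new GetRow overload projects a row onto an arbitrary set of columns, which is what covariance and mean functions use when they are restricted to a subset of input variables. A small self-contained sketch of the same projection (the method body mirrors the changeset; the surrounding program is added for illustration):

      using System;
      using System.Collections.Generic;
      using System.Linq;

      static class GetRowSketch {
        // same shape as the new Util.GetRow overload: select only the given columns of row r
        static IEnumerable<double> GetRow(double[,] x, int r, IEnumerable<int> columnIndices) {
          return columnIndices.Select(c => x[r, c]);
        }

        static void Main() {
          var x = new double[,] { { 1, 2, 3 }, { 4, 5, 6 } };
          // restrict row 1 to columns 0 and 2, e.g. when a covariance function is masked to a subset of inputs
          Console.WriteLine(string.Join(", ", GetRow(x, 1, new[] { 0, 2 }))); // 4, 6
        }
      }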
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r8929 r8982  
    120120    </Compile>
    121121    <Compile Include="FixedDataAnalysisAlgorithm.cs" />
    122     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceConst.cs" />
    123     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceLinear.cs" />
    124     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceLinearArd.cs" />
    125     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceMask.cs" />
    126     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceMaternIso.cs" />
    127     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceNoise.cs" />
    128     <Compile Include="GaussianProcess\CovarianceFunctions\CovariancePeriodic.cs" />
    129     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceProduct.cs" />
    130     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceRationalQuadraticArd.cs" />
    131     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceRationalQuadraticIso.cs" />
    132     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceScale.cs" />
    133     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceSquaredExponentialArd.cs" />
     122    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceConst.cs">
     123      <SubType>Code</SubType>
     124    </Compile>
     125    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceLinear.cs">
     126      <SubType>Code</SubType>
     127    </Compile>
     128    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceLinearArd.cs">
     129      <SubType>Code</SubType>
     130    </Compile>
     131    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceMask.cs">
     132      <SubType>Code</SubType>
     133    </Compile>
     134    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceMaternIso.cs">
     135      <SubType>Code</SubType>
     136    </Compile>
     137    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceNoise.cs">
     138      <SubType>Code</SubType>
     139    </Compile>
     140    <Compile Include="GaussianProcess\CovarianceFunctions\CovariancePeriodic.cs">
     141      <SubType>Code</SubType>
     142    </Compile>
     143    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceProduct.cs">
     144      <SubType>Code</SubType>
     145    </Compile>
     146    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceRationalQuadraticArd.cs">
     147      <SubType>Code</SubType>
     148    </Compile>
     149    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceRationalQuadraticIso.cs">
     150      <SubType>Code</SubType>
     151    </Compile>
     152    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceScale.cs">
     153      <SubType>Code</SubType>
     154    </Compile>
     155    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceSquaredExponentialArd.cs">
     156      <SubType>Code</SubType>
     157    </Compile>
    134158    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceSquaredExponentialIso.cs" />
    135     <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceSum.cs" />
     159    <Compile Include="GaussianProcess\CovarianceFunctions\CovarianceSum.cs">
     160      <SubType>Code</SubType>
     161    </Compile>
    136162    <Compile Include="GaussianProcess\GaussianProcessClassificationSolutionCreator.cs" />
    137163    <Compile Include="GaussianProcess\GaussianProcessClassificationModelCreator.cs" />
    138164    <Compile Include="GaussianProcess\GaussianProcessClassification.cs" />
    139     <Compile Include="GaussianProcess\HyperParameter.cs" />
    140165    <Compile Include="GaussianProcess\MeanFunctions\MeanConst.cs" />
    141166    <Compile Include="GaussianProcess\MeanFunctions\MeanLinear.cs" />
    142     <Compile Include="GaussianProcess\MeanFunctions\MeanProduct.cs" />
    143     <Compile Include="GaussianProcess\MeanFunctions\MeanSum.cs" />
     167    <Compile Include="GaussianProcess\MeanFunctions\MeanProduct.cs">
     168      <SubType>Code</SubType>
     169    </Compile>
     170    <Compile Include="GaussianProcess\MeanFunctions\MeanSum.cs">
     171      <SubType>Code</SubType>
     172    </Compile>
    144173    <Compile Include="GaussianProcess\MeanFunctions\MeanZero.cs" />
    145174    <Compile Include="GaussianProcess\GaussianProcessHyperparameterInitializer.cs" />
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs

    r8582 r8982  
    3535
    3636    IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows);
     37    void FixParameters();
    3738  }
    3839}
  • trunk/sources/HeuristicLab.Tests/HeuristicLab.Algorithms.DataAnalysis-3.4/GaussianProcessFunctionsTest.cs

    r8620 r8982  
    6666          }
    6767      );
     68      sum = new MeanSum();
     69      sum.Terms.Add(new MeanConst());
     70      sum.Terms.Add(new MeanConst());
    6871      TestMeanFunction(sum, 1,
    6972        new double[] { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 },
     
    8891          }
    8992      );
     93      prod = new MeanProduct();
     94      prod.Factors.Add(new MeanConst());
     95      prod.Factors.Add(new MeanConst());
    9096      TestMeanFunction(prod, 1,
    9197        new double[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
     
    97103      );
    98104
    99       prod.Factors.Clear();
     105      prod = new MeanProduct();
    100106      prod.Factors.Add(new MeanZero());
    101107      prod.Factors.Add(new MeanLinear());
     
    111117          }
    112118      );
     119      prod = new MeanProduct();
     120      prod.Factors.Add(new MeanZero());
     121      prod.Factors.Add(new MeanLinear());
    113122      TestMeanFunction(prod, 1,
    114123        new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
     
    123132      );
    124133
    125       prod.Factors.Clear();
     134      prod = new MeanProduct();
    126135      prod.Factors.Add(new MeanConst());
    127136      prod.Factors.Add(new MeanLinear());
     
    13341343          }
    13351344      );
     1345      cov = new CovarianceSum();
     1346      cov.Terms.Add(new CovarianceSquaredExponentialIso());
     1347      cov.Terms.Add(new CovarianceLinear());
    13361348      TestCovarianceFunction(cov, 1,
    13371349        new double[,]
     
    14411453          }
    14421454      );
     1455      cov = new CovarianceScale();
     1456      cov.CovarianceFunctionParameter.Value = new CovarianceSquaredExponentialIso();
    14431457      TestCovarianceFunction(cov, 1,
    14441458        new double[,]
     
    15501564          }
    15511565      );
     1566      cov = new CovarianceProduct();
     1567      cov.Factors.Add(new CovarianceSquaredExponentialIso());
     1568      cov.Factors.Add(new CovarianceLinear());
    15521569      TestCovarianceFunction(cov, 1,
    15531570        new double[,]
     
    16031620      int nHyp = cf.GetNumberOfParameters(x.GetLength(1));
    16041621      var hyp = Enumerable.Repeat(hypValue, nHyp).ToArray();
    1605       cf.SetParameter(hyp);
    16061622
    16071623      int rows0 = x.GetLength(0);
    16081624      int rows1 = xt.GetLength(0);
    16091625      var actualCov = new double[rows0, rows1];
     1626      var covFunction = cf.GetParameterizedCovarianceFunction(hyp, Enumerable.Range(0, x.GetLength(1)));
    16101627      for (int i = 0; i < rows0; i++)
    16111628        for (int j = 0; j < rows1; j++)
    1612           actualCov[i, j] = cf.GetCrossCovariance(x, xt, i, j);
     1629          actualCov[i, j] = covFunction.CrossCovariance(x, xt, i, j);
    16131630
    16141631      AssertEqual(expectedCov, actualCov, delta);
     
    16161633      for (int i = 0; i < rows0; i++)
    16171634        for (int j = 0; j < rows1; j++) {
    1618           var g = cf.GetGradient(x, i, j).ToArray();
     1635          var g = covFunction.CovarianceGradient(x, i, j).ToArray();
    16191636          for (int k = 0; k < nHyp; k++)
    16201637            Assert.AreEqual(expectedGradients[k][i, j], g[k], delta);
     
    16291646      int nHyp = mf.GetNumberOfParameters(x.GetLength(1));
    16301647      var hyp = Enumerable.Repeat(hypValue, nHyp).ToArray();
    1631       mf.SetParameter(hyp);
    1632 
    1633       var m = mf.GetMean(xt);
     1648      var meanFunction = mf.GetParameterizedMeanFunction(hyp, Enumerable.Range(0, x.GetLength(1)));
     1649
     1650      var m = Enumerable.Range(0, xt.GetLength(0)).Select(i => meanFunction.Mean(xt, i)).ToArray();
    16341651
    16351652      AssertEqual(expectedMean, m);
    16361653
    1637       for (int i = 0; i < nHyp; i++) {
    1638         var g = mf.GetGradients(i, x);
    1639         AssertEqual(expectedGradients[i], g);
     1654      for (int k = 0; k < nHyp; k++) {
     1655        var g = Enumerable.Range(0, xt.GetLength(0)).Select(i => meanFunction.Gradient(x, i, k)).ToArray();
     1656        AssertEqual(expectedGradients[k], g);
    16401657      }
    16411658    }
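
    The updated tests illustrate the new usage pattern: a covariance (or mean) function instance acts as a template, and the actual function is created from the template plus a concrete parameter vector via GetParameterizedCovarianceFunction / GetParameterizedMeanFunction. A hedged usage sketch of that pattern follows; the namespace (HeuristicLab.Algorithms.DataAnalysis) and the parameter values are assumptions, and only member calls that appear in the diff are used:

      using System;
      using System.Linq;
      using HeuristicLab.Algorithms.DataAnalysis; // assumed namespace of the GP covariance functions (3.4)

      class CovarianceTemplateDemo {
        static void Main() {
          var x = new double[,] { { 1.0, 2.0 }, { 3.0, 4.0 } };

          // configure a covariance *template*, as the GUI (or the unit test) would
          var cov = new CovarianceSum();
          cov.Terms.Add(new CovarianceSquaredExponentialIso());
          cov.Terms.Add(new CovarianceLinear());

          // create the actual covariance function from the template and a concrete parameter vector
          int n = cov.GetNumberOfParameters(x.GetLength(1));
          var p = Enumerable.Repeat(0.1, n).ToArray();
          var covFunction = cov.GetParameterizedCovarianceFunction(p, Enumerable.Range(0, x.GetLength(1)));

          Console.WriteLine(covFunction.CrossCovariance(x, x, 0, 1));
          Console.WriteLine(string.Join(" ", covFunction.CovarianceGradient(x, 0, 1)));
        }
      }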