
Timestamp:
09/05/12 17:04:30 (12 years ago)
Author:
gkronber
Message:

#1902 implemented a few covariance functions as parameterized named items. Implemented a rudimentary view for Gaussian process models.

File:
1 edited

Legend:

    (space)  Unmodified
    +        Added
    -        Removed
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceConst.cs

    r8484 → r8582

     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    …
       [Item(Name = "CovarianceConst",
         Description = "Constant covariance function for Gaussian processes.")]
    -  public class CovarianceConst : Item, ICovarianceFunction {
    +  public class CovarianceConst : CovarianceFunction {
    +
    +    public IValueParameter<DoubleValue> ScaleParameter {
    +      get { return scaleParameter; }
    +    }
    +
         [Storable]
    -    private double sf2;
    -    public double Scale { get { return sf2; } }
    +    private readonly HyperParameter<DoubleValue> scaleParameter;
    +
    +    [Storable]
    +    private double scale;
     
         [StorableConstructor]
    …
         protected CovarianceConst(CovarianceConst original, Cloner cloner)
           : base(original, cloner) {
    -      this.sf2 = original.sf2;
    +      this.scaleParameter = cloner.Clone(original.scaleParameter);
    +      this.scale = original.scale;
    +
    +      RegisterEvents();
         }
     
         public CovarianceConst()
           : base() {
    +      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the constant covariance function.");
    +      Parameters.Add(scaleParameter);
    +      RegisterEvents();
         }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
    +    }
    +
    +    // caching
    +    private void RegisterEvents() {
    +      AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
    +    }
    +
     
         public override IDeepCloneable Clone(Cloner cloner) {
    …
         }
     
    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return 1;
    +    public override int GetNumberOfParameters(int numberOfVariables) {
    +      return scaleParameter.Fixed ? 0 : 1;
         }
     
    -    public void SetParameter(double[] hyp) {
    -      if (hyp.Length != 1) throw new ArgumentException("CovarianceConst has only one hyperparameter", "k");
    -      this.sf2 = Math.Exp(2 * hyp[0]);
    +    public override void SetParameter(double[] hyp) {
    +      if (!scaleParameter.Fixed && hyp.Length == 1) {
    +        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
    +      } else {
    +        throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceConst", "hyp");
    +      }
         }
     
    -
    -    public double GetCovariance(double[,] x, int i, int j) {
    -      return sf2;
    +    public override double GetCovariance(double[,] x, int i, int j) {
    +      return scale;
         }
     
    -    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    -      yield return 2 * sf2;
    +    public override IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      yield return 2.0 * scale;
         }
     
    -    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    -      return sf2;
    +    public override double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      return scale;
         }
       }
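
For context, the rewritten class keeps the GPML-style log parameterization of the old code: SetParameter receives hyp[0] on the log scale and stores scale = exp(2 * hyp[0]), so the value yielded by GetGradient, 2 * scale, is d k / d hyp[0] for the constant covariance k(x_i, x_j) = scale. The following usage sketch is not part of the changeset; it assumes the HeuristicLab types from the diff are available, that the namespace matches the file path, and that a freshly constructed scale parameter is not Fixed.

    using System;
    using System.Linq;
    using HeuristicLab.Algorithms.DataAnalysis;  // assumed namespace, from the file path

    public static class CovarianceConstDemo {
      public static void Main() {
        // hyp[0] is log(sf); the class stores scale = exp(2 * hyp[0]) = sf^2.
        var cov = new CovarianceConst();
        cov.SetParameter(new double[] { Math.Log(2.0) });   // sf = 2, cached scale = 4

        var x = new double[2, 1];                           // inputs are ignored by a constant kernel
        double k  = cov.GetCovariance(x, 0, 1);             // 4.0 for every pair (i, j)
        double dk = cov.GetGradient(x, 0, 1).First();       // d k / d hyp[0] = 2 * scale = 8.0

        // One free hyperparameter while the scale parameter is not fixed.
        Console.WriteLine(cov.GetNumberOfParameters(x.GetLength(1)));  // 1
      }
    }

The "// caching" handler registered in RegisterEvents is what keeps this cheap: GetCovariance reads the plain double field instead of unwrapping scaleParameter.Value on every call.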