
Timestamp: 08/06/12 15:02:34
Author: gkronber
Message: #1902 worked on sum and product covariance functions and fixed a few bugs.

Location: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess
Files: 12 edited

Legend:
  (no marker)  unmodified
  +  added
  -  removed
  …  unchanged lines omitted
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs

r8401 → r8416

   [Item(Name = "CovarianceLinear", Description = "Linear covariance function with for Gaussian processes.")]
   public class CovarianceLinear : Item, ICovarianceFunction {
+    private static readonly double[] emptyArray = new double[0];
+
     [Storable]
     private double[,] x;
     [Storable]
     private double[,] xt;
-

     private double[,] k;
…
     protected CovarianceLinear(CovarianceLinear original, Cloner cloner)
       : base(original, cloner) {
-      // note: using shallow copies here!
-      this.x = original.x;
-      this.xt = original.xt;
+      if (original.x != null) {
+        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
+        Array.Copy(original.x, this.x, x.Length);

+        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
+        Array.Copy(original.xt, this.xt, xt.Length);
+
+        this.k = new double[original.k.GetLength(0), original.k.GetLength(1)];
+        Array.Copy(original.k, this.k, k.Length);
+      }
+      this.symmetric = original.symmetric;
     }
     public CovarianceLinear()
…
     }

-    public void SetParameter(double[] hyp, double[,] x) {
-      if (hyp.Length > 0) throw new ArgumentException();
-      SetParameter(hyp, x, x);
+    public void SetParameter(double[] hyp) {
+      if (hyp.Length > 0) throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
+      k = null;
+    }
+
+    public void SetData(double[,] x) {
+      SetData(x, x);
       this.symmetric = true;
     }

-    public void SetParameter(double[] hyp, double[,] x, double[,] xt) {
+    public void SetData(double[,] x, double[,] xt) {
       this.x = x;
       this.xt = xt;
…
     }

-
-    public double[] GetDiagonalCovariances() {
-      if (x != xt) throw new InvalidOperationException();
-      int rows = x.GetLength(0);
-      int cols = x.GetLength(1);
-      var k = new double[rows];
-      for (int i = 0; i < rows; i++) {
-        k[i] = 0;
-        for (int j = 0; j < cols; j++) {
-          k[i] += x[i, j] * x[i, j];
-        }
-      }
-      return k;
-    }
-
     public double[] GetGradient(int i, int j) {
-      throw new NotSupportedException();
+      return emptyArray;
     }

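The refactoring above separates hyperparameter handling (SetParameter, which accepts only an empty vector) from data handling (SetData), and GetGradient now returns an empty array because the linear covariance has no hyperparameters. As a rough standalone sketch of what this covariance computes, namely the dot product of two input rows, with illustrative names rather than the HeuristicLab API:

using System;

// Standalone sketch of the linear covariance: k(x_i, x_j) is the dot product
// of row i of x with row j of xt. Names are illustrative only.
public static class LinearCovarianceSketch {
  public static double Covariance(double[,] x, double[,] xt, int i, int j) {
    double sum = 0.0;
    for (int d = 0; d < x.GetLength(1); d++)
      sum += x[i, d] * xt[j, d];
    return sum;
  }

  public static void Main() {
    var x = new double[,] { { 1.0, 2.0 }, { 3.0, 4.0 } };
    Console.WriteLine(Covariance(x, x, 0, 1)); // 1*3 + 2*4 = 11
  }
}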
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs

r8323 → r8416

-using System.Collections.Generic;
+#region License Information
+/* HeuristicLab
+ * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
+ *
+ * This file is part of HeuristicLab.
+ *
+ * HeuristicLab is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * HeuristicLab is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
+ */
+#endregion
+
 using System.Linq;
+using HeuristicLab.Common;
+using HeuristicLab.Core;
+using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

-namespace HeuristicLab.Algorithms.DataAnalysis.GaussianProcess {
-  public class CovarianceProd : ICovarianceFunction {
-    private IList<ICovarianceFunction> covariances;
+namespace HeuristicLab.Algorithms.DataAnalysis {
+  [StorableClass]
+  [Item(Name = "CovarianceProd",
+    Description = "Product covariance function for Gaussian processes.")]
+  public class CovarianceProd : Item, ICovarianceFunction {
+    [Storable]
+    private ItemList<ICovarianceFunction> factors;

-    public int NumberOfParameters {
-      get { return covariances.Sum(c => c.NumberOfParameters); }
+    [Storable]
+    private int numberOfVariables;
+    public ItemList<ICovarianceFunction> Factors {
+      get { return factors; }
     }

-    public CovarianceProd(IEnumerable<ICovarianceFunction> covariances) {
-      this.covariances = covariances.ToList();
+    [StorableConstructor]
+    protected CovarianceProd(bool deserializing)
+      : base(deserializing) {
     }

-    public void SetMatrix(double[,] x) {
-      foreach (var covariance in covariances) {
-        covariance.SetMatrix(x, x);
+    protected CovarianceProd(CovarianceProd original, Cloner cloner)
+      : base(original, cloner) {
+      this.factors = cloner.Clone(original.factors);
+      this.numberOfVariables = original.numberOfVariables;
+    }
+
+    public CovarianceProd()
+      : base() {
+      this.factors = new ItemList<ICovarianceFunction>();
+    }
+
+    public override IDeepCloneable Clone(Cloner cloner) {
+      return new CovarianceProd(this, cloner);
+    }
+
+    public int GetNumberOfParameters(int numberOfVariables) {
+      this.numberOfVariables = numberOfVariables;
+      return factors.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
+    }
+
+    public void SetParameter(double[] hyp) {
+      int offset = 0;
+      foreach (var t in factors) {
+        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
+        t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
+        offset += numberOfParameters;
       }
     }
-
-    public void SetMatrix(double[,] x, double[,] xt) {
-      foreach (var covariance in covariances) {
-        covariance.SetMatrix(x, xt);
-      }
+    public void SetData(double[,] x) {
+      SetData(x, x);
     }

-    public void SetHyperparamter(double[] hyp) {
-      int i = 0;
-      foreach (var covariance in covariances) {
-        int n = covariance.NumberOfParameters;
-        covariance.SetHyperparamter(hyp.Skip(i).Take(n).ToArray());
-        i += n;
+    public void SetData(double[,] x, double[,] xt) {
+      foreach (var t in factors) {
+        t.SetData(x, xt);
       }
     }

     public double GetCovariance(int i, int j) {
-      return covariances.Select(c => c.GetCovariance(i, j))
-        .Aggregate((a, b) => a * b);
+      return factors.Select(t => t.GetCovariance(i, j)).Aggregate((a, b) => a * b);
     }

-
-    public double[] GetDiagonalCovariances() {
-      return covariances
-        .Select(c => c.GetDiagonalCovariances())
-        .Aggregate((s, d) => s.Zip(d, (a, b) => a * b).ToArray())
-        .ToArray();
-    }
-
-    public double[] GetDerivatives(int i, int j) {
-      return covariances
-        .Select(c => c.GetDerivatives(i, j))
-        .Aggregate(Enumerable.Empty<double>(), (h0, h1) => h0.Concat(h1))
-        .ToArray();
+    public double[] GetGradient(int i, int j) {
+      return factors.Select(t => t.GetGradient(i, j)).SelectMany(seq => seq).ToArray();
     }
   }
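The rewritten CovarianceProd composes its factors by multiplying their covariance values and concatenating their gradient vectors, and it distributes the flat hyperparameter vector over the factors in order. A minimal standalone sketch of that composition, using Func-based stand-ins instead of the ICovarianceFunction/ItemList machinery:

using System;
using System.Collections.Generic;
using System.Linq;

// Standalone sketch of product composition: covariance values multiply,
// gradient vectors concatenate. Func-based factors are illustrative
// stand-ins for ICovarianceFunction implementations.
public static class ProductCovarianceSketch {
  public static double Covariance(IEnumerable<Func<int, int, double>> factors, int i, int j) {
    return factors.Select(f => f(i, j)).Aggregate(1.0, (a, b) => a * b);
  }

  public static double[] Gradient(IEnumerable<Func<int, int, double[]>> factorGradients, int i, int j) {
    // one entry per hyperparameter of each factor, in factor order
    return factorGradients.SelectMany(g => g(i, j)).ToArray();
  }

  public static void Main() {
    var factors = new List<Func<int, int, double>> {
      (i, j) => 2.0,                 // e.g. a constant factor
      (i, j) => i == j ? 1.0 : 0.5   // e.g. a correlation-like factor
    };
    Console.WriteLine(Covariance(factors, 0, 1)); // 2.0 * 0.5 = 1
  }
}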
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEard.cs

r8403 → r8416

     protected CovarianceSEard(CovarianceSEard original, Cloner cloner)
       : base(original, cloner) {
-      // note: using shallow copies here!
-      this.x = original.x;
-      this.xt = original.xt;
+      if (original.x != null) {
+        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
+        Array.Copy(original.x, this.x, x.Length);
+
+        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
+        Array.Copy(original.xt, this.xt, xt.Length);
+
+        this.sd = new double[original.sd.GetLength(0), original.sd.GetLength(1)];
+        Array.Copy(original.sd, this.sd, sd.Length);
+
+        this.l = new double[original.l.Length];
+        Array.Copy(original.l, this.l, l.Length);
+      }
       this.sf2 = original.sf2;
-      this.l = original.l;
+      this.symmetric = original.symmetric;
     }
     public CovarianceSEard()
…
     }

-    public void SetParameter(double[] hyp, double[,] x) {
-      SetParameter(hyp, x, x);
-      this.symmetric = true;
-    }
-
-    public void SetParameter(double[] hyp, double[,] x, double[,] xt) {
-      this.x = x;
-      this.xt = xt;
-      this.symmetric = false;
-
+    public void SetParameter(double[] hyp) {
       this.l = hyp.Take(hyp.Length - 1).Select(Math.Exp).ToArray();
       this.sf2 = Math.Exp(2 * hyp[hyp.Length - 1]);
…
     }

+    public void SetData(double[,] x) {
+      SetData(x, x);
+      this.symmetric = true;
+    }
+
+    public void SetData(double[,] x, double[,] xt) {
+      this.x = x;
+      this.xt = xt;
+      this.symmetric = false;
+
+      sd = null;
+    }
+
     public double GetCovariance(int i, int j) {
       if (sd == null) CalculateSquaredDistances();
       return sf2 * Math.Exp(-sd[i, j] / 2.0);
-    }
-
-
-    public double[] GetDiagonalCovariances() {
-      if (x != xt) throw new InvalidOperationException();
-      int rows = x.GetLength(0);
-      var sd = new double[rows];
-      for (int i = 0; i < rows; i++) {
-        sd[i] = Util.SqrDist(
-          Util.GetRow(x, i).Select((e, k) => e / l[k]),
-          Util.GetRow(xt, i).Select((e, k) => e / l[k]));
-      }
-      return sd.Select(d => sf2 * Math.Exp(-d / 2.0)).ToArray();
     }

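For reference, the covariance this class evaluates is the squared exponential with automatic relevance determination, k(xi, xj) = sf2 * exp(-0.5 * sum_d ((xi_d - xj_d) / l_d)^2), with l_d = exp(hyp_d) and sf2 = exp(2 * hyp_last), exactly as SetParameter decodes them above. A standalone sketch with illustrative names, not the HeuristicLab class:

using System;
using System.Linq;

// Standalone sketch of the SE-ARD covariance:
// k(xi, xj) = sf2 * exp(-0.5 * sum_d ((xi[d] - xj[d]) / l[d])^2)
// with l[d] = exp(hyp[d]) and sf2 = exp(2 * hyp[last]).
public static class SEardSketch {
  public static double Covariance(double[] hyp, double[] xi, double[] xj) {
    double[] l = hyp.Take(hyp.Length - 1).Select(Math.Exp).ToArray();
    double sf2 = Math.Exp(2 * hyp[hyp.Length - 1]);
    double sd = 0.0; // squared distance with per-dimension length scales
    for (int d = 0; d < xi.Length; d++) {
      double diff = (xi[d] - xj[d]) / l[d];
      sd += diff * diff;
    }
    return sf2 * Math.Exp(-sd / 2.0);
  }

  public static void Main() {
    var hyp = new[] { 0.0, 0.0, 0.0 }; // two length scales + scale, all exp(0) = 1
    Console.WriteLine(Covariance(hyp, new[] { 1.0, 2.0 }, new[] { 1.5, 2.5 }));
  }
}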
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEiso.cs

r8401 → r8416

     protected CovarianceSEiso(CovarianceSEiso original, Cloner cloner)
       : base(original, cloner) {
-      // note: using shallow copies here
-      this.x = original.x;
-      this.xt = original.xt;
+      if (original.x != null) {
+        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
+        Array.Copy(original.x, this.x, x.Length);
+
+        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
+        Array.Copy(original.xt, this.xt, xt.Length);
+
+        this.sd = new double[original.sd.GetLength(0), original.sd.GetLength(1)];
+        Array.Copy(original.sd, this.sd, sd.Length);
+        this.sf2 = original.sf2;
+      }
       this.sf2 = original.sf2;
       this.l = original.l;
…
     }

-    public void SetParameter(double[] hyp, double[,] x) {
-      SetParameter(hyp, x, x);
+    public void SetParameter(double[] hyp) {
+      this.l = Math.Exp(hyp[0]);
+      this.sf2 = Math.Min(1E6, Math.Exp(2 * hyp[1])); // upper limit for scale
+      sd = null;
+    }
+    public void SetData(double[,] x) {
+      SetData(x, x);
       this.symmetric = true;
     }


-    public void SetParameter(double[] hyp, double[,] x, double[,] xt) {
-      this.l = Math.Exp(hyp[0]);
-      this.sf2 = Math.Exp(2 * hyp[1]);
-
+    public void SetData(double[,] x, double[,] xt) {
       this.symmetric = false;
       this.x = x;
…
       return sf2 * Math.Exp(-sd[i, j] / 2.0);
     }
-
-
-    public double[] GetDiagonalCovariances() {
-      if (x != xt) throw new InvalidOperationException();
-      int rows = x.GetLength(0);
-      var sd = new double[rows];
-      for (int i = 0; i < rows; i++) {
-        sd[i] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, i).Select(e => e / l));
-      }
-      return sd.Select(d => sf2 * Math.Exp(-d / 2.0)).ToArray();
-    }
-

     public double[] GetGradient(int i, int j) {
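The isotropic variant uses a single length scale l and, per SetParameter above, now clamps sf2 at 1E6. Its gradient with respect to the two log-hyperparameters follows the same formulas as the stray SEiso-style code removed from CovarianceSum below: dk/dlog(l) = k * sd and dk/dlog(sf) = 2k, where sd is the scaled squared distance. A standalone sketch of both quantities with illustrative names:

using System;

// Standalone sketch of the isotropic SE covariance pieces:
// k = sf2 * exp(-sd / 2), with sd the squared distance scaled by l.
// Gradient w.r.t. (log l, log sf): [k * sd, 2 * k].
public static class SEisoSketch {
  public static double SqrDist(double[] xi, double[] xj, double l) {
    double sd = 0.0;
    for (int d = 0; d < xi.Length; d++) {
      double diff = (xi[d] - xj[d]) / l;
      sd += diff * diff;
    }
    return sd;
  }

  public static double[] Gradient(double sf2, double sd) {
    double k = sf2 * Math.Exp(-sd / 2.0);
    return new[] { k * sd, 2.0 * k };
  }

  public static void Main() {
    double l = Math.Exp(0.5), sf2 = Math.Exp(2 * 0.1);
    double sd = SqrDist(new[] { 1.0, 0.0 }, new[] { 0.0, 1.0 }, l);
    Console.WriteLine(string.Join(", ", Gradient(sf2, sd)));
  }
}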
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs

r8366 → r8416

-using System;
+#region License Information
+/* HeuristicLab
+ * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
+ *
+ * This file is part of HeuristicLab.
+ *
+ * HeuristicLab is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * HeuristicLab is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
+ */
+#endregion
+
 using System.Linq;
 using HeuristicLab.Common;
…
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

-namespace HeuristicLab.Algorithms.DataAnalysis.GaussianProcess {
+namespace HeuristicLab.Algorithms.DataAnalysis {
   [StorableClass]
   [Item(Name = "CovarianceSum",
…
     protected CovarianceSum(CovarianceSum original, Cloner cloner)
       : base(original, cloner) {
-      this.terms = cloner.Clone(terms);
+      this.terms = cloner.Clone(original.terms);
+      this.numberOfVariables = original.numberOfVariables;
     }

     public CovarianceSum()
       : base() {
+      this.terms = new ItemList<ICovarianceFunction>();
     }

…
     }

-    public void SetParameter(double[] hyp, double[,] x) {
+    public void SetParameter(double[] hyp) {
       int offset = 0;
       foreach (var t in terms) {
-        t.SetParameter(hyp.Skip(offset).Take(t.GetNumberOfParameters(numberOfVariables)), x);
-        offset += numberOfVariables;
+        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
+        t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
+        offset += numberOfParameters;
+      }
+    }
+    public void SetData(double[,] x) {
+      SetData(x, x);
+    }
+
+    public void SetData(double[,] x, double[,] xt) {
+      foreach (var t in terms) {
+        t.SetData(x, xt);
       }
     }

-
-    public void SetParameter(double[] hyp, double[,] x, double[,] xt) {
-      this.l = Math.Exp(hyp[0]);
-      this.sf2 = Math.Exp(2 * hyp[1]);
-
-      this.symmetric = false;
-      this.x = x;
-      this.xt = xt;
-      sd = null;
+    public double GetCovariance(int i, int j) {
+      return terms.Select(t => t.GetCovariance(i, j)).Sum();
     }

-    public double GetCovariance(int i, int j) {
-      if (sd == null) CalculateSquaredDistances();
-      return sf2 * Math.Exp(-sd[i, j] / 2.0);
-    }
-
-
-    public double[] GetDiagonalCovariances() {
-      if (x != xt) throw new InvalidOperationException();
-      int rows = x.GetLength(0);
-      var sd = new double[rows];
-      for (int i = 0; i < rows; i++) {
-        sd[i] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, i).Select(e => e / l));
-      }
-      return sd.Select(d => sf2 * Math.Exp(-d / 2.0)).ToArray();
-    }
-
-
     public double[] GetGradient(int i, int j) {
-      var res = new double[2];
-      res[0] = sf2 * Math.Exp(-sd[i, j] / 2.0) * sd[i, j];
-      res[1] = 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
-      return res;
-    }
-
-    private void CalculateSquaredDistances() {
-      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
-      int rows = x.GetLength(0);
-      int cols = xt.GetLength(0);
-      sd = new double[rows, cols];
-      if (symmetric) {
-        for (int i = 0; i < rows; i++) {
-          for (int j = i; j < rows; j++) {
-            sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));
-            sd[j, i] = sd[i, j];
-          }
-        }
-      } else {
-        for (int i = 0; i < rows; i++) {
-          for (int j = 0; j < cols; j++) {
-            sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));
-          }
-        }
-      }
+      return terms.Select(t => t.GetGradient(i, j)).SelectMany(seq => seq).ToArray();
     }
   }
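One of the bugs addressed here is the hyperparameter bookkeeping in SetParameter: the offset now advances by each term's own parameter count instead of by numberOfVariables, so every term receives exactly its slice of the flat vector. A standalone sketch of that slicing, with a hypothetical TermSketch type standing in for ICovarianceFunction:

using System;
using System.Collections.Generic;
using System.Linq;

// Standalone sketch of distributing a flat hyperparameter vector over terms.
// TermSketch is a hypothetical stand-in for an ICovarianceFunction term.
public class TermSketch {
  public int NumberOfParameters;
  public double[] AssignedParameters;
}

public static class SumCovarianceSketch {
  public static void Distribute(IList<TermSketch> terms, double[] hyp) {
    int offset = 0;
    foreach (var t in terms) {
      t.AssignedParameters = hyp.Skip(offset).Take(t.NumberOfParameters).ToArray();
      offset += t.NumberOfParameters; // advance by parameters, not variables
    }
  }

  public static void Main() {
    var terms = new List<TermSketch> {
      new TermSketch { NumberOfParameters = 2 },
      new TermSketch { NumberOfParameters = 1 }
    };
    Distribute(terms, new[] { 0.1, 0.2, 0.3 });
    Console.WriteLine(string.Join(", ", terms[1].AssignedParameters)); // 0.3
  }
}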
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

r8396 → r8416

     [Storable]
     private double negativeLogLikelihood;
-
     public double NegativeLogLikelihood {
       get { return negativeLogLikelihood; }
…
     }
     [Storable]
-    private double[] hyp;
-    public IEnumerable<double> Hyperparameters {
-      get { return hyp; }
-    }
-    [Storable]
     private string targetVariable;
     public string TargetVariable {
…
     [Storable]
     private double sqrSigmaNoise;
-    [Storable]
-    private double[] meanHyp;
-    [Storable]
-    private double[] covHyp;

     [Storable]
…
     private GaussianProcessModel(GaussianProcessModel original, Cloner cloner)
       : base(original, cloner) {
-      this.hyp = original.hyp;
       this.meanFunction = cloner.Clone(original.meanFunction);
       this.covarianceFunction = cloner.Clone(original.covarianceFunction);
+      this.scaling = cloner.Clone(original.scaling);
       this.negativeLogLikelihood = original.negativeLogLikelihood;
       this.targetVariable = original.targetVariable;
+      this.sqrSigmaNoise = original.sqrSigmaNoise;
+
+      // shallow copies of arrays because they cannot be modified
       this.allowedInputVariables = original.allowedInputVariables;
       this.alpha = original.alpha;
-      this.sqrSigmaNoise = original.sqrSigmaNoise;
-      this.scaling = cloner.Clone(original.scaling);
-      this.meanHyp = original.meanHyp;
-      this.covHyp = original.covHyp;
       this.l = original.l;
       this.x = original.x;
…
       this.name = ItemName;
       this.description = ItemDescription;
-      this.hyp = hyp.ToArray();
-      this.meanFunction = meanFunction;
-      this.covarianceFunction = covarianceFunction;
+      this.meanFunction = (IMeanFunction)meanFunction.Clone();
+      this.covarianceFunction = (ICovarianceFunction)covarianceFunction.Clone();
       this.targetVariable = targetVariable;
       this.allowedInputVariables = allowedInputVariables.ToArray();
-      int nAllowedVariables = allowedInputVariables.Count();

       sqrSigmaNoise = Math.Exp(2.0 * hyp.First());
       sqrSigmaNoise = Math.Max(10E-6, sqrSigmaNoise); // lower limit for the noise level
-      meanHyp = hyp.Skip(1).Take(meanFunction.GetNumberOfParameters(nAllowedVariables)).ToArray();
-      covHyp = hyp.Skip(1 + meanFunction.GetNumberOfParameters(nAllowedVariables)).Take(covarianceFunction.GetNumberOfParameters(nAllowedVariables)).ToArray();
-
-      CalculateModel(ds, targetVariable, allowedInputVariables, rows);
-    }
-
-    private void CalculateModel(Dataset ds, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows) {
+
+      int nVariables = this.allowedInputVariables.Length;
+      this.meanFunction.SetParameter(hyp.Skip(1)
+        .Take(this.meanFunction.GetNumberOfParameters(nVariables))
+        .ToArray());
+      this.covarianceFunction.SetParameter(hyp.Skip(1 + this.meanFunction.GetNumberOfParameters(nVariables))
+        .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
+        .ToArray());
+
+      CalculateModel(ds, rows);
+    }
+
+    private void CalculateModel(Dataset ds, IEnumerable<int> rows) {
       scaling = new Scaling(ds, allowedInputVariables, rows);
       x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling);
…
       l = new double[n, n];

-      meanFunction.SetParameter(meanHyp, x);
-      covarianceFunction.SetParameter(covHyp, x);
+      meanFunction.SetData(x);
+      covarianceFunction.SetData(x);

       // calculate means and covariances
…
       //kss = covarianceFunction.GetDiagonalCovariances();

-      covarianceFunction.SetParameter(covHyp, x, newX);
-      meanFunction.SetParameter(meanHyp, newX);
+      covarianceFunction.SetData(x, newX);
+      meanFunction.SetData(newX);
       var ms = meanFunction.GetMean(newX);
       for (int i = 0; i < newN; i++) {
…

     }
-
-    #region events
-    public event EventHandler Changed;
-    private void OnChanged(EventArgs e) {
-      var handlers = Changed;
-      if (handlers != null)
-        handlers(this, e);
-    }
-    #endregion
   }
 }
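The model no longer stores meanHyp/covHyp arrays; instead the constructor splits the flat hyperparameter vector as [noise, mean parameters..., covariance parameters...] and pushes the slices into the mean and covariance functions via SetParameter. A standalone sketch of that split with illustrative parameter counts (the real model queries GetNumberOfParameters(nVariables) for the counts):

using System;
using System.Linq;

// Standalone sketch of the hyperparameter layout used above:
// hyp = [noise, mean parameters..., covariance parameters...].
public static class HyperparameterSplitSketch {
  public static void Main() {
    double[] hyp = { -1.0, 0.5, 0.1, 0.2 }; // 1 noise + 1 mean + 2 covariance (illustrative)
    int meanParameterCount = 1;
    int covParameterCount = 2;

    double sqrSigmaNoise = Math.Max(10E-6, Math.Exp(2.0 * hyp[0])); // same lower limit as the model
    double[] meanHyp = hyp.Skip(1).Take(meanParameterCount).ToArray();
    double[] covHyp = hyp.Skip(1 + meanParameterCount).Take(covParameterCount).ToArray();

    Console.WriteLine(sqrSigmaNoise);
    Console.WriteLine(string.Join(", ", meanHyp)); // 0.5
    Console.WriteLine(string.Join(", ", covHyp));  // 0.1, 0.2
  }
}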
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs

r8401 → r8416

       var results = ResultsParameter.ActualValue;
       if (!results.ContainsKey(SolutionParameterName)) {
-        results.Add(new Result(ResultsParameterName, "The Gaussian process regression solution", s));
+        results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
         results.Add(new Result(TrainingRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the training partition.", new DoubleValue(s.TrainingRSquared)));
         results.Add(new Result(TestRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the test partition.", new DoubleValue(s.TestRSquared)));
       } else {
-        results[ResultsParameterName].Value = s;
+        results[SolutionParameterName].Value = s;
         results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
         results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs

r8401 → r8416

   public interface ICovarianceFunction : IItem {
     int GetNumberOfParameters(int numberOfVariables);
-    void SetParameter(double[] hyp, double[,] x);
-    void SetParameter(double[] hyp, double[,] x, double[,] xt);
+    void SetParameter(double[] hyp);
+    void SetData(double[,] x);
+    void SetData(double[,] x, double[,] xt);

     double GetCovariance(int i, int j);
-    double[] GetDiagonalCovariances();
     double[] GetGradient(int i, int j);
   }
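The interface change is the core of this changeset: hyperparameters and data are now supplied through separate calls, and the diagonal-covariance method is gone. A standalone restatement of the resulting contract (illustrative, without the HeuristicLab IItem base), where the typical call order is GetNumberOfParameters, then SetParameter, then SetData, then GetCovariance/GetGradient:

// Standalone restatement of the refactored contract; names mirror the
// interface above but this sketch is not the HeuristicLab declaration.
public interface ICovarianceFunctionSketch {
  int GetNumberOfParameters(int numberOfVariables);
  void SetParameter(double[] hyp);                  // hyperparameters only
  void SetData(double[,] x);                        // symmetric case (training covariance)
  void SetData(double[,] x, double[,] xt);          // cross-covariance between two matrices
  double GetCovariance(int i, int j);
  double[] GetGradient(int i, int j);               // one entry per hyperparameter
}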
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs

r8401 → r8416

   public interface IMeanFunction : IItem {
     int GetNumberOfParameters(int numberOfVariables);
-    void SetParameter(double[] hyp, double[,] x);
+    void SetParameter(double[] hyp);
+    void SetData(double[,] x);
     double[] GetMean(double[,] x);
     double[] GetGradients(int k, double[,] x);
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanConst.cs

r8401 → r8416

     [Storable]
     private double c;
-    [Storable]
-    private int n;
     public int GetNumberOfParameters(int numberOfVariables) {
       return 1;
…
       : base(original, cloner) {
       this.c = original.c;
-      this.n = original.n;
     }
     public MeanConst()
…
     }

-    public void SetParameter(double[] hyp, double[,] x) {
+    public void SetParameter(double[] hyp) {
       if (hyp.Length != 1) throw new ArgumentException("Only one hyper-parameter allowed for constant mean function.", "hyp");
       this.c = hyp[0];
-      this.n = x.GetLength(0);
+    }
+    public void SetData(double[,] x) {
+      // nothing to do
     }

     public double[] GetMean(double[,] x) {
-      return Enumerable.Repeat(c, n).ToArray();
+      return Enumerable.Repeat(c, x.GetLength(0)).ToArray();
     }

     public double[] GetGradients(int k, double[,] x) {
       if (k > 0) throw new ArgumentException();
-      return Enumerable.Repeat(1.0, n).ToArray();
+      return Enumerable.Repeat(1.0, x.GetLength(0)).ToArray();
     }

  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanLinear.cs

r8401 → r8416

     [Storable]
     private double[] alpha;
-    [Storable]
-    private int n;
     public int GetNumberOfParameters(int numberOfVariables) {
       return numberOfVariables;
…
         Array.Copy(original.alpha, alpha, original.alpha.Length);
       }
-      this.n = original.n;
     }
     public MeanLinear()
…
     }

-    public void SetParameter(double[] hyp, double[,] x) {
-      if (hyp.Length != x.GetLength(1)) throw new ArgumentException("Number of hyper-parameters must match the number of variables.", "hyp");
+    public void SetParameter(double[] hyp) {
       this.alpha = new double[hyp.Length];
       Array.Copy(hyp, alpha, hyp.Length);
-      this.n = x.GetLength(0);
+    }
+    public void SetData(double[,] x) {
+      // nothing to do
     }

     public double[] GetMean(double[,] x) {
+      // sanity check
+      if (alpha.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function.");
       int cols = x.GetLength(1);
+      int n = x.GetLength(0);
       return (from i in Enumerable.Range(0, n)
               let rowVector = from j in Enumerable.Range(0, cols)
…
     public double[] GetGradients(int k, double[,] x) {
       int cols = x.GetLength(1);
+      int n = x.GetLength(0);
       if (k > cols) throw new ArgumentException();
       return (from r in Enumerable.Range(0, n)
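With the stored row count n removed, MeanLinear now derives everything from the matrix passed to GetMean: the mean of row r is the dot product of the coefficient vector alpha with that input row. A standalone sketch with illustrative names, not the HeuristicLab class:

using System;
using System.Linq;

// Standalone sketch of the linear mean function: mean of row r is
// sum_j alpha[j] * x[r, j], with the row count read from x itself.
public static class LinearMeanSketch {
  public static double[] GetMean(double[] alpha, double[,] x) {
    if (alpha.Length != x.GetLength(1))
      throw new ArgumentException("The number of coefficients must match the number of variables.");
    int n = x.GetLength(0);
    int cols = x.GetLength(1);
    return (from r in Enumerable.Range(0, n)
            select Enumerable.Range(0, cols).Sum(j => alpha[j] * x[r, j]))
           .ToArray();
  }

  public static void Main() {
    var x = new double[,] { { 1.0, 2.0 }, { 3.0, 4.0 } };
    Console.WriteLine(string.Join(", ", GetMean(new[] { 0.5, -1.0 }, x))); // -1.5, -2.5
  }
}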
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanZero.cs

r8401 → r8416

   [Item(Name = "MeanZero", Description = "Constant zero mean function for Gaussian processes.")]
   public class MeanZero : Item, IMeanFunction {
-    [Storable]
-    private int n;
     public int GetNumberOfParameters(int numberOfVariables) {
       return 0;
…
     protected MeanZero(MeanZero original, Cloner cloner)
       : base(original, cloner) {
-      this.n = original.n;
     }
     public MeanZero() {
     }

-    public void SetParameter(double[] hyp, double[,] x) {
+    public void SetParameter(double[] hyp) {
       if (hyp.Length > 0) throw new ArgumentException("No hyper-parameters allowed for zero mean function.", "hyp");
-      this.n = x.GetLength(0);
+    }
+
+    public void SetData(double[,] x) {
+      // do nothing
     }

     public double[] GetMean(double[,] x) {
-      return Enumerable.Repeat(0.0, n).ToArray();
+      return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
     }

     public double[] GetGradients(int k, double[,] x) {
       if (k > 0) throw new ArgumentException();
-      return Enumerable.Repeat(0.0, n).ToArray();
+      return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
     }
     public override IDeepCloneable Clone(Cloner cloner) {