Timestamp: 08/08/12 23:59:16
Author: ascheibe
Message: #1861 merged changes from trunk into branch
Location: branches/HeuristicLab.Mono
Files: 4 edited

Legend: unchanged lines have a leading space, added lines are marked '+', removed lines are marked '-'.

  • branches/HeuristicLab.Mono

  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis

  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4

    • Property svn:ignore
       *.vs10x
       Plugin.cs
      +*.user
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r8323 → r8451

 using HeuristicLab.Problems.DataAnalysis;

-namespace HeuristicLab.Algorithms.DataAnalysis.GaussianProcess {
+namespace HeuristicLab.Algorithms.DataAnalysis {
   /// <summary>
   /// Represents a Gaussian process model.
     
   [Item("GaussianProcessModel", "Represents a Gaussian process posterior.")]
   public sealed class GaussianProcessModel : NamedItem, IGaussianProcessModel {
-
     [Storable]
     private double negativeLogLikelihood;
-
     public double NegativeLogLikelihood {
       get { return negativeLogLikelihood; }
     
     }
     [Storable]
-    private double[] hyp;
-    public IEnumerable<double> Hyperparameters {
-      get { return hyp; }
-    }
-    [Storable]
     private string targetVariable;
     public string TargetVariable {
     
     [Storable]
     private double sqrSigmaNoise;
-    [Storable]
-    private double[] meanHyp;
-    [Storable]
-    private double[] covHyp;

     [Storable]
     
     private GaussianProcessModel(GaussianProcessModel original, Cloner cloner)
       : base(original, cloner) {
-      this.hyp = original.hyp;
       this.meanFunction = cloner.Clone(original.meanFunction);
       this.covarianceFunction = cloner.Clone(original.covarianceFunction);
+      this.scaling = cloner.Clone(original.scaling);
       this.negativeLogLikelihood = original.negativeLogLikelihood;
       this.targetVariable = original.targetVariable;
+      this.sqrSigmaNoise = original.sqrSigmaNoise;
+
+      // shallow copies of arrays because they cannot be modified
       this.allowedInputVariables = original.allowedInputVariables;
       this.alpha = original.alpha;
-      this.sqrSigmaNoise = original.sqrSigmaNoise;
-      this.scaling = cloner.Clone(original.scaling);
-      this.meanHyp = original.meanHyp;
-      this.covHyp = original.covHyp;
       this.l = original.l;
       this.x = original.x;
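
    (For illustration only: the reordered cloning constructor above deep-clones the mutable members (meanFunction, covarianceFunction, scaling) through the cloner and, per the new comment, copies only references to arrays that are never modified after construction. A minimal C# sketch of that copy pattern; the class and member names are invented and not part of the HeuristicLab API.)

      // Sketch: deep-clone mutable members, share effectively immutable arrays.
      sealed class ModelSketch {
        private readonly double[] alpha;           // never mutated after construction -> safe to share
        private readonly MeanSketch meanFunction;  // mutable object -> must be deep-cloned

        public ModelSketch(double[] alpha, MeanSketch meanFunction) {
          this.alpha = alpha;
          this.meanFunction = meanFunction;
        }

        private ModelSketch(ModelSketch original) {
          this.meanFunction = original.meanFunction.Clone();  // deep copy
          this.alpha = original.alpha;                        // reference copy of a read-only array
        }

        public ModelSketch Clone() { return new ModelSketch(this); }
      }

      sealed class MeanSketch {
        public double Offset;
        public MeanSketch Clone() { return new MeanSketch { Offset = Offset }; }
      }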
     
       this.name = ItemName;
       this.description = ItemDescription;
-      this.hyp = hyp.ToArray();
-      this.meanFunction = meanFunction;
-      this.covarianceFunction = covarianceFunction;
+      this.meanFunction = (IMeanFunction)meanFunction.Clone();
+      this.covarianceFunction = (ICovarianceFunction)covarianceFunction.Clone();
       this.targetVariable = targetVariable;
       this.allowedInputVariables = allowedInputVariables.ToArray();
-      int nAllowedVariables = allowedInputVariables.Count();

       sqrSigmaNoise = Math.Exp(2.0 * hyp.First());
       sqrSigmaNoise = Math.Max(10E-6, sqrSigmaNoise); // lower limit for the noise level
-      meanHyp = hyp.Skip(1).Take(meanFunction.GetNumberOfParameters(nAllowedVariables)).ToArray();
-      covHyp = hyp.Skip(1 + meanFunction.GetNumberOfParameters(nAllowedVariables)).Take(covarianceFunction.GetNumberOfParameters(nAllowedVariables)).ToArray();
-
-      CalculateModel(ds, targetVariable, allowedInputVariables, rows);
-    }
-
-    private void CalculateModel(Dataset ds, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<int> rows) {
+
+      int nVariables = this.allowedInputVariables.Length;
+      this.meanFunction.SetParameter(hyp.Skip(1)
+        .Take(this.meanFunction.GetNumberOfParameters(nVariables))
+        .ToArray());
+      this.covarianceFunction.SetParameter(hyp.Skip(1 + this.meanFunction.GetNumberOfParameters(nVariables))
+        .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
+        .ToArray());
+
+      CalculateModel(ds, rows);
+    }
+
+    private void CalculateModel(Dataset ds, IEnumerable<int> rows) {
       scaling = new Scaling(ds, allowedInputVariables, rows);
       x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling);
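
    (With this change the model no longer stores separate meanHyp/covHyp arrays; the constructor splits the flat hyperparameter vector and hands each block to the corresponding function via SetParameter. Below is a minimal standalone sketch of that split, assuming the same layout as above, hyp = [noise term, mean-function parameters, covariance-function parameters]; the helper name and signature are illustrative and not part of HeuristicLab.)

      using System;
      using System.Linq;

      // Sketch: split a flat hyperparameter vector in the order the constructor above assumes.
      static class HyperparameterSplitSketch {
        public static void Split(double[] hyp, int nMeanParams, int nCovParams,
                                 out double sqrSigmaNoise, out double[] meanHyp, out double[] covHyp) {
          if (hyp.Length != 1 + nMeanParams + nCovParams)
            throw new ArgumentException("hyperparameter vector has unexpected length");

          // the first entry encodes the noise level on a log scale; the floor mirrors the lower limit above
          sqrSigmaNoise = Math.Max(10E-6, Math.Exp(2.0 * hyp[0]));

          meanHyp = hyp.Skip(1).Take(nMeanParams).ToArray();
          covHyp = hyp.Skip(1 + nMeanParams).Take(nCovParams).ToArray();
        }
      }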
     
       l = new double[n, n];

-      meanFunction.SetParameter(meanHyp, x);
-      covarianceFunction.SetParameter(covHyp, x);
+      meanFunction.SetData(x);
+      covarianceFunction.SetData(x);

       // calculate means and covariances
-      double[] m = meanFunction.GetMean();
+      double[] m = meanFunction.GetMean(x);
       for (int i = 0; i < n; i++) {

     
       double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
       for (int i = 0; i < meanGradients.Length; i++) {
-        var meanGrad = meanFunction.GetGradients(i);
+        var meanGrad = meanFunction.GetGradients(i, x);
         meanGradients[i] = -Util.ScalarProd(meanGrad, alpha);
       }

       double[] covGradients = new double[covarianceFunction.GetNumberOfParameters(nAllowedVariables)];
-      for (int i = 0; i < n; i++) {
-        for (int j = 0; j < n; j++) {
-          var covDeriv = covarianceFunction.GetGradient(i, j);
-          for (int k = 0; k < covGradients.Length; k++) {
-            covGradients[k] += q[i, j] * covDeriv[k];
+      if (covGradients.Length > 0) {
+        for (int i = 0; i < n; i++) {
+          for (int j = 0; j < n; j++) {
+            var covDeriv = covarianceFunction.GetGradient(i, j);
+            for (int k = 0; k < covGradients.Length; k++) {
+              covGradients[k] += q[i, j] * covDeriv[k];
+            }
           }
         }
-      }
-      covGradients = covGradients.Select(g => g / 2.0).ToArray();
+        covGradients = covGradients.Select(g => g / 2.0).ToArray();
+      }

       return new double[] { noiseGradient }
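
    (For reference: with q the weighting matrix computed earlier in this method, outside this hunk, the guarded double loop above accumulates, for each covariance hyperparameter \theta_k,

      covGradients[k] = \frac{1}{2} \sum_{i=1}^{n} \sum_{j=1}^{n} q_{ij} \, \frac{\partial k(x_i, x_j)}{\partial \theta_k}

    The new Length > 0 check only skips this O(n^2) accumulation when the covariance function has no tunable parameters; the computed values are otherwise unchanged.)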
     
       //kss = covarianceFunction.GetDiagonalCovariances();

-      covarianceFunction.SetParameter(covHyp, x, newX);
-      meanFunction.SetParameter(meanHyp, newX);
-      var ms = meanFunction.GetMean();
+      covarianceFunction.SetData(x, newX);
+      meanFunction.SetData(newX);
+      var ms = meanFunction.GetMean(newX);
       for (int i = 0; i < newN; i++) {

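
    (For reference: the prediction path now requests cross-covariances via SetData(x, newX) and evaluates the mean function at the new inputs. Assuming the usual Gaussian process definitions, with \alpha the vector computed during training, the posterior mean that the surrounding loop (outside this hunk) corresponds to is

      \mu(x_*) = m(x_*) + \sum_{j=1}^{n} k(x_*, x_j)\, \alpha_j, \qquad \alpha = (K + \sigma_n^2 I)^{-1} (y - m(X))

    where K is the covariance matrix of the training inputs and \sigma_n^2 the noise variance.)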
     

     }
-
-    #region events
-    public event EventHandler Changed;
-    private void OnChanged(EventArgs e) {
-      var handlers = Changed;
-      if (handlers != null)
-        handlers(this, e);
-    }
-    #endregion
   }
 }