
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialIso.cs @ 13784

Last change on this file since 13784 was 13784, checked in by pfleck, 8 years ago

#2591 Made the creation of a GaussianProcessModel faster by avoiding additional iterators during calculation of the hyperparameter gradients.
The gradients of the hyperparameters are now calculated in one sweep and returned as an IList, instead of returning an iterator (with yield return).
This avoids a large number of MoveNext calls on the iterator, especially for covariance functions with many hyperparameters.
In addition, the signature of the CovarianceGradientFunctionDelegate was changed to return an IList instead of an IEnumerable, avoiding unnecessary ToList or ToArray calls.
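
The effect of the change can be shown with a small, hypothetical sketch (names are illustrative, not the HeuristicLab API): a gradient routine that yields its values lazily forces one MoveNext call per consumed element on the compiler-generated iterator, while building the values in one sweep and returning an IList pays a single small allocation and allows plain indexed access.

using System.Collections.Generic;

static class GradientReturnSketch {
  // Lazy variant: each element the caller consumes costs a MoveNext call on the
  // compiler-generated state machine, plus a ToList/ToArray if a list is needed.
  public static IEnumerable<double> GradientsLazy(double sf2, double g, double d) {
    yield return sf2 * g * d;
    yield return 2.0 * sf2 * g;
  }

  // One-sweep variant, as adopted in this revision: all gradient values are computed
  // up front and returned as an IList, avoiding further iterator overhead.
  public static IList<double> GradientsEager(double sf2, double g, double d) {
    return new List<double>(2) { sf2 * g * d, 2.0 * sf2 * g };
  }
}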

File size: 5.4 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  [Item(Name = "CovarianceSquaredExponentialIso",
    Description = "Isotropic squared exponential covariance function for Gaussian processes.")]
  public sealed class CovarianceSquaredExponentialIso : ParameterizedNamedItem, ICovarianceFunction {
    public IValueParameter<DoubleValue> ScaleParameter {
      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    }

    public IValueParameter<DoubleValue> InverseLengthParameter {
      get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; }
    }

    private bool HasFixedInverseLengthParameter {
      get { return InverseLengthParameter.Value != null; }
    }
    private bool HasFixedScaleParameter {
      get { return ScaleParameter.Value != null; }
    }

    [StorableConstructor]
    private CovarianceSquaredExponentialIso(bool deserializing)
      : base(deserializing) {
    }

    private CovarianceSquaredExponentialIso(CovarianceSquaredExponentialIso original, Cloner cloner)
      : base(original, cloner) {
    }

    public CovarianceSquaredExponentialIso()
      : base() {
      Name = ItemName;
      Description = ItemDescription;

      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the isotropic squared exponential covariance function."));
      Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isotropic squared exponential covariance function."));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new CovarianceSquaredExponentialIso(this, cloner);
    }

    public int GetNumberOfParameters(int numberOfVariables) {
      return
        (HasFixedScaleParameter ? 0 : 1) +
        (HasFixedInverseLengthParameter ? 0 : 1);
    }

    public void SetParameter(double[] p) {
      double scale, inverseLength;
      GetParameterValues(p, out scale, out inverseLength);
      ScaleParameter.Value = new DoubleValue(scale);
      InverseLengthParameter.Value = new DoubleValue(inverseLength);
    }

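    // Free hyperparameters are passed on a log scale: for each parameter that is not fixed,
    // p contains log(lengthScale) and log(sqrt(scale)) respectively, so
    //   inverseLength = 1 / exp(p[c])   and   scale = exp(2 * p[c]).
    // Fixed parameters (non-null parameter values) are read directly and are not part of p.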
    private void GetParameterValues(double[] p, out double scale, out double inverseLength) {
      // gather parameter values
      int c = 0;
      if (HasFixedInverseLengthParameter) {
        inverseLength = InverseLengthParameter.Value.Value;
      } else {
        inverseLength = 1.0 / Math.Exp(p[c]);
        c++;
      }

      if (HasFixedScaleParameter) {
        scale = ScaleParameter.Value.Value;
      } else {
        scale = Math.Exp(2 * p[c]);
        c++;
      }
      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialIso", "p");
    }

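    // Builds the parameterized isotropic squared exponential covariance
    //   cov(x_i, x_j) = scale * exp(-d / 2),
    // where d is the squared distance between rows i and j over the selected columns
    // (Util.SqrDist is assumed to scale the per-column differences by inverseLength before squaring).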
    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, int[] columnIndices) {
      double inverseLength, scale;
      GetParameterValues(p, out scale, out inverseLength);
      var fixedInverseLength = HasFixedInverseLengthParameter;
      var fixedScale = HasFixedScaleParameter;
      // create functions
      var cov = new ParameterizedCovarianceFunction();
      cov.Covariance = (x, i, j) => {
        double d = i == j
                ? 0.0
                : Util.SqrDist(x, i, j, columnIndices, inverseLength);
        return scale * Math.Exp(-d / 2.0);
      };
      cov.CrossCovariance = (x, xt, i, j) => {
        double d = Util.SqrDist(x, i, xt, j, columnIndices, inverseLength);
        return scale * Math.Exp(-d / 2.0);
      };
      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, inverseLength, columnIndices,
        fixedInverseLength, fixedScale);
      return cov;
    }

    // order of returned gradients must match the order in GetParameterValues!
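    // With d as computed below, the returned entries are the derivatives of the covariance
    // with respect to the log-transformed free hyperparameters:
    //   d cov / d log(lengthScale) = sf2 * exp(-d/2) * d
    //   d cov / d log(sqrt(sf2))   = 2 * sf2 * exp(-d/2)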
    private static IList<double> GetGradient(double[,] x, int i, int j, double sf2, double inverseLength, int[] columnIndices,
      bool fixedInverseLength, bool fixedScale) {
      double d = i == j
                   ? 0.0
                   : Util.SqrDist(x, i, j, columnIndices, inverseLength);
      double g = Math.Exp(-d / 2.0);
      var gr = new List<double>(2);
      if (!fixedInverseLength) gr.Add(sf2 * g * d);
      if (!fixedScale) gr.Add(2.0 * sf2 * g);
      return gr;
    }
  }
}
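
For orientation, a minimal usage sketch (hypothetical, not part of the repository file; the delegate signatures on ParameterizedCovarianceFunction are inferred from the lambdas above and may differ in detail):

// Hypothetical usage: both hyperparameters left free, so p supplies
// log(lengthScale) and log(sigma_f) in the order defined by GetParameterValues.
var covFunc = new CovarianceSquaredExponentialIso();
double[,] x = { { 0.0, 1.0 }, { 0.5, 2.0 } };            // two rows, two input columns
int freeParams = covFunc.GetNumberOfParameters(2);        // -> 2
var cov = covFunc.GetParameterizedCovarianceFunction(
  new[] { Math.Log(1.0), Math.Log(1.0) },                 // lengthScale = 1, sigma_f = 1
  new[] { 0, 1 });                                        // use both columns
double k = cov.Covariance(x, 0, 1);                       // assumed Func<double[,], int, int, double>
IList<double> grad = cov.CovarianceGradient(x, 0, 1);     // gradients w.r.t. the free (log) parameters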