Free cookie consent management tool by TermsFeed Policy Generator

source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMaternIso.cs @ 13784

Last change on this file since 13784 was 13784, checked in by pfleck, 8 years ago

#2591 Made the creation of a GaussianProcessModel faster by avoiding additional iterators during calculation of the hyperparameter gradients.
The gradients of the hyperparameters are now calculated in one sweep and returned as IList, instead of returning an iterator (with yield return).
This avoids a large amount of Move-calls of the iterator, especially for covariance functions with a lot of hyperparameters.
In addition, the signature of the CovarianceGradientFunctionDelegate was changed to return an IList instead of an IEnumerable, to avoid unnecessary ToList or ToArray calls.

File size: 6.5 KB
Line 
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Algorithms.DataAnalysis {
  // Isotropic Matern covariance function for Gaussian processes.
  // Supported smoothness degrees d are 1, 3, and 5 (see m() below).
  [StorableClass]
  [Item(Name = "CovarianceMaternIso",
    Description = "Matern covariance function for Gaussian processes.")]
  public sealed class CovarianceMaternIso : ParameterizedNamedItem, ICovarianceFunction {
    public IValueParameter<DoubleValue> InverseLengthParameter {
      get { return (IValueParameter<DoubleValue>)Parameters["InverseLength"]; }
    }

    public IValueParameter<DoubleValue> ScaleParameter {
      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    }

    public IConstrainedValueParameter<IntValue> DParameter {
      get { return (IConstrainedValueParameter<IntValue>)Parameters["D"]; }
    }

    // A hyperparameter is "fixed" when an explicit value has been set;
    // fixed hyperparameters are excluded from the optimized parameter vector p.
    private bool HasFixedScaleParameter {
      get { return ScaleParameter.Value != null; }
    }
    private bool HasFixedInverseLengthParameter {
      get { return InverseLengthParameter.Value != null; }
    }

    [StorableConstructor]
    private CovarianceMaternIso(bool deserializing)
      : base(deserializing) {
    }

    private CovarianceMaternIso(CovarianceMaternIso original, Cloner cloner)
      : base(original, cloner) {
    }

    public CovarianceMaternIso()
      : base() {
      Name = ItemName;
      Description = ItemDescription;

      Parameters.Add(new OptionalValueParameter<DoubleValue>("InverseLength", "The inverse length parameter of the isometric Matern covariance function."));
      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter of the isometric Matern covariance function."));
      var validDValues = new ItemSet<IntValue>();
      validDValues.Add((IntValue)new IntValue(1).AsReadOnly());
      validDValues.Add((IntValue)new IntValue(3).AsReadOnly());
      validDValues.Add((IntValue)new IntValue(5).AsReadOnly());
      Parameters.Add(new ConstrainedValueParameter<IntValue>("D", "The d parameter (allowed values: 1, 3, or 5) of the isometric Matern covariance function.", validDValues, validDValues.First()));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new CovarianceMaternIso(this, cloner);
    }

    // Number of free (non-fixed) hyperparameters; d is a structural choice, not optimized.
    public int GetNumberOfParameters(int numberOfVariables) {
      return
        (HasFixedInverseLengthParameter ? 0 : 1) +
        (HasFixedScaleParameter ? 0 : 1);
    }

    public void SetParameter(double[] p) {
      double inverseLength, scale;
      GetParameterValues(p, out scale, out inverseLength);
      InverseLengthParameter.Value = new DoubleValue(inverseLength);
      ScaleParameter.Value = new DoubleValue(scale);
    }

    // Extracts scale and inverse length from the hyperparameter vector p.
    // Free entries are log-transformed: inverseLength = exp(-p[i]), scale = exp(2 * p[i]).
    // Throws ArgumentException when p has a different length than the number of free parameters.
    private void GetParameterValues(double[] p, out double scale, out double inverseLength) {
      // gather parameter values
      int c = 0;
      if (HasFixedInverseLengthParameter) {
        inverseLength = InverseLengthParameter.Value.Value;
      } else {
        inverseLength = 1.0 / Math.Exp(p[c]);
        c++;
      }

      if (HasFixedScaleParameter) {
        scale = ScaleParameter.Value.Value;
      } else {
        scale = Math.Exp(2 * p[c]);
        c++;
      }
      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceMaternIso", "p");
    }

    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, int[] columnIndices) {
      double inverseLength, scale;
      int d = DParameter.Value.Value;
      GetParameterValues(p, out scale, out inverseLength);
      var fixedInverseLength = HasFixedInverseLengthParameter;
      var fixedScale = HasFixedScaleParameter;
      // hoisted out of the lambdas: sqrt(d) * inverseLength is invariant for this
      // parameterization and was previously recomputed on every covariance evaluation
      double scaledInverseLength = Math.Sqrt(d) * inverseLength;
      // create functions
      var cov = new ParameterizedCovarianceFunction();
      cov.Covariance = (x, i, j) => {
        double dist = i == j
                       ? 0.0
                       : Math.Sqrt(Util.SqrDist(x, i, j, columnIndices, scaledInverseLength));
        return scale * m(d, dist);
      };
      cov.CrossCovariance = (x, xt, i, j) => {
        double dist = Math.Sqrt(Util.SqrDist(x, i, xt, j, columnIndices, scaledInverseLength));
        return scale * m(d, dist);
      };
      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, d, scale, inverseLength, columnIndices, fixedInverseLength, fixedScale);
      return cov;
    }

    // Matern kernel profile f(t) * exp(-t), where the polynomial f depends on d:
    // d=1: 1, d=3: 1+t, d=5: 1 + t + t^2/3. Throws for any other d.
    private static double m(int d, double t) {
      double f;
      switch (d) {
        case 1: { f = 1; break; }
        case 3: { f = 1 + t; break; }
        case 5: { f = 1 + t * (1 + t / 3.0); break; }
        default: throw new InvalidOperationException();
      }
      return f * Math.Exp(-t);
    }

    // Derivative helper for the length-scale gradient (used by GetGradient);
    // returns df(t) * t * exp(-t) with df depending on d as above.
    private static double dm(int d, double t) {
      double df;
      switch (d) {
        case 1: { df = 1; break; }
        case 3: { df = t; break; }
        case 5: { df = t * (1 + t) / 3.0; break; }
        default: throw new InvalidOperationException();
      }
      return df * t * Math.Exp(-t);
    }

    // Gradient of the covariance w.r.t. the free (log-transformed) hyperparameters,
    // in the same order as the parameter vector: inverse length first, then scale.
    // Fixed hyperparameters contribute no entry (see changeset note: returns IList
    // to avoid iterator overhead during hyperparameter optimization).
    private static IList<double> GetGradient(double[,] x, int i, int j, int d, double scale, double inverseLength, int[] columnIndices,
      bool fixedInverseLength, bool fixedScale) {
      double dist = i == j
                   ? 0.0
                   : Math.Sqrt(Util.SqrDist(x, i, j, columnIndices, Math.Sqrt(d) * inverseLength));

      var g = new List<double>(2);
      if (!fixedInverseLength) g.Add(scale * dm(d, dist));
      if (!fixedScale) g.Add(2 * scale * m(d, dist));
      return g;
    }
  }
}
Note: See TracBrowser for help on using the repository browser.