
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceNeuralNetwork.cs @ 10490

Last change on this file since 10490 was 10490, checked in by gkronber, 10 years ago

#2125 code cleanup

File size: 5.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using AutoDiff;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Algorithms.DataAnalysis {
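  // Neural network (arcsine) covariance function for Gaussian processes:
  //   k(xi, xj) = scale * asin( sx / sqrt((length + si) * (length + sj)) )
  // where sx = 1 + xi . xj, si = 1 + xi . xi and sj = 1 + xj . xj, i.e. each
  // input vector is implicitly augmented with a constant bias component of 1.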
  [StorableClass]
  [Item(Name = "CovarianceNeuralNetwork",
    Description = "Neural network covariance function for Gaussian processes.")]
  public sealed class CovarianceNeuralNetwork : ParameterizedNamedItem, ICovarianceFunction {
    public IValueParameter<DoubleValue> ScaleParameter {
      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    }

    public IValueParameter<DoubleValue> LengthParameter {
      get { return (IValueParameter<DoubleValue>)Parameters["Length"]; }
    }
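    // A hyperparameter counts as fixed when a value has been set explicitly;
    // fixed hyperparameters are excluded from optimization (see GetNumberOfParameters).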
    private bool HasFixedScaleParameter {
      get { return ScaleParameter.Value != null; }
    }
    private bool HasFixedLengthParameter {
      get { return LengthParameter.Value != null; }
    }

    [StorableConstructor]
    private CovarianceNeuralNetwork(bool deserializing)
      : base(deserializing) {
    }

    private CovarianceNeuralNetwork(CovarianceNeuralNetwork original, Cloner cloner)
      : base(original, cloner) {
    }

    public CovarianceNeuralNetwork()
      : base() {
      Name = ItemName;
      Description = ItemDescription;

      Parameters.Add(new OptionalValueParameter<DoubleValue>("Scale", "The scale parameter."));
      Parameters.Add(new OptionalValueParameter<DoubleValue>("Length", "The length parameter."));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new CovarianceNeuralNetwork(this, cloner);
    }
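    // Number of hyperparameters that still have to be optimized; fixed parameters are not counted.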
    public int GetNumberOfParameters(int numberOfVariables) {
      return
        (HasFixedScaleParameter ? 0 : 1) +
        (HasFixedLengthParameter ? 0 : 1);
    }

    public void SetParameter(double[] p) {
      double scale, length;
      GetParameterValues(p, out scale, out length);
      ScaleParameter.Value = new DoubleValue(scale);
      LengthParameter.Value = new DoubleValue(length);
    }


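    // Entries of p are in log scale; exp(2 * p[c]) maps the unconstrained optimizer
    // values to positive hyperparameter values. Fixed hyperparameters are read from
    // the parameter objects and do not consume an entry of p.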
    private void GetParameterValues(double[] p, out double scale, out double length) {
      // gather parameter values
      int c = 0;
      if (HasFixedLengthParameter) {
        length = LengthParameter.Value.Value;
      } else {
        length = Math.Exp(2 * p[c]);
        c++;
      }

      if (HasFixedScaleParameter) {
        scale = ScaleParameter.Value.Value;
      } else {
        scale = Math.Exp(2 * p[c]);
        c++;
      }
      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceNeuralNetwork", "p");
    }
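    // Captures the current hyperparameter values in closures so that the returned
    // covariance, cross-covariance and gradient delegates can be evaluated repeatedly
    // without accessing the parameter objects again.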
    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
      double length, scale;
      GetParameterValues(p, out scale, out length);
      var fixedLength = HasFixedLengthParameter;
      var fixedScale = HasFixedScaleParameter;

      var cov = new ParameterizedCovarianceFunction();
      cov.Covariance = (x, i, j) => {
        double sx = 1.0;
        double s1 = 1.0;
        double s2 = 1.0;
        foreach (var col in columnIndices) {
          sx += x[i, col] * x[j, col];
          s1 += x[i, col] * x[i, col];
          s2 += x[j, col] * x[j, col];
        }

        return (scale * Math.Asin(sx / (Math.Sqrt((length + s1) * (length + s2)))));
      };
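      // Same formula as above, but between rows of two different data matrices
      // (x and xt), e.g. when evaluating test inputs against training inputs.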
      cov.CrossCovariance = (x, xt, i, j) => {
        double sx = 1.0;
        double s1 = 1.0;
        double s2 = 1.0;
        foreach (var col in columnIndices) {
          sx += x[i, col] * xt[j, col];
          s1 += x[i, col] * x[i, col];
          s2 += xt[j, col] * xt[j, col];
        }

        return (scale * Math.Asin(sx / (Math.Sqrt((length + s1) * (length + s2)))));
      };
      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, length, scale, columnIndices, fixedLength, fixedScale);
      return cov;
    }
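    // Yields the partial derivatives of the covariance with respect to the free
    // entries of the parameter vector (which live in log scale, see GetParameterValues):
    // first the length gradient (if the length is free), then the scale gradient (if free).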
    // order of returned gradients must match the order in GetParameterValues!
    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double length, double scale, IEnumerable<int> columnIndices,
      bool fixedLength, bool fixedScale) {
      {
        double sx = 1.0;
        double s1 = 1.0;
        double s2 = 1.0;
        foreach (var col in columnIndices) {
          sx += x[i, col] * x[j, col];
          s1 += x[i, col] * x[i, col];
          s2 += x[j, col] * x[j, col];
        }
        var h = (length + s1) * (length + s2);
        var f = sx / Math.Sqrt(h);
        if (!fixedLength) {
          yield return -scale / Math.Sqrt(1.0 - f * f) * ((length * sx * (2.0 * length + s1 + s2)) / Math.Pow(h, 3.0 / 2.0));
        }
        if (!fixedScale) {
          yield return 2.0 * scale * Math.Asin(f);
        }
      }
    }
  }
}