#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2014 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Algorithms.DataAnalysis {
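  /// <summary>
  /// Covariance function that represents the sum of its term covariance functions.
  /// Covariance and cross-covariance values are the sums of the corresponding values of all
  /// terms; the hyperparameter gradient is the concatenation of the gradients of all terms.
  /// </summary>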
  [StorableClass]
  [Item(Name = "CovarianceSum",
    Description = "Sum covariance function for Gaussian processes.")]
  public sealed class CovarianceSum : Item, ICovarianceFunction {
    [Storable]
    private ItemList<ICovarianceFunction> terms;

    [Storable]
    private int numberOfVariables;
    public ItemList<ICovarianceFunction> Terms {
      get { return terms; }
    }

    [StorableConstructor]
    private CovarianceSum(bool deserializing)
      : base(deserializing) {
    }

    private CovarianceSum(CovarianceSum original, Cloner cloner)
      : base(original, cloner) {
      this.terms = cloner.Clone(original.terms);
      this.numberOfVariables = original.numberOfVariables;
    }

    public CovarianceSum()
      : base() {
      this.terms = new ItemList<ICovarianceFunction>();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new CovarianceSum(this, cloner);
    }

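    /// <summary>
    /// Returns the total number of hyperparameters, i.e. the sum of the parameter counts of all
    /// terms, and remembers the number of variables for later calls to SetParameter and
    /// GetParameterizedCovarianceFunction.
    /// </summary>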
    public int GetNumberOfParameters(int numberOfVariables) {
      this.numberOfVariables = numberOfVariables;
      return terms.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
    }

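    /// <summary>
    /// Distributes the flat hyperparameter vector p over the terms: each term receives as many
    /// consecutive entries of p as it reports via GetNumberOfParameters.
    /// </summary>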
    public void SetParameter(double[] p) {
      int offset = 0;
      foreach (var t in terms) {
        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
        t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray());
        offset += numberOfParameters;
      }
    }

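    /// <summary>
    /// Builds the parameterized sum covariance function: the hyperparameter vector p is split
    /// over the terms in term order, the covariance and cross-covariance of the sum are the sums
    /// over all terms, and the gradient is the concatenation of the term gradients.
    /// </summary>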
    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
      if (terms.Count == 0) throw new ArgumentException("At least one term is necessary for the sum covariance function.");
      var functions = new List<ParameterizedCovarianceFunction>();
      foreach (var t in terms) {
        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
        functions.Add(t.GetParameterizedCovarianceFunction(p.Take(numberOfParameters).ToArray(), columnIndices));
        p = p.Skip(numberOfParameters).ToArray();
      }

      var sum = new ParameterizedCovarianceFunction();
      sum.Covariance = (x, i, j) => functions.Select(e => e.Covariance(x, i, j)).Sum();
      sum.CrossCovariance = (x, xt, i, j) => functions.Select(e => e.CrossCovariance(x, xt, i, j)).Sum();
      sum.CovarianceGradient = (x, i, j) => functions.Select(e => e.CovarianceGradient(x, i, j)).Aggregate(Enumerable.Concat);
      return sum;
    }
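
    // Usage sketch (illustrative only, not part of the original class): a CovarianceSum is
    // typically configured by adding other ICovarianceFunction instances to Terms before the
    // Gaussian process model is built; the placeholder variables below are hypothetical.
    //
    //   var cov = new CovarianceSum();
    //   cov.Terms.Add(firstCovarianceFunction);   // any ICovarianceFunction
    //   cov.Terms.Add(secondCovarianceFunction);  // any ICovarianceFunction
    //   int n = cov.GetNumberOfParameters(numberOfInputVariables);
    //   cov.SetParameter(hyperparameters);        // double[] of length n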
  }
}