1  #region License Information


2  /* HeuristicLab


3  * Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL)


4  *


5  * This file is part of HeuristicLab.


6  *


7  * HeuristicLab is free software: you can redistribute it and/or modify


8  * it under the terms of the GNU General Public License as published by


9  * the Free Software Foundation, either version 3 of the License, or


10  * (at your option) any later version.


11  *


12  * HeuristicLab is distributed in the hope that it will be useful,


13  * but WITHOUT ANY WARRANTY; without even the implied warranty of


14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the


15  * GNU General Public License for more details.


16  *


17  * You should have received a copy of the GNU General Public License


18  * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.


19  */


20  #endregion


21 


22  using System;


23  using System.Collections.Generic;


24  using System.Linq;


25  using HeuristicLab.Common;


26  using HeuristicLab.Core;


27  using HEAL.Attic;


28 


29  namespace HeuristicLab.Algorithms.DataAnalysis {


[StorableType("8F1A684A98BE429ABDA2E1FB7DDF09F0")]
[Item(Name = "CovarianceSum",
  Description = "Sum covariance function for Gaussian processes.")]
public sealed class CovarianceSum : Item, ICovarianceFunction {
  [Storable]
  private ItemList<ICovarianceFunction> terms;

  // Cached by GetNumberOfParameters; SetParameter and
  // GetParameterizedCovarianceFunction rely on it to size each term's slice
  // of the flat hyperparameter vector.
  [Storable]
  private int numberOfVariables;

  /// <summary>The covariance functions whose values are added together.</summary>
  public ItemList<ICovarianceFunction> Terms {
    get { return terms; }
  }

  [StorableConstructor]
  private CovarianceSum(StorableConstructorFlag _) : base(_) {
  }

  private CovarianceSum(CovarianceSum original, Cloner cloner)
    : base(original, cloner) {
    this.terms = cloner.Clone(original.terms);
    this.numberOfVariables = original.numberOfVariables;
  }

  public CovarianceSum()
    : base() {
    this.terms = new ItemList<ICovarianceFunction>();
  }

  public override IDeepCloneable Clone(Cloner cloner) {
    return new CovarianceSum(this, cloner);
  }

  /// <summary>
  /// Returns the total number of hyperparameters over all terms and caches
  /// <paramref name="numberOfVariables"/> for subsequent calls to
  /// <see cref="SetParameter"/> and <see cref="GetParameterizedCovarianceFunction"/>.
  /// </summary>
  public int GetNumberOfParameters(int numberOfVariables) {
    this.numberOfVariables = numberOfVariables;
    return terms.Sum(t => t.GetNumberOfParameters(numberOfVariables));
  }

  /// <summary>
  /// Distributes the flat hyperparameter vector <paramref name="p"/> over the
  /// terms; each term consumes as many entries as it reports via
  /// <see cref="ICovarianceFunction.GetNumberOfParameters"/>.
  /// </summary>
  public void SetParameter(double[] p) {
    int offset = 0;
    foreach (var t in terms) {
      var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
      t.SetParameter(p.Skip(offset).Take(numberOfParameters).ToArray());
      offset += numberOfParameters;
    }
  }

  /// <summary>
  /// Builds the parameterized sum covariance: covariance, cross-covariance and
  /// gradient are the element-wise sum (resp. concatenation, for the gradient)
  /// over all terms, each term parameterized with its slice of
  /// <paramref name="p"/>.
  /// </summary>
  /// <exception cref="ArgumentException">Thrown when no terms are configured.</exception>
  public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, int[] columnIndices) {
    // BUGFIX: message previously said "product" (copy/paste from CovarianceProduct).
    if (terms.Count == 0) throw new ArgumentException("at least one term is necessary for the sum covariance function.");
    var functions = new List<ParameterizedCovarianceFunction>();
    // Slice the flat parameter vector with a running offset (same scheme as
    // SetParameter) instead of repeatedly reallocating the remainder via
    // p = p.Skip(n).ToArray(), which was O(n^2) in the parameter count.
    int offset = 0;
    foreach (var t in terms) {
      var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
      functions.Add(t.GetParameterizedCovarianceFunction(p.Skip(offset).Take(numberOfParameters).ToArray(), columnIndices));
      offset += numberOfParameters;
    }

    var sum = new ParameterizedCovarianceFunction();
    sum.Covariance = (x, i, j) => functions.Sum(e => e.Covariance(x, i, j));
    sum.CrossCovariance = (x, xt, i, j) => functions.Sum(e => e.CrossCovariance(x, xt, i, j));
    sum.CovarianceGradient = (x, i, j) => {
      // Gradient of a sum is the concatenation of the per-term gradients,
      // in term order (matches the parameter-slice order above).
      var g = new List<double>();
      foreach (var e in functions)
        g.AddRange(e.CovarianceGradient(x, i, j));
      return g;
    };
    return sum;
  }
}


97  }

