1  #region License Information


2  /* HeuristicLab


3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)


4  *


5  * This file is part of HeuristicLab.


6  *


7  * HeuristicLab is free software: you can redistribute it and/or modify


8  * it under the terms of the GNU General Public License as published by


9  * the Free Software Foundation, either version 3 of the License, or


10  * (at your option) any later version.


11  *


12  * HeuristicLab is distributed in the hope that it will be useful,


13  * but WITHOUT ANY WARRANTY; without even the implied warranty of


14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the


15  * GNU General Public License for more details.


16  *


17  * You should have received a copy of the GNU General Public License


18  * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.


19  */


20  #endregion


21 


22  using System;


23  using System.Collections.Generic;


24  using System.Linq;


25  using HeuristicLab.Common;


26  using HeuristicLab.Core;


27  using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;


28 


namespace HeuristicLab.Algorithms.DataAnalysis {
  [StorableClass]
  [Item(Name = "CovarianceProd",
    Description = "Product covariance function for Gaussian processes.")]
  public class CovarianceProd : Item, ICovarianceFunction {
    // Component covariance functions; the resulting covariance is their product:
    // k(x_i, x_j) = prod_t factors[t].GetCovariance(x, i, j)
    [Storable]
    private ItemList<ICovarianceFunction> factors;

    // Remembered from the most recent GetNumberOfParameters call; SetParameter
    // uses it to slice the flat hyperparameter vector per factor.
    [Storable]
    private int numberOfVariables;

    public ItemList<ICovarianceFunction> Factors {
      get { return factors; }
    }

    [StorableConstructor]
    protected CovarianceProd(bool deserializing)
      : base(deserializing) {
    }

    // BUGFIX: event handlers are not part of the serialized state, so they must
    // be re-attached after deserialization. Without this hook a deserialized
    // instance never clears cachedParameterMap when its factor list changes.
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      AttachEventHandlers();
    }

    protected CovarianceProd(CovarianceProd original, Cloner cloner)
      : base(original, cloner) {
      this.factors = cloner.Clone(original.factors);
      this.numberOfVariables = original.numberOfVariables;
      AttachEventHandlers();
    }

    public CovarianceProd()
      : base() {
      this.factors = new ItemList<ICovarianceFunction>();
      AttachEventHandlers();
    }

    // Any structural change to the factor list invalidates the cached
    // parameter-index map.
    private void AttachEventHandlers() {
      this.factors.CollectionReset += (sender, args) => ClearCache();
      this.factors.ItemsAdded += (sender, args) => ClearCache();
      this.factors.ItemsRemoved += (sender, args) => ClearCache();
      this.factors.ItemsReplaced += (sender, args) => ClearCache();
      this.factors.ItemsMoved += (sender, args) => ClearCache();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new CovarianceProd(this, cloner);
    }

    // Total number of hyperparameters is the sum over all factors.
    // Side effect: caches numberOfVariables for subsequent SetParameter calls.
    public int GetNumberOfParameters(int numberOfVariables) {
      this.numberOfVariables = numberOfVariables;
      return factors.Select(f => f.GetNumberOfParameters(numberOfVariables)).Sum();
    }

    // Distributes the flat hyperparameter vector hyp across the factors in
    // list order; each factor consumes as many entries as it declares.
    public void SetParameter(double[] hyp) {
      if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
      int offset = 0;
      foreach (var t in factors) {
        var numberOfParameters = t.GetNumberOfParameters(numberOfVariables);
        t.SetParameter(hyp.Skip(offset).Take(numberOfParameters).ToArray());
        offset += numberOfParameters;
      }
    }

    // Product of all factor covariances. NOTE(review): Aggregate throws
    // InvalidOperationException when the factor list is empty — confirm callers
    // always configure at least one factor (SetParameter enforces this).
    public double GetCovariance(double[,] x, int i, int j) {
      return factors.Select(f => f.GetCovariance(x, i, j)).Aggregate((a, b) => a * b);
    }

    // Gradient of the product w.r.t. each hyperparameter, yielded in the same
    // order as SetParameter consumes them. By the product rule the gradient of
    // a parameter belonging to factor ii is that factor's gradient multiplied
    // by the covariances of all OTHER factors.
    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
      var covariances = factors.Select(f => f.GetCovariance(x, i, j)).ToArray();
      for (int ii = 0; ii < factors.Count; ii++) {
        foreach (var g in factors[ii].GetGradient(x, i, j)) {
          double res = g;
          for (int jj = 0; jj < covariances.Length; jj++)
            if (ii != jj) res *= covariances[jj];
          yield return res;
        }
      }
    }

    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
      return factors.Select(f => f.GetCrossCovariance(x, xt, i, j)).Aggregate((a, b) => a * b);
    }

    // Maps a flat hyperparameter index k to (factor index, parameter index
    // within that factor). NOTE(review): currently no code path reads this map
    // (GetGradient enumerates factors directly) — candidate for removal once
    // confirmed unused by subclasses/callers elsewhere.
    private Dictionary<int, Tuple<int, int>> cachedParameterMap;
    private void ClearCache() {
      cachedParameterMap = null;
    }

    private void CalculateParameterMap() {
      cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
      int k = 0;
      for (int ti = 0; ti < factors.Count; ti++) {
        for (int ti_k = 0; ti_k < factors[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
          cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
        }
      }
    }
  }
}

