
Timestamp:
03/05/14 17:30:38 (10 years ago)
Author:
mkommend
Message:

#1998: Updated classification model comparison branch with trunk changes.

Location:
branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess
Files:
27 edited
5 copied

  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceConst.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    2222using System;
    2323using System.Collections.Generic;
     24using System.Linq;
    2425using HeuristicLab.Common;
    2526using HeuristicLab.Core;
     
    3637      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    3738    }
    38 
     39    private bool HasFixedScaleParameter {
     40      get { return ScaleParameter.Value != null; }
     41    }
    3942    [StorableConstructor]
    4043    private CovarianceConst(bool deserializing)
     
    5962
    6063    public int GetNumberOfParameters(int numberOfVariables) {
    61       return ScaleParameter.Value != null ? 0 : 1;
     64      return HasFixedScaleParameter ? 0 : 1;
    6265    }
    6366
     
    7174      int c = 0;
    7275      // gather parameter values
    73       if (ScaleParameter.Value != null) {
     76      if (HasFixedScaleParameter) {
    7477        scale = ScaleParameter.Value.Value;
    7578      } else {
     
    8790      cov.Covariance = (x, i, j) => scale;
    8891      cov.CrossCovariance = (x, xt, i, j) => scale;
    89       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, columnIndices);
     92      if (HasFixedScaleParameter) {
     93        cov.CovarianceGradient = (x, i, j) => Enumerable.Empty<double>();
     94      } else {
     95        cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, columnIndices);
     96      }
    9097      return cov;
    9198    }
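
    The change above illustrates the pattern applied throughout this changeset: each covariance function gains a HasFixed...Parameter guard, and when a hyperparameter is fixed its gradient entry is omitted (here the gradient enumeration becomes empty). A minimal standalone sketch of that pattern, using hypothetical names rather than the HeuristicLab API:

        using System;
        using System.Collections.Generic;
        using System.Linq;

        // Sketch of a constant covariance k(x, x') = sf^2 whose gradient enumeration
        // is empty when the scale hyperparameter is fixed (hypothetical class,
        // not the HeuristicLab API).
        public sealed class ConstCovarianceSketch {
          private readonly double? fixedScale;  // null => scale is a free hyperparameter

          public ConstCovarianceSketch(double? fixedScale = null) {
            this.fixedScale = fixedScale;
          }

          public bool HasFixedScale {
            get { return fixedScale.HasValue; }
          }

          // One free parameter (the log scale) unless the scale is fixed.
          public int GetNumberOfParameters() {
            return HasFixedScale ? 0 : 1;
          }

          // p holds the free hyperparameters in log scale, as GetParameterValues suggests.
          public void GetFunctions(double[] p,
                                   out Func<int, int, double> covariance,
                                   out Func<int, int, IEnumerable<double>> covarianceGradient) {
            double scale = HasFixedScale ? fixedScale.Value : Math.Exp(2 * p[0]);
            covariance = (i, j) => scale;
            if (HasFixedScale)
              covarianceGradient = (i, j) => Enumerable.Empty<double>();  // fixed parameter => no gradient entry
            else
              covarianceGradient = (i, j) => new[] { 2.0 * scale };       // d k / d log(sf) = 2 * sf^2
          }
        }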
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinear.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceLinearArd.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    3737      get { return (IValueParameter<DoubleArray>)Parameters["InverseLength"]; }
    3838    }
     39    private bool HasFixedInverseLengthParameter {
     40      get { return InverseLengthParameter.Value != null; }
     41    }
    3942
    4043    [StorableConstructor]
     
    5760
    5861    public int GetNumberOfParameters(int numberOfVariables) {
    59       if (InverseLengthParameter.Value == null)
     62      if (HasFixedInverseLengthParameter)
     63        return 0;
     64      else
    6065        return numberOfVariables;
    61       else
    62         return 0;
    6366    }
    6467
     
    7174    private void GetParameterValues(double[] p, out double[] inverseLength) {
    7275      // gather parameter values
    73       if (InverseLengthParameter.Value != null) {
     76      if (HasFixedInverseLengthParameter) {
    7477        inverseLength = InverseLengthParameter.Value.ToArray();
    7578      } else {
     
    8184      double[] inverseLength;
    8285      GetParameterValues(p, out inverseLength);
     86      var fixedInverseLength = HasFixedInverseLengthParameter;
    8387      // create functions
    8488      var cov = new ParameterizedCovarianceFunction();
    8589      cov.Covariance = (x, i, j) => Util.ScalarProd(x, i, j, inverseLength, columnIndices);
    8690      cov.CrossCovariance = (x, xt, i, j) => Util.ScalarProd(x, i, xt, j, inverseLength, columnIndices);
    87       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, inverseLength, columnIndices);
     91      if (fixedInverseLength)
     92        cov.CovarianceGradient = (x, i, j) => Enumerable.Empty<double>();
     93      else
     94        cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, inverseLength, columnIndices);
    8895      return cov;
    8996    }
    9097
    9198    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double[] inverseLength, IEnumerable<int> columnIndices) {
    92       if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
    93 
    9499      int k = 0;
    95100      foreach (int columnIndex in columnIndices) {
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMask.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    7474
    7575    public ParameterizedCovarianceFunction GetParameterizedCovarianceFunction(double[] p, IEnumerable<int> columnIndices) {
    76       if (columnIndices != null)
    77         throw new InvalidOperationException("Stacking of masking covariance functions is not supported.");
    7876      var cov = CovarianceFunctionParameter.Value;
    7977      var selectedDimensions = SelectedDimensionsParameter.Value;
    8078
    81       return cov.GetParameterizedCovarianceFunction(p, selectedDimensions);
     79      return cov.GetParameterizedCovarianceFunction(p, selectedDimensions.Intersect(columnIndices));
    8280    }
    8381  }
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceMaternIso.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    4545      get { return (IConstrainedValueParameter<IntValue>)Parameters["D"]; }
    4646    }
    47 
     47    private bool HasFixedScaleParameter {
     48      get { return ScaleParameter.Value != null; }
     49    }
     50    private bool HasFixedInverseLengthParameter {
     51      get { return InverseLengthParameter.Value != null; }
     52    }
    4853
    4954    [StorableConstructor]
     
    7681    public int GetNumberOfParameters(int numberOfVariables) {
    7782      return
    78         (InverseLengthParameter.Value != null ? 0 : 1) +
    79         (ScaleParameter.Value != null ? 0 : 1);
     83        (HasFixedInverseLengthParameter ? 0 : 1) +
     84        (HasFixedScaleParameter ? 0 : 1);
    8085    }
    8186
     
    9095      // gather parameter values
    9196      int c = 0;
    92       if (InverseLengthParameter.Value != null) {
     97      if (HasFixedInverseLengthParameter) {
    9398        inverseLength = InverseLengthParameter.Value.Value;
    9499      } else {
     
    97102      }
    98103
    99       if (ScaleParameter.Value != null) {
     104      if (HasFixedScaleParameter) {
    100105        scale = ScaleParameter.Value.Value;
    101106      } else {
     
    110115      int d = DParameter.Value.Value;
    111116      GetParameterValues(p, out scale, out inverseLength);
     117      var fixedInverseLength = HasFixedInverseLengthParameter;
     118      var fixedScale = HasFixedScaleParameter;
    112119      // create functions
    113120      var cov = new ParameterizedCovarianceFunction();
     
    122129        return scale * m(d, dist);
    123130      };
    124       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, d, scale, inverseLength, columnIndices);
     131      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, d, scale, inverseLength, columnIndices, fixedInverseLength, fixedScale);
    125132      return cov;
    126133    }
     
    149156
    150157
    151     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, int d, double scale, double inverseLength, IEnumerable<int> columnIndices) {
     158    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, int d, double scale, double inverseLength, IEnumerable<int> columnIndices,
     159      bool fixedInverseLength, bool fixedScale) {
    152160      double dist = i == j
    153161                   ? 0.0
    154162                   : Math.Sqrt(Util.SqrDist(x, i, j, Math.Sqrt(d) * inverseLength, columnIndices));
    155163
    156       yield return scale * dm(d, dist);
    157       yield return 2 * scale * m(d, dist);
     164      if (!fixedInverseLength) yield return scale * dm(d, dist);
     165      if (!fixedScale) yield return 2 * scale * m(d, dist);
    158166    }
    159167  }
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceNoise.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    3737      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    3838    }
     39    private bool HasFixedScaleParameter {
     40      get { return ScaleParameter.Value != null; }
     41    }
    3942
    4043    [StorableConstructor]
     
    6063
    6164    public int GetNumberOfParameters(int numberOfVariables) {
    62       return ScaleParameter.Value != null ? 0 : 1;
     65      return HasFixedScaleParameter ? 0 : 1;
    6366    }
    6467
     
    7275      int c = 0;
    7376      // gather parameter values
    74       if (ScaleParameter.Value != null) {
     77      if (HasFixedScaleParameter) {
    7578        scale = ScaleParameter.Value.Value;
    7679      } else {
     
    8487      double scale;
    8588      GetParameterValues(p, out scale);
     89      var fixedScale = HasFixedScaleParameter;
    8690      // create functions
    8791      var cov = new ParameterizedCovarianceFunction();
    8892      cov.Covariance = (x, i, j) => i == j ? scale : 0.0;
    89       cov.CrossCovariance = (x, xt, i, j) => 0.0;
    90       cov.CovarianceGradient = (x, i, j) => Enumerable.Repeat(i == j ? 2.0 * scale : 0.0, 1);
     93      cov.CrossCovariance = (x, xt, i, j) => Util.SqrDist(x, i, xt, j, 1.0, columnIndices) < 1e-9 ? scale : 0.0;
     94      if (fixedScale)
     95        cov.CovarianceGradient = (x, i, j) => Enumerable.Empty<double>();
     96      else
     97        cov.CovarianceGradient = (x, i, j) => Enumerable.Repeat(i == j ? 2.0 * scale : 0.0, 1);
    9198      return cov;
    9299    }
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovariancePeriodic.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    4646    }
    4747
     48    private bool HasFixedScaleParameter {
     49      get { return ScaleParameter.Value != null; }
     50    }
     51    private bool HasFixedInverseLengthParameter {
     52      get { return InverseLengthParameter.Value != null; }
     53    }
     54    private bool HasFixedPeriodParameter {
     55      get { return PeriodParameter.Value != null; }
     56    }
     57
    4858
    4959    [StorableConstructor]
     
    6878
    6979    public int GetNumberOfParameters(int numberOfVariables) {
    70       return (ScaleParameter.Value != null ? 0 : 1) +
    71        (PeriodParameter.Value != null ? 0 : 1) +
    72        (InverseLengthParameter.Value != null ? 0 : 1);
     80      return (HasFixedScaleParameter ? 0 : 1) +
     81       (HasFixedPeriodParameter ? 0 : 1) +
     82       (HasFixedInverseLengthParameter ? 0 : 1);
    7383    }
    7484
     
    8292
    8393
    84     private void GetParameterValues(double[] p, out double scale, out double period, out double inverseLength) {
     94    private void GetParameterValues(double[]
     95      p, out double scale, out double period, out double inverseLength) {
    8596      // gather parameter values
    8697      int c = 0;
    87       if (InverseLengthParameter.Value != null) {
     98      if (HasFixedInverseLengthParameter) {
    8899        inverseLength = InverseLengthParameter.Value.Value;
    89100      } else {
     
    91102        c++;
    92103      }
    93       if (PeriodParameter.Value != null) {
     104      if (HasFixedPeriodParameter) {
    94105        period = PeriodParameter.Value.Value;
    95106      } else {
     
    97108        c++;
    98109      }
    99       if (ScaleParameter.Value != null) {
     110      if (HasFixedScaleParameter) {
    100111        scale = ScaleParameter.Value.Value;
    101112      } else {
     
    109120      double inverseLength, period, scale;
    110121      GetParameterValues(p, out scale, out period, out inverseLength);
     122      var fixedInverseLength = HasFixedInverseLengthParameter;
     123      var fixedPeriod = HasFixedPeriodParameter;
     124      var fixedScale = HasFixedScaleParameter;
    111125      // create functions
    112126      var cov = new ParameterizedCovarianceFunction();
     
    127141        return scale * Math.Exp(-2.0 * k);
    128142      };
    129       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, period, inverseLength);
     143      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, period, inverseLength, fixedInverseLength, fixedPeriod, fixedScale);
    130144      return cov;
    131145    }
    132146
    133147
    134     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double period, double inverseLength) {
    135       double v = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j, columnIndices) / period;
    136       double gradient = Math.Sin(v) * inverseLength;
     148    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double period, double inverseLength,
     149      bool fixedInverseLength, bool fixedPeriod, bool fixedScale) {
     150      double k = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j, columnIndices) / period;
     151      double gradient = Math.Sin(k) * inverseLength;
    137152      gradient *= gradient;
    138       yield return 4.0 * scale * Math.Exp(-2.0 * gradient) * gradient;
    139       double r = Math.Sin(v) * inverseLength;
    140       yield return 4.0 * scale * inverseLength * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
    141       yield return 2.0 * scale * Math.Exp(-2 * gradient);
     153      if (!fixedInverseLength) yield return 4.0 * scale * Math.Exp(-2.0 * gradient) * gradient;
     154      if (!fixedPeriod) {
     155        double r = Math.Sin(k) * inverseLength;
     156        yield return 2.0 * k * scale * Math.Exp(-2 * r * r) * Math.Sin(2 * k) * inverseLength * inverseLength;
     157      }
     158      if (!fixedScale)
     159        yield return 2.0 * scale * Math.Exp(-2 * gradient);
     160
    142161    }
    143162
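
    For reference, the reordered yields above match the derivatives of the periodic kernel with respect to the log-transformed hyperparameters. A sketch of the correspondence, assuming (as GetParameterValues suggests) that inverseLength equals 1/\ell and that \ell, the period p_0, and \sigma_f are optimized in log scale: with v = \pi\, d(x_i, x_j) / p_0 and g = \sin^2(v)/\ell^2,

        k(x_i, x_j) = \sigma_f^2 \exp(-2g)
        \partial k / \partial \log \ell      = 4 \sigma_f^2 e^{-2g} g                    (skipped when the inverse length is fixed)
        \partial k / \partial \log p_0       = 2 v \sigma_f^2 e^{-2g} \sin(2v) / \ell^2  (skipped when the period is fixed)
        \partial k / \partial \log \sigma_f  = 2 \sigma_f^2 e^{-2g}                      (skipped when the scale is fixed)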
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceProduct.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticArd.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    4545      get { return (IValueParameter<DoubleValue>)Parameters["Shape"]; }
    4646    }
     47    private bool HasFixedScaleParameter {
     48      get { return ScaleParameter.Value != null; }
     49    }
     50    private bool HasFixedInverseLengthParameter {
     51      get { return InverseLengthParameter.Value != null; }
     52    }
     53    private bool HasFixedShapeParameter {
     54      get { return ShapeParameter.Value != null; }
     55    }
    4756
    4857    [StorableConstructor]
     
    7180    public int GetNumberOfParameters(int numberOfVariables) {
    7281      return
    73         (ScaleParameter.Value != null ? 0 : 1) +
    74         (ShapeParameter.Value != null ? 0 : 1) +
    75         (InverseLengthParameter.Value != null ? 0 : numberOfVariables);
     82        (HasFixedScaleParameter ? 0 : 1) +
     83        (HasFixedShapeParameter ? 0 : 1) +
     84        (HasFixedInverseLengthParameter ? 0 : numberOfVariables);
    7685    }
    7786
     
    8897      int c = 0;
    8998      // gather parameter values
    90       if (ScaleParameter.Value != null) {
     99      if (HasFixedInverseLengthParameter) {
     100        inverseLength = InverseLengthParameter.Value.ToArray();
     101      } else {
     102        int length = p.Length;
     103        if (!HasFixedScaleParameter) length--;
     104        if (!HasFixedShapeParameter) length--;
     105        inverseLength = p.Select(e => 1.0 / Math.Exp(e)).Take(length).ToArray();
     106        c += inverseLength.Length;
     107      }
     108      if (HasFixedScaleParameter) {
    91109        scale = ScaleParameter.Value.Value;
    92110      } else {
     
    94112        c++;
    95113      }
    96       if (ShapeParameter.Value != null) {
     114      if (HasFixedShapeParameter) {
    97115        shape = ShapeParameter.Value.Value;
    98116      } else {
    99117        shape = Math.Exp(p[c]);
    100118        c++;
    101       }
    102       if (InverseLengthParameter.Value != null) {
    103         inverseLength = InverseLengthParameter.Value.ToArray();
    104       } else {
    105         inverseLength = p.Skip(2).Select(e => 1.0 / Math.Exp(e)).ToArray();
    106         c += inverseLength.Length;
    107119      }
    108120      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceRationalQuadraticArd", "p");
     
    113125      double[] inverseLength;
    114126      GetParameterValues(p, out scale, out shape, out inverseLength);
     127      var fixedInverseLength = HasFixedInverseLengthParameter;
     128      var fixedScale = HasFixedScaleParameter;
     129      var fixedShape = HasFixedShapeParameter;
    115130      // create functions
    116131      var cov = new ParameterizedCovarianceFunction();
     
    125140        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
    126141      };
    127       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, shape, inverseLength);
     142      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, shape, inverseLength, fixedInverseLength, fixedScale, fixedShape);
    128143      return cov;
    129144    }
    130145
    131     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double shape, double[] inverseLength) {
    132       if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
     146    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double shape, double[] inverseLength,
     147      bool fixedInverseLength, bool fixedScale, bool fixedShape) {
    133148      double d = i == j
    134149                   ? 0.0
     
    136151      double b = 1 + 0.5 * d / shape;
    137152      int k = 0;
    138       foreach (var columnIndex in columnIndices) {
    139         yield return scale * Math.Pow(b, -shape - 1) * Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
    140         k++;
     153      if (!fixedInverseLength) {
     154        foreach (var columnIndex in columnIndices) {
     155          yield return
     156            scale * Math.Pow(b, -shape - 1) *
     157            Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
     158          k++;
     159        }
    141160      }
    142       yield return 2 * scale * Math.Pow(b, -shape);
    143       yield return scale * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
     161      if (!fixedScale) yield return 2 * scale * Math.Pow(b, -shape);
     162      if (!fixedShape) yield return scale * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
    144163    }
    145164  }
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceRationalQuadraticIso.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    4444      get { return (IValueParameter<DoubleValue>)Parameters["Shape"]; }
    4545    }
     46
     47    private bool HasFixedScaleParameter {
     48      get { return ScaleParameter.Value != null; }
     49    }
     50    private bool HasFixedInverseLengthParameter {
     51      get { return InverseLengthParameter.Value != null; }
     52    }
     53    private bool HasFixedShapeParameter {
     54      get { return ShapeParameter.Value != null; }
     55    }
     56
     57
    4658    [StorableConstructor]
    4759    private CovarianceRationalQuadraticIso(bool deserializing)
     
    6880
    6981    public int GetNumberOfParameters(int numberOfVariables) {
    70       return (ScaleParameter.Value != null ? 0 : 1) +
    71         (ShapeParameter.Value != null ? 0 : 1) +
    72         (InverseLengthParameter.Value != null ? 0 : 1);
     82      return (HasFixedScaleParameter ? 0 : 1) +
     83        (HasFixedShapeParameter ? 0 : 1) +
     84        (HasFixedInverseLengthParameter ? 0 : 1);
    7385    }
    7486
     
    8496      int c = 0;
    8597      // gather parameter values
    86       if (ScaleParameter.Value != null) {
     98      if (HasFixedInverseLengthParameter) {
     99        inverseLength = InverseLengthParameter.Value.Value;
     100      } else {
     101        inverseLength = 1.0 / Math.Exp(p[c]);
     102        c++;
     103      }
     104      if (HasFixedScaleParameter) {
    87105        scale = ScaleParameter.Value.Value;
    88106      } else {
     
    90108        c++;
    91109      }
    92       if (ShapeParameter.Value != null) {
     110      if (HasFixedShapeParameter) {
    93111        shape = ShapeParameter.Value.Value;
    94112      } else {
    95113        shape = Math.Exp(p[c]);
    96         c++;
    97       }
    98       if (InverseLengthParameter.Value != null) {
    99         inverseLength = InverseLengthParameter.Value.Value;
    100       } else {
    101         inverseLength = 1.0 / Math.Exp(p[c]);
    102114        c++;
    103115      }
     
    108120      double scale, shape, inverseLength;
    109121      GetParameterValues(p, out scale, out shape, out inverseLength);
     122      var fixedInverseLength = HasFixedInverseLengthParameter;
     123      var fixedScale = HasFixedScaleParameter;
     124      var fixedShape = HasFixedShapeParameter;
    110125      // create functions
    111126      var cov = new ParameterizedCovarianceFunction();
     
    114129                    ? 0.0
    115130                    : Util.SqrDist(x, i, j, inverseLength, columnIndices);
    116         return shape * Math.Pow(1 + 0.5 * d / shape, -shape);
     131        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
    117132      };
    118133      cov.CrossCovariance = (x, xt, i, j) => {
     
    120135        return scale * Math.Pow(1 + 0.5 * d / shape, -shape);
    121136      };
    122       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, shape, inverseLength);
     137      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, shape, inverseLength, fixedInverseLength, fixedScale, fixedShape);
    123138      return cov;
    124139    }
    125140
    126     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double shape, double inverseLength) {
     141    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double shape, double inverseLength,
     142      bool fixedInverseLength, bool fixedScale, bool fixedShape) {
    127143      double d = i == j
    128144                   ? 0.0
     
    130146
    131147      double b = 1 + 0.5 * d / shape;
    132       yield return scale * Math.Pow(b, -shape - 1) * d;
    133       yield return 2 * scale * Math.Pow(b, -shape);
    134       yield return scale * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
     148      if (!fixedInverseLength) yield return scale * Math.Pow(b, -shape - 1) * d;
     149      if (!fixedScale) yield return 2 * scale * Math.Pow(b, -shape);
     150      if (!fixedShape) yield return scale * Math.Pow(b, -shape) * (0.5 * d / b - shape * Math.Log(b));
    135151    }
    136152  }
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceScale.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    3737      get { return (IValueParameter<DoubleValue>)Parameters["Scale"]; }
    3838    }
     39    private bool HasFixedScaleParameter {
     40      get { return ScaleParameter.Value != null; }
     41    }
    3942
    4043    public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
     
    6568
    6669    public int GetNumberOfParameters(int numberOfVariables) {
    67       return (ScaleParameter.Value != null ? 0 : 1) + CovarianceFunctionParameter.Value.GetNumberOfParameters(numberOfVariables);
     70      return (HasFixedScaleParameter ? 0 : 1) + CovarianceFunctionParameter.Value.GetNumberOfParameters(numberOfVariables);
    6871    }
    6972
     
    7780    private void GetParameterValues(double[] p, out double scale) {
    7881      // gather parameter values
    79       if (ScaleParameter.Value != null) {
     82      if (HasFixedScaleParameter) {
    8083        scale = ScaleParameter.Value.Value;
    8184      } else {
     
    8790      double scale;
    8891      GetParameterValues(p, out scale);
     92      var fixedScale = HasFixedScaleParameter;
    8993      var subCov = CovarianceFunctionParameter.Value.GetParameterizedCovarianceFunction(p.Skip(1).ToArray(), columnIndices);
    9094      // create functions
     
    9296      cov.Covariance = (x, i, j) => scale * subCov.Covariance(x, i, j);
    9397      cov.CrossCovariance = (x, xt, i, j) => scale * subCov.CrossCovariance(x, xt, i, j);
    94       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, subCov);
     98      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, subCov, fixedScale);
    9599      return cov;
    96100    }
    97101
    98     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, ParameterizedCovarianceFunction cov) {
    99       yield return 2 * scale * cov.Covariance(x, i, j);
     102    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, ParameterizedCovarianceFunction cov,
     103      bool fixedScale) {
     104      if (!fixedScale) {
     105        yield return 2 * scale * cov.Covariance(x, i, j);
     106      }
    100107      foreach (var g in cov.CovarianceGradient(x, i, j))
    101108        yield return scale * g;
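
    The scaled covariance's gradient enumeration above applies the chain rule: when the scale is a free hyperparameter its own entry comes first, followed by the wrapped covariance's gradient entries, each multiplied by the scale. A small sketch of that iterator shape (hypothetical helper inside a class with using System.Collections.Generic, not the HeuristicLab signature):

        // d(sf^2 * k_sub)/d log(sf) = 2 * sf^2 * k_sub; for every other parameter theta,
        // d(sf^2 * k_sub)/d theta = sf^2 * d k_sub / d theta.
        static IEnumerable<double> ScaledGradient(double scale, double subCovValue,
                                                  IEnumerable<double> subGradient, bool fixedScale) {
          if (!fixedScale)
            yield return 2 * scale * subCovValue;   // entry for the scale itself, skipped when fixed
          foreach (var g in subGradient)
            yield return scale * g;                 // chain rule for the wrapped covariance's parameters
        }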
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialArd.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    4040      get { return (IValueParameter<DoubleArray>)Parameters["InverseLength"]; }
    4141    }
     42    private bool HasFixedInverseLengthParameter {
     43      get { return InverseLengthParameter.Value != null; }
     44    }
     45    private bool HasFixedScaleParameter {
     46      get { return ScaleParameter.Value != null; }
     47    }
    4248
    4349    [StorableConstructor]
     
    6167    public int GetNumberOfParameters(int numberOfVariables) {
    6268      return
    63         (ScaleParameter.Value != null ? 0 : 1) +
    64         (InverseLengthParameter.Value != null ? 0 : numberOfVariables);
     69        (HasFixedScaleParameter ? 0 : 1) +
     70        (HasFixedInverseLengthParameter ? 0 : numberOfVariables);
    6571    }
    6672
     
    7682      int c = 0;
    7783      // gather parameter values
    78       if (ScaleParameter.Value != null) {
     84      if (HasFixedInverseLengthParameter) {
     85        inverseLength = InverseLengthParameter.Value.ToArray();
     86      } else {
     87        int length = p.Length;
     88        if (!HasFixedScaleParameter) length--;
     89        inverseLength = p.Select(e => 1.0 / Math.Exp(e)).Take(length).ToArray();
     90        c += inverseLength.Length;
     91      }
     92      if (HasFixedScaleParameter) {
    7993        scale = ScaleParameter.Value.Value;
    8094      } else {
    8195        scale = Math.Exp(2 * p[c]);
    8296        c++;
    83       }
    84       if (InverseLengthParameter.Value != null) {
    85         inverseLength = InverseLengthParameter.Value.ToArray();
    86       } else {
    87         inverseLength = p.Skip(1).Select(e => 1.0 / Math.Exp(e)).ToArray();
    88         c += inverseLength.Length;
    8997      }
    9098      if (p.Length != c) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceSquaredExponentialArd", "p");
     
    95103      double[] inverseLength;
    96104      GetParameterValues(p, out scale, out inverseLength);
     105      var fixedInverseLength = HasFixedInverseLengthParameter;
     106      var fixedScale = HasFixedScaleParameter;
    97107      // create functions
    98108      var cov = new ParameterizedCovarianceFunction();
     
    107117        return scale * Math.Exp(-d / 2.0);
    108118      };
    109       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, inverseLength);
     119      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, columnIndices, scale, inverseLength, fixedInverseLength, fixedScale);
    110120      return cov;
    111121    }
    112122
    113 
    114     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double[] inverseLength) {
    115       if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
     123    // order of returned gradients must match the order in GetParameterValues!
     124    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices, double scale, double[] inverseLength,
     125      bool fixedInverseLength, bool fixedScale) {
    116126      double d = i == j
    117127                   ? 0.0
    118128                   : Util.SqrDist(x, i, j, inverseLength, columnIndices);
     129
    119130      int k = 0;
    120       foreach (var columnIndex in columnIndices) {
    121         double sqrDist = Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
    122         yield return scale * Math.Exp(-d / 2.0) * sqrDist;
    123         k++;
     131      if (!fixedInverseLength) {
     132        foreach (var columnIndex in columnIndices) {
     133          double sqrDist = Util.SqrDist(x[i, columnIndex] * inverseLength[k], x[j, columnIndex] * inverseLength[k]);
     134          yield return scale * Math.Exp(-d / 2.0) * sqrDist;
     135          k++;
     136        }
    124137      }
    125 
    126       yield return 2.0 * scale * Math.Exp(-d / 2.0);
     138      if (!fixedScale) yield return 2.0 * scale * Math.Exp(-d / 2.0);
    127139    }
    128140  }
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSquaredExponentialIso.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    4242    }
    4343
     44    private bool HasFixedInverseLengthParameter {
     45      get { return InverseLengthParameter.Value != null; }
     46    }
     47    private bool HasFixedScaleParameter {
     48      get { return ScaleParameter.Value != null; }
     49    }
     50
    4451    [StorableConstructor]
    4552    private CovarianceSquaredExponentialIso(bool deserializing)
     
    6673    public int GetNumberOfParameters(int numberOfVariables) {
    6774      return
    68         (ScaleParameter.Value != null ? 0 : 1) +
    69         (InverseLengthParameter.Value != null ? 0 : 1);
     75        (HasFixedScaleParameter ? 0 : 1) +
     76        (HasFixedInverseLengthParameter ? 0 : 1);
    7077    }
    7178
     
    8188      // gather parameter values
    8289      int c = 0;
    83       if (InverseLengthParameter.Value != null) {
     90      if (HasFixedInverseLengthParameter) {
    8491        inverseLength = InverseLengthParameter.Value.Value;
    8592      } else {
     
    8895      }
    8996
    90       if (ScaleParameter.Value != null) {
     97      if (HasFixedScaleParameter) {
    9198        scale = ScaleParameter.Value.Value;
    9299      } else {
     
    100107      double inverseLength, scale;
    101108      GetParameterValues(p, out scale, out inverseLength);
     109      var fixedInverseLength = HasFixedInverseLengthParameter;
     110      var fixedScale = HasFixedScaleParameter;
    102111      // create functions
    103112      var cov = new ParameterizedCovarianceFunction();
     
    112121        return scale * Math.Exp(-d / 2.0);
    113122      };
    114       cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, inverseLength, columnIndices);
     123      cov.CovarianceGradient = (x, i, j) => GetGradient(x, i, j, scale, inverseLength, columnIndices,
     124        fixedInverseLength, fixedScale);
    115125      return cov;
    116126    }
    117127
    118     private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double sf2, double inverseLength, IEnumerable<int> columnIndices) {
     128    // order of returned gradients must match the order in GetParameterValues!
     129    private static IEnumerable<double> GetGradient(double[,] x, int i, int j, double sf2, double inverseLength, IEnumerable<int> columnIndices,
     130      bool fixedInverseLength, bool fixedScale) {
    119131      double d = i == j
    120132                   ? 0.0
    121133                   : Util.SqrDist(x, i, j, inverseLength, columnIndices);
    122134      double g = Math.Exp(-d / 2.0);
    123       yield return sf2 * g * d;
    124       yield return 2.0 * sf2 * g;
     135      if (!fixedInverseLength) yield return sf2 * g * d;
     136      if (!fixedScale) yield return 2.0 * sf2 * g;
    125137    }
    126138  }
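
    The added comment about gradient order points at the contract between GetParameterValues and GetGradient. A sketch of the quantities involved, assuming the usual log-scale parameterization used above: with d = \|x_i - x_j\|^2 / \ell^2 (the code's Util.SqrDist with inverseLength = 1/\ell),

        k(x_i, x_j) = \sigma_f^2 \exp(-d/2)
        \partial k / \partial \log \ell      = \sigma_f^2 e^{-d/2}\, d   (first yield, skipped when the inverse length is fixed)
        \partial k / \partial \log \sigma_f  = 2 \sigma_f^2 e^{-d/2}     (second yield, skipped when the scale is fixed)

    The ARD variant above yields one such inverse-length term per selected column instead of the single isotropic term.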
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceFunctions/CovarianceSum.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassification.cs

    r8623 r10553  
    22#region License Information
    33/* HeuristicLab
    4  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     4 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    55 *
    66 * This file is part of HeuristicLab.
     
    2222
    2323using System;
     24using System.Linq;
    2425using HeuristicLab.Algorithms.GradientDescent;
    2526using HeuristicLab.Common;
     
    3031using HeuristicLab.Parameters;
    3132using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     33using HeuristicLab.PluginInfrastructure;
    3234using HeuristicLab.Problems.DataAnalysis;
    3335
     
    3941  [Creatable("Data Analysis")]
    4042  [StorableClass]
    41   public sealed class GaussianProcessClassification : EngineAlgorithm, IStorableContent {
     43  public sealed class GaussianProcessClassification : GaussianProcessBase, IStorableContent {
    4244    public string Filename { get; set; }
    4345
     
    4850    }
    4951
    50     private const string MeanFunctionParameterName = "MeanFunction";
    51     private const string CovarianceFunctionParameterName = "CovarianceFunction";
    52     private const string MinimizationIterationsParameterName = "Iterations";
    53     private const string ApproximateGradientsParameterName = "ApproximateGradients";
    54     private const string SeedParameterName = "Seed";
    55     private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
     52    private const string ModelParameterName = "Model";
    5653
    5754    #region parameter properties
    58     public IValueParameter<IMeanFunction> MeanFunctionParameter {
    59       get { return (IValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
     55    public IConstrainedValueParameter<IGaussianProcessClassificationModelCreator> GaussianProcessModelCreatorParameter {
     56      get { return (IConstrainedValueParameter<IGaussianProcessClassificationModelCreator>)Parameters[ModelCreatorParameterName]; }
    6057    }
    61     public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    62       get { return (IValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
     58    public IFixedValueParameter<GaussianProcessClassificationSolutionCreator> GaussianProcessSolutionCreatorParameter {
     59      get { return (IFixedValueParameter<GaussianProcessClassificationSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
    6360    }
    64     public IValueParameter<IntValue> MinimizationIterationsParameter {
    65       get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }
    66     }
    67     public IValueParameter<IntValue> SeedParameter {
    68       get { return (IValueParameter<IntValue>)Parameters[SeedParameterName]; }
    69     }
    70     public IValueParameter<BoolValue> SetSeedRandomlyParameter {
    71       get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    72     }
    73     #endregion
    74     #region properties
    75     public IMeanFunction MeanFunction {
    76       set { MeanFunctionParameter.Value = value; }
    77       get { return MeanFunctionParameter.Value; }
    78     }
    79     public ICovarianceFunction CovarianceFunction {
    80       set { CovarianceFunctionParameter.Value = value; }
    81       get { return CovarianceFunctionParameter.Value; }
    82     }
    83     public int MinimizationIterations {
    84       set { MinimizationIterationsParameter.Value.Value = value; }
    85       get { return MinimizationIterationsParameter.Value.Value; }
    86     }
    87     public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }
    88     public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }
    8961    #endregion
    9062
     
    9365    private GaussianProcessClassification(GaussianProcessClassification original, Cloner cloner)
    9466      : base(original, cloner) {
     67      RegisterEventHandlers();
    9568    }
    9669    public GaussianProcessClassification()
    97       : base() {
     70      : base(new ClassificationProblem()) {
    9871      this.name = ItemName;
    9972      this.description = ItemDescription;
    10073
    101       Problem = new ClassificationProblem();
     74      var modelCreators = ApplicationManager.Manager.GetInstances<IGaussianProcessClassificationModelCreator>();
     75      var defaultModelCreator = modelCreators.First(c => c is GaussianProcessClassificationModelCreator);
    10276
    103       Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
    104       Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
    105       Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
    106       Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    107       Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
     77      // GP regression and classification algorithms only differ in the model and solution creators,
     78      // thus we use a common base class and use operator parameters to implement the specific versions.
     79      // Different model creators can be implemented,
     80      // but the solution creator is implemented in a generic fashion already and we don't allow derived solution creators
     81      Parameters.Add(new ConstrainedValueParameter<IGaussianProcessClassificationModelCreator>(ModelCreatorParameterName, "The operator to create the Gaussian process model.",
     82        new ItemSet<IGaussianProcessClassificationModelCreator>(modelCreators), defaultModelCreator));
     83      // this parameter is not intended to be changed,
     84      Parameters.Add(new FixedValueParameter<GaussianProcessClassificationSolutionCreator>(SolutionCreatorParameterName, "The solution creator for the algorithm",
     85        new GaussianProcessClassificationSolutionCreator()));
     86      Parameters[SolutionCreatorParameterName].Hidden = true;
    10887
    109       Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
    110       Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
    111 
    112       var randomCreator = new HeuristicLab.Random.RandomCreator();
    113       var gpInitializer = new GaussianProcessHyperparameterInitializer();
    114       var bfgsInitializer = new LbfgsInitializer();
    115       var makeStep = new LbfgsMakeStep();
    116       var branch = new ConditionalBranch();
    117       var modelCreator = new GaussianProcessClassificationModelCreator();
    118       var updateResults = new LbfgsUpdateResults();
    119       var analyzer = new LbfgsAnalyzer();
    120       var finalModelCreator = new GaussianProcessClassificationModelCreator();
    121       var finalAnalyzer = new LbfgsAnalyzer();
    122       var solutionCreator = new GaussianProcessClassificationSolutionCreator();
    123 
    124       OperatorGraph.InitialOperator = randomCreator;
    125       randomCreator.SeedParameter.ActualName = SeedParameterName;
    126       randomCreator.SeedParameter.Value = null;
    127       randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
    128       randomCreator.SetSeedRandomlyParameter.Value = null;
    129       randomCreator.Successor = gpInitializer;
    130 
    131       gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    132       gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    133       gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    134       gpInitializer.HyperparameterParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    135       gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
    136       gpInitializer.Successor = bfgsInitializer;
    137 
    138       bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
    139       bfgsInitializer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    140       bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
    141       bfgsInitializer.Successor = makeStep;
    142 
    143       makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
    144       makeStep.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    145       makeStep.Successor = branch;
    146 
    147       branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
    148       branch.FalseBranch = modelCreator;
    149       branch.TrueBranch = finalModelCreator;
    150 
    151       modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    152       modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    153       modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    154       modelCreator.Successor = updateResults;
    155 
    156       updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
    157       updateResults.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    158       updateResults.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    159       updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
    160       updateResults.Successor = analyzer;
    161 
    162       analyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    163       analyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    164       analyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    165       analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
    166       analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
    167       analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
    168       analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
    169       analyzer.Successor = makeStep;
    170 
    171       finalModelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    172       finalModelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    173       finalModelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    174       finalModelCreator.HyperparameterParameter.ActualName = bfgsInitializer.PointParameter.ActualName;
    175       finalModelCreator.Successor = finalAnalyzer;
    176 
    177       finalAnalyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    178       finalAnalyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    179       finalAnalyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    180       finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
    181       finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
    182       finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
    183       finalAnalyzer.Successor = solutionCreator;
    184 
    185       solutionCreator.ModelParameter.ActualName = finalModelCreator.ModelParameter.Name;
    186       solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     88      ParameterizedModelCreators();
     89      ParameterizeSolutionCreator(GaussianProcessSolutionCreatorParameter.Value);
     90      RegisterEventHandlers();
    18791    }
    18892
     93
    18994    [StorableHook(HookType.AfterDeserialization)]
    190     private void AfterDeserialization() { }
     95    private void AfterDeserialization() {
     96      RegisterEventHandlers();
     97    }
    19198
    19299    public override IDeepCloneable Clone(Cloner cloner) {
    193100      return new GaussianProcessClassification(this, cloner);
    194101    }
     102
     103    #region events
     104    private void RegisterEventHandlers() {
     105      GaussianProcessModelCreatorParameter.ValueChanged += ModelCreatorParameter_ValueChanged;
     106    }
     107
     108    private void ModelCreatorParameter_ValueChanged(object sender, EventArgs e) {
     109      ParameterizedModelCreator(GaussianProcessModelCreatorParameter.Value);
     110    }
     111    #endregion
     112
     113    private void ParameterizedModelCreators() {
     114      foreach (var creator in GaussianProcessModelCreatorParameter.ValidValues) {
     115        ParameterizedModelCreator(creator);
     116      }
     117    }
     118
     119    private void ParameterizedModelCreator(IGaussianProcessClassificationModelCreator modelCreator) {
     120      modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     121      modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
     122      modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
     123
     124      // parameter names fixed by the algorithm
     125      modelCreator.ModelParameter.ActualName = ModelParameterName;
     126      modelCreator.HyperparameterParameter.ActualName = HyperparameterParameterName;
     127      modelCreator.HyperparameterGradientsParameter.ActualName = HyperparameterGradientsParameterName;
     128      modelCreator.NegativeLogLikelihoodParameter.ActualName = NegativeLogLikelihoodParameterName;
     129    }
     130
     131    private void ParameterizeSolutionCreator(GaussianProcessClassificationSolutionCreator solutionCreator) {
     132      solutionCreator.ModelParameter.ActualName = ModelParameterName;
     133      solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     134    }
    195135  }
    196136}
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationModelCreator.cs

    r8623 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    3434  [Item(Name = "GaussianProcessClassificationModelCreator",
    3535    Description = "Creates a Gaussian process model for least-squares classification given the data, the hyperparameters, a mean function, and a covariance function.")]
    36   public sealed class GaussianProcessClassificationModelCreator : GaussianProcessModelCreator {
     36  public sealed class GaussianProcessClassificationModelCreator : GaussianProcessModelCreator, IGaussianProcessClassificationModelCreator {
    3737    private const string ProblemDataParameterName = "ProblemData";
    3838
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationSolutionCreator.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs

    r8732 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    284284      var kss = new double[newN];
    285285      double[,] sWKs = new double[n, newN];
    286       var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));
     286      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
    287287
    288288      // for stddev
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModelCreator.cs

    r8401 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs

    r8615 r10553  
    22#region License Information
    33/* HeuristicLab
    4  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     4 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    55 *
    66 * This file is part of HeuristicLab.
     
    2222
    2323using System;
     24using System.Linq;
    2425using HeuristicLab.Algorithms.GradientDescent;
    2526using HeuristicLab.Common;
     
    3031using HeuristicLab.Parameters;
    3132using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     33using HeuristicLab.PluginInfrastructure;
    3234using HeuristicLab.Problems.DataAnalysis;
    3335
     
    3941  [Creatable("Data Analysis")]
    4042  [StorableClass]
    41   public sealed class GaussianProcessRegression : EngineAlgorithm, IStorableContent {
     43  public sealed class GaussianProcessRegression : GaussianProcessBase, IStorableContent {
    4244    public string Filename { get; set; }
    4345
     
    4850    }
    4951
    50     private const string MeanFunctionParameterName = "MeanFunction";
    51     private const string CovarianceFunctionParameterName = "CovarianceFunction";
    52     private const string MinimizationIterationsParameterName = "Iterations";
    53     private const string ApproximateGradientsParameterName = "ApproximateGradients";
    54     private const string SeedParameterName = "Seed";
    55     private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
     52    private const string ModelParameterName = "Model";
    5653
    5754    #region parameter properties
    58     public IValueParameter<IMeanFunction> MeanFunctionParameter {
    59       get { return (IValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
     55    public IConstrainedValueParameter<IGaussianProcessRegressionModelCreator> GaussianProcessModelCreatorParameter {
     56      get { return (IConstrainedValueParameter<IGaussianProcessRegressionModelCreator>)Parameters[ModelCreatorParameterName]; }
    6057    }
    61     public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    62       get { return (IValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
     58    public IFixedValueParameter<GaussianProcessRegressionSolutionCreator> GaussianProcessSolutionCreatorParameter {
     59      get { return (IFixedValueParameter<GaussianProcessRegressionSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
    6360    }
    64     public IValueParameter<IntValue> MinimizationIterationsParameter {
    65       get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }
    66     }
    67     public IValueParameter<IntValue> SeedParameter {
    68       get { return (IValueParameter<IntValue>)Parameters[SeedParameterName]; }
    69     }
    70     public IValueParameter<BoolValue> SetSeedRandomlyParameter {
    71       get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    72     }
    73     #endregion
    74     #region properties
    75     public IMeanFunction MeanFunction {
    76       set { MeanFunctionParameter.Value = value; }
    77       get { return MeanFunctionParameter.Value; }
    78     }
    79     public ICovarianceFunction CovarianceFunction {
    80       set { CovarianceFunctionParameter.Value = value; }
    81       get { return CovarianceFunctionParameter.Value; }
    82     }
    83     public int MinimizationIterations {
    84       set { MinimizationIterationsParameter.Value.Value = value; }
    85       get { return MinimizationIterationsParameter.Value.Value; }
    86     }
    87     public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }
    88     public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }
    8961    #endregion
    9062
     
    9365    private GaussianProcessRegression(GaussianProcessRegression original, Cloner cloner)
    9466      : base(original, cloner) {
     67      RegisterEventHandlers();
    9568    }
    9669    public GaussianProcessRegression()
    97       : base() {
     70      : base(new RegressionProblem()) {
    9871      this.name = ItemName;
    9972      this.description = ItemDescription;
    10073
    101       Problem = new RegressionProblem();
     74      var modelCreators = ApplicationManager.Manager.GetInstances<IGaussianProcessRegressionModelCreator>();
     75      var defaultModelCreator = modelCreators.First(c => c is GaussianProcessRegressionModelCreator);
    10276
    103       Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
    104       Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
    105       Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
    106       Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    107       Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
     77      // GP regression and classification algorithms only differ in the model and solution creators,
     78      // thus we use a common base class and use operator parameters to implement the specific versions.
     79      // Different model creators can be implemented,
     80      // but the solution creator is implemented in a generic fashion already and we don't allow derived solution creators
     81      Parameters.Add(new ConstrainedValueParameter<IGaussianProcessRegressionModelCreator>(ModelCreatorParameterName, "The operator to create the Gaussian process model.",
     82        new ItemSet<IGaussianProcessRegressionModelCreator>(modelCreators), defaultModelCreator));
     83      // this parameter is not intended to be changed,
     84      Parameters.Add(new FixedValueParameter<GaussianProcessRegressionSolutionCreator>(SolutionCreatorParameterName, "The solution creator for the algorithm",
     85        new GaussianProcessRegressionSolutionCreator()));
     86      Parameters[SolutionCreatorParameterName].Hidden = true;
    10887
    109       Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
    110       Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed
    111 
    112       var randomCreator = new HeuristicLab.Random.RandomCreator();
    113       var gpInitializer = new GaussianProcessHyperparameterInitializer();
    114       var bfgsInitializer = new LbfgsInitializer();
    115       var makeStep = new LbfgsMakeStep();
    116       var branch = new ConditionalBranch();
    117       var modelCreator = new GaussianProcessRegressionModelCreator();
    118       var updateResults = new LbfgsUpdateResults();
    119       var analyzer = new LbfgsAnalyzer();
    120       var finalModelCreator = new GaussianProcessRegressionModelCreator();
    121       var finalAnalyzer = new LbfgsAnalyzer();
    122       var solutionCreator = new GaussianProcessRegressionSolutionCreator();
    123 
    124       OperatorGraph.InitialOperator = randomCreator;
    125       randomCreator.SeedParameter.ActualName = SeedParameterName;
    126       randomCreator.SeedParameter.Value = null;
    127       randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
    128       randomCreator.SetSeedRandomlyParameter.Value = null;
    129       randomCreator.Successor = gpInitializer;
    130 
    131       gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    132       gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    133       gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    134       gpInitializer.HyperparameterParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    135       gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
    136       gpInitializer.Successor = bfgsInitializer;
    137 
    138       bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
    139       bfgsInitializer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    140       bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
    141       bfgsInitializer.Successor = makeStep;
    142 
    143       makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
    144       makeStep.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    145       makeStep.Successor = branch;
    146 
    147       branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
    148       branch.FalseBranch = modelCreator;
    149       branch.TrueBranch = finalModelCreator;
    150 
    151       modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    152       modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    153       modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    154       modelCreator.Successor = updateResults;
    155 
    156       updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
    157       updateResults.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    158       updateResults.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    159       updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
    160       updateResults.Successor = analyzer;
    161 
    162       analyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    163       analyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    164       analyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    165       analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
    166       analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
    167       analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
    168       analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
    169       analyzer.Successor = makeStep;
    170 
    171       finalModelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    172       finalModelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    173       finalModelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    174       finalModelCreator.HyperparameterParameter.ActualName = bfgsInitializer.PointParameter.ActualName;
    175       finalModelCreator.Successor = finalAnalyzer;
    176 
    177       finalAnalyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    178       finalAnalyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    179       finalAnalyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    180       finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
    181       finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
    182       finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
    183       finalAnalyzer.Successor = solutionCreator;
    184 
    185       solutionCreator.ModelParameter.ActualName = finalModelCreator.ModelParameter.Name;
    186       solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     88      ParameterizedModelCreators();
     89      ParameterizeSolutionCreator(GaussianProcessSolutionCreatorParameter.Value);
     90      RegisterEventHandlers();
    18791    }
    18892
     93
    18994    [StorableHook(HookType.AfterDeserialization)]
    190     private void AfterDeserialization() { }
     95    private void AfterDeserialization() {
     96      RegisterEventHandlers();
     97    }
    19198
    19299    public override IDeepCloneable Clone(Cloner cloner) {
    193100      return new GaussianProcessRegression(this, cloner);
    194101    }
     102
     103    #region events
     104    private void RegisterEventHandlers() {
     105      GaussianProcessModelCreatorParameter.ValueChanged += ModelCreatorParameter_ValueChanged;
     106    }
     107
     108    private void ModelCreatorParameter_ValueChanged(object sender, EventArgs e) {
     109      ParameterizedModelCreator(GaussianProcessModelCreatorParameter.Value);
     110    }
     111    #endregion
     112
     113    private void ParameterizedModelCreators() {
     114      foreach (var creator in GaussianProcessModelCreatorParameter.ValidValues) {
     115        ParameterizedModelCreator(creator);
     116      }
     117    }
     118
     119    private void ParameterizedModelCreator(IGaussianProcessRegressionModelCreator modelCreator) {
     120      modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     121      modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
     122      modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
     123
     124      // parameter names fixed by the algorithm
     125      modelCreator.ModelParameter.ActualName = ModelParameterName;
     126      modelCreator.HyperparameterParameter.ActualName = HyperparameterParameterName;
     127      modelCreator.HyperparameterGradientsParameter.ActualName = HyperparameterGradientsParameterName;
     128      modelCreator.NegativeLogLikelihoodParameter.ActualName = NegativeLogLikelihoodParameterName;
     129    }
     130
     131    private void ParameterizeSolutionCreator(GaussianProcessRegressionSolutionCreator solutionCreator) {
     132      solutionCreator.ModelParameter.ActualName = ModelParameterName;
     133      solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     134    }
    195135  }
    196136}
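The hunk above replaces the hand-wired LM-BFGS operator graph with the shared GaussianProcessBase infrastructure and exchangeable model/solution creator parameters, as explained in the code comment added in this hunk. A hypothetical usage sketch, assuming the names introduced here (GaussianProcessModelCreatorParameter, its ValidValues set, and the ValueChanged re-parameterization) plus System.Linq; it is not part of the changeset:

    // Hypothetical sketch: plugin-registered IGaussianProcessRegressionModelCreator
    // instances discovered via ApplicationManager appear in ValidValues.
    var gpr = new GaussianProcessRegression();
    var creator = gpr.GaussianProcessModelCreatorParameter.ValidValues.First();
    // Setting the value raises ValueChanged, which re-parameterizes the creator
    // (ProblemData, MeanFunction, and CovarianceFunction actual names).
    gpr.GaussianProcessModelCreatorParameter.Value = creator;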
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs

    r8484 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
     
    3434  [Item(Name = "GaussianProcessRegressionModelCreator",
    3535    Description = "Creates a Gaussian process model for regression given the data, the hyperparameters, a mean function, and a covariance function.")]
    36   public sealed class GaussianProcessRegressionModelCreator : GaussianProcessModelCreator {
     36  public sealed class GaussianProcessRegressionModelCreator : GaussianProcessModelCreator, IGaussianProcessRegressionModelCreator {
    3737    private const string ProblemDataParameterName = "ProblemData";
    3838
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolution.cs

    r8837 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.
  • branches/ClassificationModelComparison/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs

    r8982 r10553  
    11#region License Information
    22/* HeuristicLab
    3  * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
     3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
    44 *
    55 * This file is part of HeuristicLab.