
Timestamp: 09/06/12 09:52:52
Author: ascheibe
Message: #1861 merged changes from trunk into branch

Location: branches/HeuristicLab.Mono
Files: 33 edited, 11 copied

Legend:

  unchanged lines are shown without a prefix
  - removed
  + added
  … skipped lines
  • branches/HeuristicLab.Mono

  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis

  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/CrossValidation.cs

    r7738 r8585

         problemDataClone.TestPartition.Start = SamplesStart.Value; problemDataClone.TestPartition.End = SamplesEnd.Value;
         // clone models
    -    var ensembleSolution = new ClassificationEnsembleSolution(
    -      solutions.Value.Select(x => cloner.Clone(x.Model)),
    -      problemDataClone,
    -      solutions.Value.Select(x => cloner.Clone(x.ProblemData.TrainingPartition)),
    -      solutions.Value.Select(x => cloner.Clone(x.ProblemData.TestPartition)));
    +    var ensembleSolution = new ClassificationEnsembleSolution(problemDataClone);
    +    ensembleSolution.AddClassificationSolutions(solutions.Value);

         aggregatedResults.Add(new Result(solutions.Key + " (ensemble)", ensembleSolution));
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs

    r8451 r8585

     using System;
    +using System.Collections.Generic;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    …
       [StorableClass]
       [Item(Name = "CovarianceLinear", Description = "Linear covariance function for Gaussian processes.")]
    -  public class CovarianceLinear : Item, ICovarianceFunction {
    -    private static readonly double[] emptyArray = new double[0];
    -
    -    [Storable]
    -    private double[,] x;
    -    [Storable]
    -    private double[,] xt;
    -
    -    private double[,] k;
    -    private bool symmetric;
    -
    -    public int GetNumberOfParameters(int numberOfVariables) {
    +  public class CovarianceLinear : CovarianceFunction {
    +    public override int GetNumberOfParameters(int numberOfVariables) {
           return 0;
         }
    …
         protected CovarianceLinear(CovarianceLinear original, Cloner cloner)
           : base(original, cloner) {
    -      if (original.x != null) {
    -        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
    -        Array.Copy(original.x, this.x, x.Length);
    -
    -        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
    -        Array.Copy(original.xt, this.xt, xt.Length);
    -
    -        this.k = new double[original.k.GetLength(0), original.k.GetLength(1)];
    -        Array.Copy(original.k, this.k, k.Length);
    -      }
    -      this.symmetric = original.symmetric;
         }
         public CovarianceLinear()
    …
         }

    -    public void SetParameter(double[] hyp) {
    +    public override void SetParameter(double[] hyp) {
           if (hyp.Length > 0) throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
    -      k = null;
         }

    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    -      this.symmetric = true;
    +    public override double GetCovariance(double[,] x, int i, int j) {
    +      return Util.ScalarProd(x, i, j);
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      this.x = x;
    -      this.xt = xt;
    -      this.symmetric = false;
    -
    -      k = null;
    +    public override IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      yield break;
         }

    -    public double GetCovariance(int i, int j) {
    -      if (k == null) CalculateInnerProduct();
    -      return k[i, j];
    -    }
    -
    -    public double[] GetGradient(int i, int j) {
    -      return emptyArray;
    -    }
    -
    -    private void CalculateInnerProduct() {
    -      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      int cols = xt.GetLength(0);
    -      k = new double[rows, cols];
    -      if (symmetric) {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = i; j < cols; j++) {
    -            k[i, j] = Util.ScalarProd(Util.GetRow(x, i),
    -                                      Util.GetRow(x, j));
    -            k[j, i] = k[i, j];
    -          }
    -        }
    -      } else {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = 0; j < cols; j++) {
    -            k[i, j] = Util.ScalarProd(Util.GetRow(x, i),
    -                                      Util.GetRow(xt, j));
    -          }
    -        }
    -      }
    +    public override double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      return Util.ScalarProd(x, i, xt, j);
         }
       }
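    Note (our summary, not changeset text): the rewritten class is stateless — the SetData/caching machinery is gone and every query evaluates the plain dot-product kernel directly via Util.ScalarProd, with GetCrossCovariance covering the former x/xt case:

        k(x_i, x_j) = x_i^T x_j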
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovariancePeriodic.cs

    r8451 r8585

     using System;
    +using System.Collections.Generic;
    +using System.Linq;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    …
       [StorableClass]
       [Item(Name = "CovariancePeriodic", Description = "Periodic covariance function for Gaussian processes.")]
    -  public class CovariancePeriodic : Item, ICovarianceFunction {
    +  public class CovariancePeriodic : CovarianceFunction {
    +    public IValueParameter<DoubleValue> ScaleParameter {
    +      get { return scaleParameter; }
    +    }
    +    public IValueParameter<DoubleValue> InverseLengthParameter {
    +      get { return inverseLengthParameter; }
    +    }
    +    public IValueParameter<DoubleValue> PeriodParameter {
    +      get { return periodParameter; }
    +    }
    +
         [Storable]
    -    private double[,] x;
    +    private HyperParameter<DoubleValue> scaleParameter;
         [Storable]
    -    private double[,] xt;
    +    private HyperParameter<DoubleValue> inverseLengthParameter;
         [Storable]
    -    private double sf2;
    +    private HyperParameter<DoubleValue> periodParameter;
    +
         [Storable]
    -    private double l;
    +    private double scale;
         [Storable]
    -    private double p;
    +    private double inverseLength;
    +    [Storable]
    +    private double period;

    -    private bool symmetric;

    -    private double[,] sd;
    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return 3;
    -    }
         [StorableConstructor]
         protected CovariancePeriodic(bool deserializing) : base(deserializing) { }
         protected CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
           : base(original, cloner) {
    -      if (original.x != null) {
    -        x = new double[original.x.GetLength(0), original.x.GetLength(1)];
    -        Array.Copy(original.x, x, x.Length);
    -        xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
    -        Array.Copy(original.xt, xt, xt.Length);
    -      }
    -      sf2 = original.sf2;
    -      l = original.l;
    -      p = original.p;
    -      symmetric = original.symmetric;
    +      this.scaleParameter = cloner.Clone(original.scaleParameter);
    +      this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
    +      this.periodParameter = cloner.Clone(original.periodParameter);
    +      this.scale = original.scale;
    +      this.inverseLength = original.inverseLength;
    +      this.period = original.period;
    +
    +      RegisterEvents();
         }
    +
         public CovariancePeriodic()
           : base() {
    +      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the periodic covariance function.");
    +      inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter for the periodic covariance function.");
    +      periodParameter = new HyperParameter<DoubleValue>("Period", "The period parameter for the periodic covariance function.");
    +      Parameters.Add(scaleParameter);
    +      Parameters.Add(inverseLengthParameter);
    +      Parameters.Add(periodParameter);
    +
    +      RegisterEvents();
    +    }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
         }

    …
         }

    -    public void SetParameter(double[] hyp) {
    -      if (hyp.Length != 3) throw new ArgumentException();
    -      this.l = Math.Exp(hyp[0]);
    -      this.p = Math.Exp(hyp[1]);
    -      this.sf2 = Math.Exp(2 * hyp[2]);
    -
    -      sf2 = Math.Min(10E6, sf2); // upper limit for the scale
    -
    -      sd = null;
    -    }
    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    -      this.symmetric = true;
    +    // caching
    +    private void RegisterEvents() {
    +      AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
    +      AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
    +      AttachValueChangeHandler<DoubleValue, double>(periodParameter, () => { period = periodParameter.Value.Value; });
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      this.x = x;
    -      this.xt = xt;
    -      this.symmetric = false;
    -
    -      sd = null;
    +    public override int GetNumberOfParameters(int numberOfVariables) {
    +      return
    +        (new[] { scaleParameter, inverseLengthParameter, periodParameter }).Count(p => !p.Fixed);
         }

    -    public double GetCovariance(int i, int j) {
    -      if (sd == null) CalculateSquaredDistances();
    -      double k = sd[i, j];
    -      k = Math.PI * k / p;
    -      k = Math.Sin(k) / l;
    +    public override void SetParameter(double[] hyp) {
    +      int i = 0;
    +      if (!inverseLengthParameter.Fixed) {
    +        inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
    +        i++;
    +      }
    +      if (!periodParameter.Fixed) {
    +        periodParameter.SetValue(new DoubleValue(Math.Exp(hyp[i])));
    +        i++;
    +      }
    +      if (!scaleParameter.Fixed) {
    +        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
    +        i++;
    +      }
    +      if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovariancePeriod", "hyp");
    +    }
    +
    +    public override double GetCovariance(double[,] x, int i, int j) {
    +      double k = i == j ? 0.0 : GetDistance(x, x, i, j);
    +      k = Math.PI * k / period;
    +      k = Math.Sin(k) * inverseLength;
           k = k * k;

    -      return sf2 * Math.Exp(-2.0 * k);
    +      return scale * Math.Exp(-2.0 * k);
         }

    -
    -    public double[] GetDiagonalCovariances() {
    -      if (x != xt) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      var cov = new double[rows];
    -      for (int i = 0; i < rows; i++) {
    -        double k = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, i)));
    -        k = Math.PI * k / p;
    -        k = Math.Sin(k) / l;
    -        k = k * k;
    -        cov[i] = sf2 * Math.Exp(-2.0 * k);
    -      }
    -      return cov;
    +    public override IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      double v = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j) / period;
    +      double gradient = Math.Sin(v) * inverseLength;
    +      gradient *= gradient;
    +      yield return 4.0 * scale * Math.Exp(-2.0 * gradient) * gradient;
    +      double r = Math.Sin(v) * inverseLength;
    +      yield return 4.0 * scale * inverseLength * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
    +      yield return 2.0 * scale * Math.Exp(-2 * gradient);
         }

    -    public double[] GetGradient(int i, int j) {
    +    public override double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      double k = GetDistance(x, xt, i, j);
    +      k = Math.PI * k / period;
    +      k = Math.Sin(k) * inverseLength;
    +      k = k * k;

    -      var res = new double[3];
    -      double k = sd[i, j];
    -      k = Math.PI * k / p;
    -      {
    -        double newK = Math.Sin(k) / l;
    -        newK = newK * newK;
    -        res[0] = 4 * sf2 * Math.Exp(-2 * newK) * newK;
    -      }
    -      {
    -        double r = Math.Sin(k) / l;
    -        res[1] = 4 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(k) * k;
    -      }
    -      {
    -        double newK = Math.Sin(k) / l;
    -        newK = newK * newK;
    -        res[2] = 2 * sf2 * Math.Exp(-2 * newK);
    -      }
    -
    -      return res;
    +      return scale * Math.Exp(-2.0 * k);
         }

    -    private void CalculateSquaredDistances() {
    -      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      int cols = xt.GetLength(0);
    -      sd = new double[rows, cols];
    -
    -      if (symmetric) {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = i; j < cols; j++) {
    -            sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(x, j)));
    -            sd[j, i] = sd[i, j];
    -          }
    -        }
    -      } else {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = 0; j < cols; j++) {
    -            sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, j)));
    -          }
    -        }
    -      }
    +    private double GetDistance(double[,] x, double[,] xt, int i, int j) {
    +      return Math.Sqrt(Util.SqrDist(x, i, xt, j));
         }
       }
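    Note (our summary, not changeset text): both revisions implement the standard periodic kernel

        k(x_i, x_j) = \sigma^2 \exp\!\left(-\frac{2\,\sin^2(\pi \lVert x_i - x_j \rVert / p)}{\ell^2}\right)

    where scale stores \sigma^2, inverseLength stores 1/\ell, and period stores p. The rewrite changes only the bookkeeping: the three values become HyperParameter objects, and SetParameter consumes log-scale entries for the free (non-Fixed) parameters in the order inverse length, period, scale.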
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs

    r8451 r8585

     #endregion

    +using System;
    +using System.Collections.Generic;
     using System.Linq;
     using HeuristicLab.Common;
    …
         public int GetNumberOfParameters(int numberOfVariables) {
           this.numberOfVariables = numberOfVariables;
    -      return factors.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
    +      return factors.Select(f => f.GetNumberOfParameters(numberOfVariables)).Sum();
         }

         public void SetParameter(double[] hyp) {
    +      if (factors.Count == 0) throw new ArgumentException("at least one factor is necessary for the product covariance function.");
           int offset = 0;
           foreach (var t in factors) {
    …
           }
         }
    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    +
    +    public double GetCovariance(double[,] x, int i, int j) {
    +      return factors.Select(f => f.GetCovariance(x, i, j)).Aggregate((a, b) => a * b);
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      foreach (var t in factors) {
    -        t.SetData(x, xt);
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      var covariances = factors.Select(f => f.GetCovariance(x, i, j)).ToArray();
    +      for (int ii = 0; ii < factors.Count; ii++) {
    +        foreach (var g in factors[ii].GetGradient(x, i, j)) {
    +          double res = g;
    +          for (int jj = 0; jj < covariances.Length; jj++)
    +            if (ii != jj) res *= covariances[jj];
    +          yield return res;
    +        }
           }
         }

    -    public double GetCovariance(int i, int j) {
    -      return factors.Select(t => t.GetCovariance(i, j)).Aggregate((a, b) => a * b);
    -    }
    -
    -    public double[] GetGradient(int i, int j) {
    -      return Enumerable.Range(0, GetNumberOfParameters(numberOfVariables)).Select(k => GetGradient(i, j, k)).ToArray();
    -    }
    -    public double GetGradient(int i, int j, int k) {
    -      // map from parameter index to factor
    -      var vi = factors.Select((f, idx) => Enumerable.Repeat(idx, f.GetNumberOfParameters(numberOfVariables))).SelectMany(x => x).ToArray();
    -      double res = 1.0;
    -      int jj = Enumerable.Range(0, k).Count(e => vi[e] == vi[k]);
    -      for (int ii = 0; ii < factors.Count; ii++) {
    -        var f = factors[ii];
    -        if (ii == vi[k]) {
    -          res *= f.GetGradient(i, j)[jj];
    -        } else {
    -          res *= f.GetCovariance(i, j);
    -        }
    -      }
    -      return res;
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      return factors.Select(f => f.GetCrossCovariance(x, xt, i, j)).Aggregate((a, b) => a * b);
         }
       }
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEard.cs

    r8451 r8585

     using System;
    +using System.Collections.Generic;
     using System.Linq;
     using HeuristicLab.Common;
    …
       public class CovarianceSEard : Item, ICovarianceFunction {
         [Storable]
    -    private double[,] x;
    +    private double sf2;
    +    public double Scale { get { return sf2; } }
    +
         [Storable]
    -    private double[,] xt;
    -    [Storable]
    -    private double sf2;
    -    [Storable]
    -    private double[] l;
    -
    -    private double[,] sd;
    -    private bool symmetric;
    +    private double[] inverseLength;
    +    public double[] InverseLength {
    +      get {
    +        if (inverseLength == null) return new double[0];
    +        var copy = new double[inverseLength.Length];
    +        Array.Copy(inverseLength, copy, copy.Length);
    +        return copy;
    +      }
    +    }

         public int GetNumberOfParameters(int numberOfVariables) {
    …
         protected CovarianceSEard(CovarianceSEard original, Cloner cloner)
           : base(original, cloner) {
    -      if (original.x != null) {
    -        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
    -        Array.Copy(original.x, this.x, x.Length);
    -
    -        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
    -        Array.Copy(original.xt, this.xt, xt.Length);
    -
    -        this.sd = new double[original.sd.GetLength(0), original.sd.GetLength(1)];
    -        Array.Copy(original.sd, this.sd, sd.Length);
    -
    -        this.l = new double[original.l.Length];
    -        Array.Copy(original.l, this.l, l.Length);
    -      }
    +      this.inverseLength = original.InverseLength; // array is cloned in the getter
           this.sf2 = original.sf2;
    -      this.symmetric = original.symmetric;
         }
         public CovarianceSEard()
    …

         public void SetParameter(double[] hyp) {
    -      this.l = hyp.Take(hyp.Length - 1).Select(Math.Exp).ToArray();
    +      this.inverseLength = hyp.Take(hyp.Length - 1).Select(p => 1.0 / Math.Exp(p)).ToArray();
           this.sf2 = Math.Exp(2 * hyp[hyp.Length - 1]);
    -      sf2 = Math.Min(10E6, sf2); // upper limit for the scale
    -
    -      sd = null;
         }

    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    -      this.symmetric = true;
    +    public double GetCovariance(double[,] x, int i, int j) {
    +      double d = i == j
    +                   ? 0.0
    +                   : Util.SqrDist(x, i, j, inverseLength);
    +      return sf2 * Math.Exp(-d / 2.0);
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      this.x = x;
    -      this.xt = xt;
    -      this.symmetric = false;
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      double d = i == j
    +                   ? 0.0
    +                   : Util.SqrDist(x, i, j, inverseLength);

    -      sd = null;
    +      for (int ii = 0; ii < inverseLength.Length; ii++) {
    +        double sqrDist = Util.SqrDist(x[i, ii] * inverseLength[ii], x[j, ii] * inverseLength[ii]);
    +        yield return sf2 * Math.Exp(-d / 2.0) * sqrDist;
    +      }
    +      yield return 2.0 * sf2 * Math.Exp(-d / 2.0);
         }

    -    public double GetCovariance(int i, int j) {
    -      if (sd == null) CalculateSquaredDistances();
    -      return sf2 * Math.Exp(-sd[i, j] / 2.0);
    -    }
    -
    -    public double[] GetGradient(int i, int j) {
    -      var res = new double[l.Length + 1];
    -      for (int k = 0; k < l.Length; k++) {
    -        double sqrDist = Util.SqrDist(x[i, k] / l[k], xt[j, k] / l[k]);
    -
    -        res[k] = sf2 * Math.Exp(-sd[i, j] / 2.0) * sqrDist;
    -      }
    -      res[res.Length - 1] = 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
    -      return res;
    -    }
    -
    -    private void CalculateSquaredDistances() {
    -      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      int cols = xt.GetLength(0);
    -      sd = new double[rows, cols];
    -      if (symmetric) {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = i; j < cols; j++) {
    -            sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select((e, k) => e / l[k]),
    -                                    Util.GetRow(xt, j).Select((e, k) => e / l[k]));
    -            sd[j, i] = sd[i, j];
    -          }
    -        }
    -      } else {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = 0; j < cols; j++) {
    -            sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select((e, k) => e / l[k]),
    -                                    Util.GetRow(xt, j).Select((e, k) => e / l[k]));
    -          }
    -        }
    -      }
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      double d = Util.SqrDist(x, i, xt, j, inverseLength);
    +      return sf2 * Math.Exp(-d / 2.0);
         }
       }
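    Note (our summary, not changeset text): this is the squared-exponential kernel with automatic relevance determination (ARD),

        k(x_i, x_j) = \sigma_f^2 \exp\!\left(-\tfrac{1}{2} \sum_k \frac{(x_{i,k} - x_{j,k})^2}{\ell_k^2}\right)

    with sf2 = \sigma_f^2. The revision stores 1/\ell_k in inverseLength and delegates the weighted distance to the new Util.SqrDist overload instead of caching a distance matrix.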
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEiso.cs

    r8451 r8585

     using System;
    -using System.Linq;
    +using System.Collections.Generic;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    …
       public class CovarianceSEiso : Item, ICovarianceFunction {
         [Storable]
    -    private double[,] x;
    +    private double sf2;
    +    public double Scale { get { return sf2; } }
         [Storable]
    -    private double[,] xt;
    -    [Storable]
    -    private double sf2;
    -    [Storable]
    -    private double l;
    -    [Storable]
    -    private bool symmetric;
    -    private double[,] sd;
    +    private double inverseLength;
    +    public double InverseLength { get { return inverseLength; } }

         [StorableConstructor]
    …
         protected CovarianceSEiso(CovarianceSEiso original, Cloner cloner)
           : base(original, cloner) {
    -      if (original.x != null) {
    -        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
    -        Array.Copy(original.x, this.x, x.Length);
    -
    -        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
    -        Array.Copy(original.xt, this.xt, xt.Length);
    -
    -        this.sd = new double[original.sd.GetLength(0), original.sd.GetLength(1)];
    -        Array.Copy(original.sd, this.sd, sd.Length);
    -        this.sf2 = original.sf2;
    -      }
           this.sf2 = original.sf2;
    -      this.l = original.l;
    -      this.symmetric = original.symmetric;
    +      this.inverseLength = original.inverseLength;
         }

    …

         public void SetParameter(double[] hyp) {
    -      this.l = Math.Exp(hyp[0]);
    -      this.sf2 = Math.Min(1E6, Math.Exp(2 * hyp[1])); // upper limit for scale
    -      sd = null;
    -    }
    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    -      this.symmetric = true;
    +      if (hyp.Length != 2) throw new ArgumentException("CovarianceSEiso has two hyperparameters", "k");
    +      this.inverseLength = 1.0 / Math.Exp(hyp[0]);
    +      this.sf2 = Math.Exp(2 * hyp[1]);
         }


    -    public void SetData(double[,] x, double[,] xt) {
    -      this.symmetric = false;
    -      this.x = x;
    -      this.xt = xt;
    -      sd = null;
    +    public double GetCovariance(double[,] x, int i, int j) {
    +      double d = i == j
    +                   ? 0.0
    +                   : Util.SqrDist(x, i, j, inverseLength);
    +      return sf2 * Math.Exp(-d / 2.0);
         }

    -    public double GetCovariance(int i, int j) {
    -      if (sd == null) CalculateSquaredDistances();
    -      return sf2 * Math.Exp(-sd[i, j] / 2.0);
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      double d = i == j
    +                   ? 0.0
    +                   : Util.SqrDist(x, i, j, inverseLength);
    +      double g = Math.Exp(-d / 2.0);
    +      yield return sf2 * g * d;
    +      yield return 2.0 * sf2 * g;
         }

    -    public double[] GetGradient(int i, int j) {
    -      var res = new double[2];
    -      res[0] = sf2 * Math.Exp(-sd[i, j] / 2.0) * sd[i, j];
    -      res[1] = 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
    -      return res;
    -    }
    -
    -    private void CalculateSquaredDistances() {
    -      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      int cols = xt.GetLength(0);
    -      sd = new double[rows, cols];
    -      if (symmetric) {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = i; j < rows; j++) {
    -            sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));
    -            sd[j, i] = sd[i, j];
    -          }
    -        }
    -      } else {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = 0; j < cols; j++) {
    -            sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));
    -          }
    -        }
    -      }
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      double d = Util.SqrDist(x, i, xt, j, inverseLength);
    +      return sf2 * Math.Exp(-d / 2.0);
         }
       }
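    Note (our summary, not changeset text): CovarianceSEiso is the isotropic squared-exponential kernel

        k(x_i, x_j) = \sigma_f^2 \exp\!\left(-\frac{\lVert x_i - x_j \rVert^2}{2\ell^2}\right)

    and the two yield returns in GetGradient are its derivatives with respect to the log-hyperparameters: \partial k / \partial \log \ell = k \cdot d and \partial k / \partial \log \sigma_f = 2k, where d = \lVert x_i - x_j \rVert^2 / \ell^2.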
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs

    r8451 r8585

     #endregion

    +using System;
    +using System.Collections.Generic;
     using System.Linq;
     using HeuristicLab.Common;
    …

         public void SetParameter(double[] hyp) {
    +      if (terms.Count == 0) throw new ArgumentException("At least one term is needed for sum covariance function.");
           int offset = 0;
           foreach (var t in terms) {
    …
           }
         }
    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    +
    +    public double GetCovariance(double[,] x, int i, int j) {
    +      return terms.Select(t => t.GetCovariance(x, i, j)).Sum();
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      foreach (var t in terms) {
    -        t.SetData(x, xt);
    -      }
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j) {
    +      return terms.Select(t => t.GetGradient(x, i, j)).Aggregate(Enumerable.Concat);
         }

    -    public double GetCovariance(int i, int j) {
    -      return terms.Select(t => t.GetCovariance(i, j)).Sum();
    -    }
    -
    -    public double[] GetGradient(int i, int j) {
    -      return terms.Select(t => t.GetGradient(i, j)).SelectMany(seq => seq).ToArray();
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j) {
    +      return terms.Select(t => t.GetCrossCovariance(x, xt, i, j)).Sum();
         }
       }
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs

    r8451 r8585

           var rand = RandomParameter.ActualValue;
           for (int i = 0; i < r.Length; i++)
    -        r[i] = rand.NextDouble() * 4 - 2;
    +        r[i] = rand.NextDouble() * 2 - 1;

           HyperparameterParameter.ActualValue = r;
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r8451 r8585

         [Storable]
    +    private double[] hyperparameterGradients;
    +    public double[] HyperparameterGradients {
    +      get {
    +        var copy = new double[hyperparameterGradients.Length];
    +        Array.Copy(hyperparameterGradients, copy, copy.Length);
    +        return copy;
    +      }
    +    }
    +
    +    [Storable]
         private ICovarianceFunction covarianceFunction;
         public ICovarianceFunction CovarianceFunction {
    …
         [Storable]
         private double sqrSigmaNoise;
    +    public double SigmaNoise {
    +      get { return Math.Sqrt(sqrSigmaNoise); }
    +    }

         [Storable]
    …
         private double[,] x;
         [Storable]
    -    private Scaling scaling;
    +    private Scaling inputScaling;

    …
           this.meanFunction = cloner.Clone(original.meanFunction);
           this.covarianceFunction = cloner.Clone(original.covarianceFunction);
    -      this.scaling = cloner.Clone(original.scaling);
    +      this.inputScaling = cloner.Clone(original.inputScaling);
           this.negativeLogLikelihood = original.negativeLogLikelihood;
           this.targetVariable = original.targetVariable;
    …
           this.allowedInputVariables = allowedInputVariables.ToArray();

    -      sqrSigmaNoise = Math.Exp(2.0 * hyp.First());
    -      sqrSigmaNoise = Math.Max(10E-6, sqrSigmaNoise); // lower limit for the noise level

           int nVariables = this.allowedInputVariables.Length;
    -      this.meanFunction.SetParameter(hyp.Skip(1)
    +      this.meanFunction.SetParameter(hyp
             .Take(this.meanFunction.GetNumberOfParameters(nVariables))
             .ToArray());
    -      this.covarianceFunction.SetParameter(hyp.Skip(1 + this.meanFunction.GetNumberOfParameters(nVariables))
    +      this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
             .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
             .ToArray());
    +      sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());

           CalculateModel(ds, rows);
    …

         private void CalculateModel(Dataset ds, IEnumerable<int> rows) {
    -      scaling = new Scaling(ds, allowedInputVariables, rows);
    -      x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling);
    -
    -      var y = ds.GetDoubleValues(targetVariable, rows).ToArray();
    +      inputScaling = new Scaling(ds, allowedInputVariables, rows);
    +      x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
    +      var y = ds.GetDoubleValues(targetVariable, rows);

           int n = x.GetLength(0);
    …

           meanFunction.SetData(x);
    -      covarianceFunction.SetData(x);

           // calculate means and covariances
           double[] m = meanFunction.GetMean(x);
           for (int i = 0; i < n; i++) {
    -
             for (int j = i; j < n; j++) {
    -          l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
    +          l[j, i] = covarianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;
               if (j == i) l[j, i] += 1.0;
             }
    …

           var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
    -      if (!res) throw new InvalidOperationException("Matrix is not positive semidefinite");
    +      if (!res) throw new ArgumentException("Matrix is not positive semidefinite");

           // calculate sum of diagonal elements for likelihood
    …
             alpha[i] = alpha[i] / sqrSigmaNoise;
           negativeLogLikelihood = 0.5 * Util.ScalarProd(ym, alpha) + diagSum + (n / 2.0) * Math.Log(2.0 * Math.PI * sqrSigmaNoise);
    -    }
    -
    -    public double[] GetHyperparameterGradients() {
    +
           // derivatives
    -      int n = x.GetLength(0);
           int nAllowedVariables = x.GetLength(1);
    -      double[,] q = new double[n, n];
    -      double[,] eye = new double[n, n];
    -      for (int i = 0; i < n; i++) eye[i, i] = 1.0;
    -
    -      int info;
    -      alglib.densesolverreport denseSolveRep;
    -
    -      alglib.spdmatrixcholeskysolvem(l, n, false, eye, n, out info, out denseSolveRep, out q);
    -      // double[,] a2 = outerProd(alpha, alpha);
    +
    +      alglib.matinvreport matInvRep;
    +      double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
    +      Array.Copy(l, lCopy, lCopy.Length);
    +
    +      alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
    +      if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients.");
           for (int i = 0; i < n; i++) {
    -        for (int j = 0; j < n; j++)
    -          q[i, j] = q[i, j] / sqrSigmaNoise - alpha[i] * alpha[j]; // a2[i, j];
    -      }
    -
    -      double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => q[i, i]).Sum();
    +        for (int j = 0; j <= i; j++)
    +          lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
    +      }
    +
    +      double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

           double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
    …
           if (covGradients.Length > 0) {
             for (int i = 0; i < n; i++) {
    -          for (int j = 0; j < n; j++) {
    -            var covDeriv = covarianceFunction.GetGradient(i, j);
    +          for (int j = 0; j < i; j++) {
    +            var g = covarianceFunction.GetGradient(x, i, j).ToArray();
                 for (int k = 0; k < covGradients.Length; k++) {
    -              covGradients[k] += q[i, j] * covDeriv[k];
    +              covGradients[k] += lCopy[i, j] * g[k];
                 }
               }
    -        }
    -        covGradients = covGradients.Select(g => g / 2.0).ToArray();
    -      }
    -
    -      return new double[] { noiseGradient }
    -        .Concat(meanGradients)
    -        .Concat(covGradients).ToArray();
    +
    +          var gDiag = covarianceFunction.GetGradient(x, i, i).ToArray();
    +          for (int k = 0; k < covGradients.Length; k++) {
    +            // diag
    +            covGradients[k] += 0.5 * lCopy[i, i] * gDiag[k];
    +          }
    +        }
    +      }
    +
    +      hyperparameterGradients =
    +        meanGradients
    +        .Concat(covGradients)
    +        .Concat(new double[] { noiseGradient }).ToArray();
    +
         }

    …
         }
         public GaussianProcessRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    -      return new GaussianProcessRegressionSolution(this, problemData);
    +      return new GaussianProcessRegressionSolution(this, new RegressionProblemData(problemData));
         }
         IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    …

         private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
    -      var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, scaling);
    +      var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
           int newN = newX.GetLength(0);
           int n = x.GetLength(0);
    -      // var predMean = new double[newN];
    -      // predVar = new double[newN];
    -
    -      // var kss = new double[newN];
           var Ks = new double[newN, n];
    -      double[,] sWKs = new double[n, newN];
    -      // double[,] v;
    -
    -      // for stddev
    -      //covarianceFunction.SetParameter(covHyp, newX);
    -      //kss = covarianceFunction.GetDiagonalCovariances();
    -
    -      covarianceFunction.SetData(x, newX);
           meanFunction.SetData(newX);
           var ms = meanFunction.GetMean(newX);
           for (int i = 0; i < newN; i++) {
    -
             for (int j = 0; j < n; j++) {
    -          Ks[i, j] = covarianceFunction.GetCovariance(j, i);
    -          sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
    -        }
    -      }
    +          Ks[i, j] = covarianceFunction.GetCrossCovariance(x, newX, j, i);
    +        }
    +      }
    +
    +      return Enumerable.Range(0, newN)
    +        .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
    +    }
    +
    +    public IEnumerable<double> GetEstimatedVariance(Dataset dataset, IEnumerable<int> rows) {
    +      var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
    +      int newN = newX.GetLength(0);
    +      int n = x.GetLength(0);
    +
    +      var kss = new double[newN];
    +      double[,] sWKs = new double[n, newN];

           // for stddev
    -      // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);
    -
    +      for (int i = 0; i < newN; i++)
    +        kss[i] = covarianceFunction.GetCovariance(newX, i, i);

           for (int i = 0; i < newN; i++) {
    -        // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
    -        yield return ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha);
    -        // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
    -        // predVar[i] = kss[i] - sumV2;
    -      }
    -
    +        for (int j = 0; j < n; j++) {
    +          sWKs[j, i] = covarianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
    +        }
    +      }
    +
    +      // for stddev
    +      alglib.ablas.rmatrixlefttrsm(n, newN, l, 0, 0, false, false, 0, ref sWKs, 0, 0);
    +
    +      for (int i = 0; i < newN; i++) {
    +        var sumV = Util.ScalarProd(Util.GetCol(sWKs, i), Util.GetCol(sWKs, i));
    +        kss[i] -= sumV;
    +        if (kss[i] < 0) kss[i] = 0;
    +      }
    +      return kss;
         }
       }
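    Note (our summary of the numerical scheme, standard GP regression rather than changeset text): with L the Cholesky factor of K/\sigma_n^2 + I computed in CalculateModel and \alpha = (K + \sigma_n^2 I)^{-1}(y - m), the two prediction methods evaluate

        \mu_* = m(x_*) + k_*^T \alpha, \qquad \sigma_*^2 = k(x_*, x_*) - v^T v, \quad v = L^{-1} k_* / \sigma_n

    which is why GetEstimatedVariance divides the cross-covariances by \sqrt{\sigma_n^2} before the triangular solve with rmatrixlefttrsm, and clamps tiny negative results caused by round-off to zero.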
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs

    r8451 r8585

     using System;
    -using System.Collections.Generic;
    -using System.Linq;
     using HeuristicLab.Algorithms.GradientDescent;
     using HeuristicLab.Common;
    …
     using HeuristicLab.Parameters;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    -using HeuristicLab.PluginInfrastructure;
     using HeuristicLab.Problems.DataAnalysis;

    …

         #region parameter properties
    -    public IConstrainedValueParameter<IMeanFunction> MeanFunctionParameter {
    -      get { return (IConstrainedValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
    +    public IValueParameter<IMeanFunction> MeanFunctionParameter {
    +      get { return (IValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
         }
    -    public IConstrainedValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    -      get { return (IConstrainedValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
    +    public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    +      get { return (IValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
         }
         public IValueParameter<IntValue> MinimizationIterationsParameter {
    …
           Problem = new RegressionProblem();

    -      List<IMeanFunction> meanFunctions = ApplicationManager.Manager.GetInstances<IMeanFunction>().ToList();
    -      List<ICovarianceFunction> covFunctions = ApplicationManager.Manager.GetInstances<ICovarianceFunction>().ToList();
    -
    -      Parameters.Add(new ConstrainedValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.",
    -        new ItemSet<IMeanFunction>(meanFunctions), meanFunctions.OfType<MeanConst>().First()));
    -      Parameters.Add(new ConstrainedValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.",
    -        new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.OfType<CovarianceSEiso>().First()));
    +      Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
    +      Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSEiso()));
           Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
           Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs

    r8451 r8585

     #endregion

    +using System;
     using System.Linq;
     using HeuristicLab.Common;
    …

         public override IOperation Apply() {
    -      var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
    -      ModelParameter.ActualValue = model;
    -      NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
    -      HyperparameterGradientsParameter.ActualValue = new RealVector(model.GetHyperparameterGradients());
    +      try {
    +        var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
    +        ModelParameter.ActualValue = model;
    +        NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
    +        HyperparameterGradientsParameter.ActualValue = new RealVector(model.HyperparameterGradients);
    +        return base.Apply();
    +      }
    +      catch (ArgumentException) { }
    +      catch (alglib.alglibexception) { }
    +      NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(1E300);
    +      HyperparameterGradientsParameter.ActualValue = new RealVector(Hyperparameter.Count());
           return base.Apply();
         }
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolution.cs

    r8451 r8585

     #endregion

    +using System.Collections.Generic;
    +using System.Linq;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    …
           return new GaussianProcessRegressionSolution(this, cloner);
         }
    +
    +    public IEnumerable<double> EstimatedVariance {
    +      get { return GetEstimatedVariance(Enumerable.Range(0, ProblemData.Dataset.Rows)); }
    +    }
    +    public IEnumerable<double> EstimatedTrainingVariance {
    +      get { return GetEstimatedVariance(ProblemData.TrainingIndices); }
    +    }
    +    public IEnumerable<double> EstimatedTestVariance {
    +      get { return GetEstimatedVariance(ProblemData.TestIndices); }
    +    }
    +
    +    public IEnumerable<double> GetEstimatedVariance(IEnumerable<int> rows) {
    +      return Model.GetEstimatedVariance(ProblemData.Dataset, rows);
    +    }
       }
     }
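    A usage sketch (ours; EstimatedTestValues is assumed from HeuristicLab's regression-solution base interface, everything else comes from the diff above) combining the new variance accessors with the estimated values to form pointwise 95% prediction intervals:

        // Sketch: Gaussian predictive intervals from mean and variance.
        var means = solution.EstimatedTestValues.ToArray();
        var variances = solution.EstimatedTestVariance.ToArray();
        for (int i = 0; i < means.Length; i++) {
          double halfWidth = 1.96 * Math.Sqrt(variances[i]); // 95% of the predictive mass
          Console.WriteLine("{0:F3} ± {1:F3}", means[i], halfWidth);
        }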
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs

    r8451 r8585

         public override IOperation Apply() {
    -      var m = ModelParameter.ActualValue;
    -      var data = ProblemDataParameter.ActualValue;
    -      var s = new GaussianProcessRegressionSolution(m, data);
    +      if (ModelParameter.ActualValue != null) {
    +        var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
    +        var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
    +        var s = new GaussianProcessRegressionSolution(m, data);

    -      SolutionParameter.ActualValue = s;
    -      var results = ResultsParameter.ActualValue;
    -      if (!results.ContainsKey(SolutionParameterName)) {
    -        results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
    -        results.Add(new Result(TrainingRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the training partition.", new DoubleValue(s.TrainingRSquared)));
    -        results.Add(new Result(TestRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the test partition.", new DoubleValue(s.TestRSquared)));
    -      } else {
    -        results[SolutionParameterName].Value = s;
    -        results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
    -        results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
    +        SolutionParameter.ActualValue = s;
    +        var results = ResultsParameter.ActualValue;
    +        if (!results.ContainsKey(SolutionParameterName)) {
    +          results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
    +          results.Add(new Result(TrainingRSquaredResultName,
    +                                 "The Pearson's R² of the Gaussian process solution on the training partition.",
    +                                 new DoubleValue(s.TrainingRSquared)));
    +          results.Add(new Result(TestRSquaredResultName,
    +                                 "The Pearson's R² of the Gaussian process solution on the test partition.",
    +                                 new DoubleValue(s.TestRSquared)));
    +        } else {
    +          results[SolutionParameterName].Value = s;
    +          results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
    +          results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
    +        }
           }
           return base.Apply();
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs

    r8451 r8585

     #endregion

    +using System.Collections.Generic;
     using HeuristicLab.Core;

    …
         int GetNumberOfParameters(int numberOfVariables);
         void SetParameter(double[] hyp);
    -    void SetData(double[,] x);
    -    void SetData(double[,] x, double[,] xt);
    -
    -    double GetCovariance(int i, int j);
    -    double[] GetGradient(int i, int j);
    +    double GetCovariance(double[,] x, int i, int j);
    +    IEnumerable<double> GetGradient(double[,] x, int i, int j);
    +    double GetCrossCovariance(double[,] x, double[,] xt, int i, int j);
       }
     }
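    The interface is now stateless: the data matrix is an argument of every call instead of being injected once via SetData. A minimal caller-side sketch (ours; BuildGram is a hypothetical helper) filling a symmetric Gram matrix against the revised interface:

        // Sketch: build the n-by-n Gram matrix for one data matrix x.
        static double[,] BuildGram(ICovarianceFunction cov, double[,] x) {
          int n = x.GetLength(0);
          var k = new double[n, n];
          for (int i = 0; i < n; i++)
            for (int j = i; j < n; j++)
              k[i, j] = k[j, i] = cov.GetCovariance(x, i, j); // symmetric, so mirror
          return k;
        }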
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanConst.cs

    r8451 r8585

         [Storable]
         private double c;
    +    public double Value { get { return c; } }
    +
         public int GetNumberOfParameters(int numberOfVariables) {
           return 1;
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanLinear.cs

    r8451 r8585

         [Storable]
         private double[] alpha;
    +    public double[] Weights {
    +      get {
    +        if (alpha == null) return new double[0];
    +        var copy = new double[alpha.Length];
    +        Array.Copy(alpha, copy, copy.Length);
    +        return copy;
    +      }
    +    }
         public int GetNumberOfParameters(int numberOfVariables) {
           return numberOfVariables;
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanProd.cs

    r8451 r8585

         public double[] GetGradients(int k, double[,] x) {
           double[] res = Enumerable.Repeat(1.0, x.GetLength(0)).ToArray();
    -      foreach (var f in factors) {
    -        var numParam = f.GetNumberOfParameters(numberOfVariables);
    -        if (k >= 0 && k < numParam) {
    +      // find index of factor for the given k
    +      int j = 0;
    +      while (k >= factors[j].GetNumberOfParameters(numberOfVariables)) {
    +        k -= factors[j].GetNumberOfParameters(numberOfVariables);
    +        j++;
    +      }
    +      for (int i = 0; i < factors.Count; i++) {
    +        var f = factors[i];
    +        if (i == j) {
               // multiply gradient
               var g = f.GetGradients(k, x);
    -          for (int i = 0; i < res.Length; i++) res[i] *= g[i];
    -          k -= numParam;
    +          for (int ii = 0; ii < res.Length; ii++) res[ii] *= g[ii];
             } else {
               // multiply mean
               var m = f.GetMean(x);
    -          for (int i = 0; i < res.Length; i++) res[i] *= m[i];
    -          k -= numParam;
    +          for (int ii = 0; ii < res.Length; ii++) res[ii] *= m[ii];
             }
           }
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs

    r8451 r8585

     #endregion

    -using System;
     using System.Collections.Generic;
     using System.Linq;
    …
         }

    +    public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
    +      return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum();
    +    }
    +
         public static double SqrDist(double x, double y) {
           double d = x - y;
    -      return Math.Max(d * d, 0.0);
    +      return d * d;
         }

    -    public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
    -      return x.Zip(y, SqrDist).Sum();
    +    public static double SqrDist(double[,] x, int i, int j, double scale = 1.0) {
    +      return SqrDist(x, i, x, j, scale);
    +    }
    +
    +    public static double SqrDist(double[,] x, int i, double[,] xt, int j, double scale = 1.0) {
    +      double ss = 0.0;
    +      for (int k = 0; k < x.GetLength(1); k++) {
    +        double d = x[i, k] - xt[j, k];
    +        ss += d * d;
    +      }
    +      return scale * scale * ss;
    +    }
    +
    +    public static double SqrDist(double[,] x, int i, int j, double[] scale) {
    +      return SqrDist(x, i, x, j, scale);
    +    }
    +
    +    public static double SqrDist(double[,] x, int i, double[,] xt, int j, double[] scale) {
    +      double ss = 0.0;
    +      for (int k = 0; k < x.GetLength(1); k++) {
    +        double d = x[i, k] - xt[j, k];
    +        ss += d * d * scale[k] * scale[k];
    +      }
    +      return ss;
    +    }
    +    public static double ScalarProd(double[,] x, int i, int j, double scale = 1.0) {
    +      return ScalarProd(x, i, x, j, scale);
    +    }
    +
    +    public static double ScalarProd(double[,] x, int i, double[,] xt, int j, double scale = 1.0) {
    +      double sum = 0.0;
    +      for (int k = 0; k < x.GetLength(1); k++) {
    +        sum += x[i, k] * xt[j, k];
    +      }
    +      return scale * scale * sum;
    +    }
    +    public static double ScalarProd(double[,] x, int i, int j, double[] scale) {
    +      return ScalarProd(x, i, x, j, scale);
    +    }
    +
    +    public static double ScalarProd(double[,] x, int i, double[,] xt, int j, double[] scale) {
    +      double sum = 0.0;
    +      for (int k = 0; k < x.GetLength(1); k++) {
    +        sum += x[i, k] * scale[k] * xt[j, k] * scale[k];
    +      }
    +      return sum;
         }

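    A small usage sketch (ours; the values are made up) of the new index-based overloads, which read matrix rows in place and so replace the old Util.GetRow + IEnumerable pattern:

        double[,] x = { { 1.0, 2.0 }, { 3.0, 4.0 } };
        double d  = Util.SqrDist(x, 0, 1);                      // (1-3)^2 + (2-4)^2 = 8
        double dw = Util.SqrDist(x, 0, 1, new[] { 0.5, 2.0 });  // 4*0.25 + 4*4 = 17
        double s  = Util.ScalarProd(x, 0, 1);                   // 1*3 + 2*4 = 11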
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r8451 r8585

         </Compile>
         <Compile Include="FixedDataAnalysisAlgorithm.cs" />
    +    <Compile Include="GaussianProcess\HyperParameter.cs" />
    +    <Compile Include="GaussianProcess\CovarianceFunction.cs" />
    +    <Compile Include="GaussianProcess\CovarianceRQArd.cs" />
    +    <Compile Include="GaussianProcess\CovarianceMaternIso.cs" />
    +    <Compile Include="GaussianProcess\CovarianceLinearArd.cs" />
    +    <Compile Include="GaussianProcess\CovarianceRQiso.cs" />
    +    <Compile Include="GaussianProcess\CovarianceNoise.cs" />
    +    <Compile Include="GaussianProcess\CovarianceConst.cs" />
         <Compile Include="GaussianProcess\MeanProd.cs" />
         <Compile Include="GaussianProcess\MeanSum.cs" />
    …
         <Compile Include="Interfaces\IGaussianProcessModel.cs" />
         <Compile Include="Interfaces\IGaussianProcessSolution.cs" />
    +    <Compile Include="Interfaces\INcaClassificationSolution.cs" />
    +    <Compile Include="Interfaces\INcaModel.cs" />
         <Compile Include="Interfaces\INearestNeighbourClassificationSolution.cs" />
         <Compile Include="Interfaces\INearestNeighbourRegressionSolution.cs" />
    …
         <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" />
         <Compile Include="Linear\MultinomialLogitModel.cs" />
    +    <Compile Include="Nca\Initialization\INcaInitializer.cs" />
    +    <Compile Include="Nca\Initialization\LdaInitializer.cs" />
    +    <Compile Include="Nca\Initialization\PcaInitializer.cs" />
    +    <Compile Include="Nca\Initialization\RandomInitializer.cs" />
    +    <Compile Include="Nca\Matrix.cs" />
    +    <Compile Include="Nca\NcaAlgorithm.cs" />
    +    <Compile Include="Nca\NcaClassificationSolution.cs" />
    +    <Compile Include="Nca\NcaModel.cs" />
         <Compile Include="NearestNeighbour\NearestNeighbourClassification.cs" />
         <Compile Include="NearestNeighbour\NearestNeighbourClassificationSolution.cs" />
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs

    r8451 r8585  
    2020#endregion
    2121
     22using System.Collections.Generic;
    2223using HeuristicLab.Problems.DataAnalysis;
    2324
     
    2829  public interface IGaussianProcessModel : IRegressionModel {
    2930    double NegativeLogLikelihood { get; }
     31    double SigmaNoise { get; }
    3032    IMeanFunction MeanFunction { get; }
    3133    ICovarianceFunction CovarianceFunction { get; }
    32     double[] GetHyperparameterGradients();
     34    double[] HyperparameterGradients { get; }
     35
     36    IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows);
    3337  }
    3438}
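The interface now exposes the noise standard deviation (SigmaNoise), the hyperparameter gradients as a property, and per-row predictive variances via GetEstimatedVariance. One common use of those variances is to turn point estimates into approximate 95% predictive intervals. A minimal sketch, assuming the caller already holds the means and variances as plain sequences (all names below are hypothetical, not part of the interface):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    public static class PredictiveIntervalDemo {
      // Given per-row predictive means and variances (as GetEstimatedVariance
      // would supply), print approximate 95% intervals: mean +/- 1.96 * sqrt(var).
      public static void PrintIntervals(IEnumerable<double> means, IEnumerable<double> variances) {
        foreach (var p in means.Zip(variances, (m, v) => new { m, v }))
          Console.WriteLine("{0:F3} +/- {1:F3}", p.m, 1.96 * Math.Sqrt(p.v));
      }

      public static void Main() {
        PrintIntervals(new[] { 1.0, 2.5 }, new[] { 0.04, 0.25 });
      }
    }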
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs

    r8139 r8585  
    111111      IClassificationProblemData problemData,
    112112      IEnumerable<int> rows) {
    113       return new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter);
     113      var model = new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter);
     114      model.SetAccuracyMaximizingThresholds(problemData);
     115      return model;
    114116    }
    115117  }
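With this change the factory no longer returns an uncalibrated model: SetAccuracyMaximizingThresholds picks class thresholds on the discriminant values so that accuracy on the problem data is maximized. A self-contained two-class sketch of that idea (this is an illustration of the concept, not HeuristicLab's actual implementation; the midpoint scan and all names are assumptions):

    using System;
    using System.Linq;

    public static class ThresholdDemo {
      // Scan candidate thresholds (midpoints between sorted discriminant values)
      // and return the one with the highest training accuracy.
      public static double BestThreshold(double[] scores, bool[] positive) {
        var sorted = scores.Distinct().OrderBy(s => s).ToArray();
        double bestT = double.NegativeInfinity, bestAcc = double.NegativeInfinity;
        for (int i = 0; i < sorted.Length - 1; i++) {
          double t = 0.5 * (sorted[i] + sorted[i + 1]);
          double acc = scores.Zip(positive, (s, y) => (s > t) == y ? 1.0 : 0.0).Average();
          if (acc > bestAcc) { bestAcc = acc; bestT = t; }
        }
        return bestT;
      }

      public static void Main() {
        var scores = new[] { -1.2, -0.3, 0.4, 1.1 };
        var labels = new[] { false, false, true, true };
        Console.WriteLine(BestThreshold(scores, labels)); // 0.05, separating the classes
      }
    }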
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs

    r7259 r8585  
    109109
    110110    public MultinomialLogitClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    111       return new MultinomialLogitClassificationSolution(problemData, this);
     111      return new MultinomialLogitClassificationSolution(new ClassificationProblemData(problemData), this);
    112112    }
    113113    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/Scaling.cs

    r8451 r8585  
    5959      return ds.GetDoubleValues(variable, rows).Select(x => (x - min) / (max - min));
    6060    }
     61
     62    public void GetScalingParameters(string variable, out double min, out double max) {
     63      min = scalingParameters[variable].Item1;
     64      max = scalingParameters[variable].Item2;
     65    }
    6166  }
    6267}
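The new GetScalingParameters accessor exposes the stored per-variable minimum and maximum, which is exactly what a caller needs to invert the [0,1] scaling applied in GetScaledValues above. A small standalone sketch of the forward and inverse transform, mirroring the (x - min) / (max - min) formula from the hunk:

    using System;

    public static class ScalingDemo {
      // Forward scaling as in Scaling.cs: (x - min) / (max - min).
      public static double Scale(double x, double min, double max) {
        return (x - min) / (max - min);
      }

      // Inverse transform a caller can build once min/max are exposed
      // via GetScalingParameters.
      public static double Unscale(double s, double min, double max) {
        return s * (max - min) + min;
      }

      public static void Main() {
        double min = 10, max = 30;
        double s = Scale(25, min, max);          // 0.75
        Console.WriteLine(Unscale(s, min, max)); // 25
      }
    }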
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassification.cs

    r8139 r8585  
    2121
    2222using System;
    23 using System.Collections.Generic;
    2423using System.Linq;
    2524using HeuristicLab.Common;
    2625using HeuristicLab.Core;
    2726using HeuristicLab.Data;
    28 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2927using HeuristicLab.Optimization;
     28using HeuristicLab.Parameters;
    3029using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3130using HeuristicLab.Problems.DataAnalysis;
    32 using HeuristicLab.Problems.DataAnalysis.Symbolic;
    33 using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    34 using HeuristicLab.Parameters;
    3531
    3632namespace HeuristicLab.Algorithms.DataAnalysis {
     
    8480
    8581    public static IClassificationSolution CreateNearestNeighbourClassificationSolution(IClassificationProblemData problemData, int k) {
    86       Dataset dataset = problemData.Dataset;
    87       string targetVariable = problemData.TargetVariable;
    88       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    89       IEnumerable<int> rows = problemData.TrainingIndices;
    90       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    91       if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    92         throw new NotSupportedException("Nearest neighbour classification does not support NaN or infinity values in the input dataset.");
     82      var problemDataClone = (IClassificationProblemData)problemData.Clone();
     83      return new NearestNeighbourClassificationSolution(problemDataClone, Train(problemDataClone, k));
     84    }
    9385
    94       alglib.nearestneighbor.kdtree kdtree = new alglib.nearestneighbor.kdtree();
    95 
    96       int nRows = inputMatrix.GetLength(0);
    97       int nFeatures = inputMatrix.GetLength(1) - 1;
    98       double[] classValues = dataset.GetDoubleValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
    99       int nClasses = classValues.Count();
    100       // map original class values to values [0..nClasses-1]
    101       Dictionary<double, double> classIndices = new Dictionary<double, double>();
    102       for (int i = 0; i < nClasses; i++) {
    103         classIndices[classValues[i]] = i;
    104       }
    105       for (int row = 0; row < nRows; row++) {
    106         inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
    107       }
    108       alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdtree);
    109       var problemDataClone = (IClassificationProblemData) problemData.Clone();
    110       return new NearestNeighbourClassificationSolution(problemDataClone, new NearestNeighbourModel(kdtree, k, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()));
     86    public static INearestNeighbourModel Train(IClassificationProblemData problemData, int k) {
     87      return new NearestNeighbourModel(problemData.Dataset,
     88        problemData.TrainingIndices,
     89        k,
     90        problemData.TargetVariable,
     91        problemData.AllowedInputVariables,
     92        problemData.ClassValues.ToArray());
    11193    }
    11294    #endregion
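The refactoring moves kd-tree construction and class-index mapping out of this factory and into the NearestNeighbourModel constructor; the new Train method simply forwards the problem data, and the regression counterpart below follows the same shape. For reference, here is a naive, self-contained k-nearest-neighbour classifier by exhaustive search, which computes the same answer the kd-tree-backed model produces (sample data made up; this is a conceptual sketch, not the ALGLIB-based code):

    using System;
    using System.Linq;

    public static class KnnDemo {
      // Classify a query point by majority vote among its k nearest
      // training rows, with squared Euclidean distance.
      public static double Classify(double[][] train, double[] labels, double[] query, int k) {
        return train
          .Select((row, i) => new {
            Dist = row.Zip(query, (a, b) => (a - b) * (a - b)).Sum(),
            Label = labels[i]
          })
          .OrderBy(p => p.Dist)
          .Take(k)
          .GroupBy(p => p.Label)
          .OrderByDescending(g => g.Count())
          .First().Key;
      }

      public static void Main() {
        var train = new[] { new[] { 0.0, 0.0 }, new[] { 0.1, 0.2 }, new[] { 5.0, 5.0 } };
        var labels = new[] { 0.0, 0.0, 1.0 };
        Console.WriteLine(Classify(train, labels, new[] { 0.2, 0.1 }, 3)); // 0 (majority of the 3 nearest)
      }
    }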
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs

    r7294 r8585  
    3333  /// </summary>
    3434  [StorableClass]
    35   [Item("NearestNeighbourModel", "Represents a neural network for regression and classification.")]
     35  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
    3636  public sealed class NearestNeighbourModel : NamedItem, INearestNeighbourModel {
    3737
     
    5656    [Storable]
    5757    private int k;
     58
    5859    [StorableConstructor]
    5960    private NearestNeighbourModel(bool deserializing)
     
    9596        this.classValues = (double[])original.classValues.Clone();
    9697    }
    97     public NearestNeighbourModel(alglib.nearestneighbor.kdtree kdTree, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    98       : base() {
    99       this.name = ItemName;
    100       this.description = ItemDescription;
    101       this.kdTree = kdTree;
     98    public NearestNeighbourModel(Dataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) {
     99      Name = ItemName;
     100      Description = ItemDescription;
    102101      this.k = k;
    103102      this.targetVariable = targetVariable;
    104103      this.allowedInputVariables = allowedInputVariables.ToArray();
    105       if (classValues != null)
     104
     105      var inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
     106                                   allowedInputVariables.Concat(new string[] { targetVariable }),
     107                                   rows);
     108
     109      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
     110        throw new NotSupportedException(
     111          "Nearest neighbour classification does not support NaN or infinity values in the input dataset.");
     112
     113      this.kdTree = new alglib.nearestneighbor.kdtree();
     114
     115      var nRows = inputMatrix.GetLength(0);
     116      var nFeatures = inputMatrix.GetLength(1) - 1;
     117
     118      if (classValues != null) {
    106119        this.classValues = (double[])classValues.Clone();
     120        int nClasses = classValues.Length;
     121        // map original class values to values [0..nClasses-1]
     122        var classIndices = new Dictionary<double, double>();
     123        for (int i = 0; i < nClasses; i++)
     124          classIndices[classValues[i]] = i;
     125
     126        for (int row = 0; row < nRows; row++) {
     127          inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
     128        }
     129      }
     130      alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
    107131    }
    108132
     
    140164
    141165    public IEnumerable<double> GetEstimatedClassValues(Dataset dataset, IEnumerable<int> rows) {
     166      if (classValues == null) throw new InvalidOperationException("No class values are defined.");
    142167      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
    143168
     
    177202
    178203    public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    179       return new NearestNeighbourRegressionSolution(problemData, this);
     204      return new NearestNeighbourRegressionSolution(new RegressionProblemData(problemData), this);
    180205    }
    181206    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    183208    }
    184209    public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    185       return new NearestNeighbourClassificationSolution(problemData, this);
     210      return new NearestNeighbourClassificationSolution(new ClassificationProblemData(problemData), this);
    186211    }
    187212    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
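Inside the relocated constructor, arbitrary class labels are remapped to contiguous indices [0..nClasses-1] before the kd-tree is built, and predictions map indices back to the original labels. A standalone sketch of that round trip, using the same Dictionary-based mapping as the hunk above:

    using System;
    using System.Collections.Generic;

    public static class ClassIndexDemo {
      public static void Main() {
        // Arbitrary class labels as they appear in the dataset ...
        double[] classValues = { 2.0, 5.0, 9.0 };

        // ... mapped to contiguous indices [0..nClasses-1] for the kd-tree:
        var classIndices = new Dictionary<double, double>();
        for (int i = 0; i < classValues.Length; i++)
          classIndices[classValues[i]] = i;

        double encoded = classIndices[5.0];         // 1, stored in the input matrix
        double decoded = classValues[(int)encoded]; // 5.0, recovered when predicting
        Console.WriteLine("{0} -> {1}", encoded, decoded);
      }
    }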
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegression.cs

    r8139 r8585  
    2121
    2222using System;
    23 using System.Collections.Generic;
    24 using System.Linq;
    2523using HeuristicLab.Common;
    2624using HeuristicLab.Core;
    2725using HeuristicLab.Data;
    28 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2926using HeuristicLab.Optimization;
     27using HeuristicLab.Parameters;
    3028using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3129using HeuristicLab.Problems.DataAnalysis;
    32 using HeuristicLab.Problems.DataAnalysis.Symbolic;
    33 using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    34 using HeuristicLab.Parameters;
    3530
    3631namespace HeuristicLab.Algorithms.DataAnalysis {
     
    8479
    8580    public static IRegressionSolution CreateNearestNeighbourRegressionSolution(IRegressionProblemData problemData, int k) {
    86       Dataset dataset = problemData.Dataset;
    87       string targetVariable = problemData.TargetVariable;
    88       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    89       IEnumerable<int> rows = problemData.TrainingIndices;
    90       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    91       if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    92         throw new NotSupportedException("Nearest neighbour regression does not support NaN or infinity values in the input dataset.");
     81      var clonedProblemData = (IRegressionProblemData)problemData.Clone();
     82      return new NearestNeighbourRegressionSolution(clonedProblemData, Train(problemData, k));
     83    }
    9384
    94       alglib.nearestneighbor.kdtree kdtree = new alglib.nearestneighbor.kdtree();
    95 
    96       int nRows = inputMatrix.GetLength(0);
    97 
    98       alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdtree);
    99 
    100       return new NearestNeighbourRegressionSolution((IRegressionProblemData)problemData.Clone(), new NearestNeighbourModel(kdtree, k, targetVariable, allowedInputVariables));
     85    public static INearestNeighbourModel Train(IRegressionProblemData problemData, int k) {
     86      return new NearestNeighbourModel(problemData.Dataset,
     87        problemData.TrainingIndices,
     88        k,
     89        problemData.TargetVariable,
     90        problemData.AllowedInputVariables);
    10191    }
    10292    #endregion
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs

    r7694 r8585  
    130130
    131131    public INeuralNetworkEnsembleRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    132       return new NeuralNetworkEnsembleRegressionSolution(problemData, this);
     132      return new NeuralNetworkEnsembleRegressionSolution(new RegressionEnsembleProblemData(problemData), this);
    133133    }
    134134    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    136136    }
    137137    public INeuralNetworkEnsembleClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    138       return new NeuralNetworkEnsembleClassificationSolution(problemData, this);
     138      return new NeuralNetworkEnsembleClassificationSolution(new ClassificationEnsembleProblemData(problemData), this);
    139139    }
    140140    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs

    r7259 r8585  
    138138
    139139    public INeuralNetworkRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    140       return new NeuralNetworkRegressionSolution(problemData, this);
     140      return new NeuralNetworkRegressionSolution(new RegressionProblemData(problemData), this);
    141141    }
    142142    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    144144    }
    145145    public INeuralNetworkClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    146       return new NeuralNetworkClassificationSolution(problemData, this);
     146      return new NeuralNetworkClassificationSolution(new ClassificationProblemData(problemData), this);
    147147    }
    148148    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/Plugin.cs.frame

    r8451 r8585  
    2828  [Plugin("HeuristicLab.Algorithms.DataAnalysis", "Provides wrappers for data analysis algorithms implemented in external libraries (linear regression, linear discriminant analysis, k-means clustering, support vector classification and regression)", "3.4.3.$WCREV$")]
    2929  [PluginFile("HeuristicLab.Algorithms.DataAnalysis-3.4.dll", PluginFileType.Assembly)]
    30   [PluginDependency("HeuristicLab.ALGLIB", "3.5.0")]
     30  [PluginDependency("HeuristicLab.ALGLIB", "3.6.0")]
    3131  [PluginDependency("HeuristicLab.Algorithms.GradientDescent", "3.3")]
    3232  [PluginDependency("HeuristicLab.Analysis", "3.3")]
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs

    r7259 r8585  
    132132
    133133    public IRandomForestRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    134       return new RandomForestRegressionSolution(problemData, this);
     134      return new RandomForestRegressionSolution(new RegressionProblemData(problemData), this);
    135135    }
    136136    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    138138    }
    139139    public IRandomForestClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    140       return new RandomForestClassificationSolution(problemData, this);
     140      return new RandomForestClassificationSolution(new ClassificationProblemData(problemData), this);
    141141    }
    142142    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.Mono/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineModel.cs

    r8323 r8585  
    124124    }
    125125    public SupportVectorRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    126       return new SupportVectorRegressionSolution(this, problemData);
     126      return new SupportVectorRegressionSolution(this, new RegressionProblemData(problemData));
    127127    }
    128128    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    153153
    154154    public SupportVectorClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    155       return new SupportVectorClassificationSolution(this, problemData);
     155      return new SupportVectorClassificationSolution(this, new ClassificationProblemData(problemData));
    156156    }
    157157    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {