
Changeset: r8742
Timestamp: 10/05/12 11:58:17
Author: mkommend
Message: #1081: Merged trunk changes and fixed compilation errors due to the merge.
Location: branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4
Files: 43 edited, 14 copied

Legend:
  unchanged lines are unprefixed; added lines are prefixed with "+", removed lines with "-";
  "@@ -m +n @@" marks a hunk starting at line m of the old revision and line n of the new revision.
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/CrossValidation.cs (r7738 → r8742)

    @@ -44 +44 @@

           executionState = ExecutionState.Stopped;
    -      runs = new RunCollection();
    +      runs = new RunCollection { AlgorithmName = name };
           runsCounter = 0;

    @@ -246 +246 @@
         #endregion

    +    protected override void OnNameChanged() {
    +      base.OnNameChanged();
    +      Runs.AlgorithmName = Name;
    +    }
    +
         public void Prepare() {
           if (ExecutionState == ExecutionState.Started)
    @@ -447 +452 @@
             problemDataClone.TestPartition.Start = SamplesStart.Value; problemDataClone.TestPartition.End = SamplesEnd.Value;
             // clone models
    -        var ensembleSolution = new ClassificationEnsembleSolution(
    -          solutions.Value.Select(x => cloner.Clone(x.Model)),
    -          problemDataClone,
    -          solutions.Value.Select(x => cloner.Clone(x.ProblemData.TrainingPartition)),
    -          solutions.Value.Select(x => cloner.Clone(x.ProblemData.TestPartition)));
    +        var ensembleSolution = new ClassificationEnsembleSolution(problemDataClone);
    +        ensembleSolution.AddClassificationSolutions(solutions.Value);

             aggregatedResults.Add(new Result(solutions.Key + " (ensemble)", ensembleSolution));
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceConst.cs (r8477 → r8742)

    @@ -21 +21 @@

     using System;
    +using System.Collections.Generic;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    @@ -29 +31 @@
       [Item(Name = "CovarianceConst",
         Description = "Constant covariance function for Gaussian processes.")]
    -  public class CovarianceConst : Item, ICovarianceFunction {
    +  public sealed class CovarianceConst : ParameterizedNamedItem, ICovarianceFunction {
    +
         [Storable]
    -    private double sf2;
    -    public double Scale { get { return sf2; } }
    +    private double scale;
    +    [Storable]
    +    private readonly HyperParameter<DoubleValue> scaleParameter;
    +    public IValueParameter<DoubleValue> ScaleParameter {
    +      get { return scaleParameter; }
    +    }

         [StorableConstructor]
    -    protected CovarianceConst(bool deserializing)
    +    private CovarianceConst(bool deserializing)
           : base(deserializing) {
         }

    -    protected CovarianceConst(CovarianceConst original, Cloner cloner)
    +    private CovarianceConst(CovarianceConst original, Cloner cloner)
           : base(original, cloner) {
    -      this.sf2 = original.sf2;
    +      this.scaleParameter = cloner.Clone(original.scaleParameter);
    +      this.scale = original.scale;
    +
    +      RegisterEvents();
         }

         public CovarianceConst()
           : base() {
    +      Name = ItemName;
    +      Description = ItemDescription;
    +
    +      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the constant covariance function.");
    +      Parameters.Add(scaleParameter);
    +      RegisterEvents();
         }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
    +    }
    +
    +    // caching
    +    private void RegisterEvents() {
    +      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
    +    }
    +

         public override IDeepCloneable Clone(Cloner cloner) {
    @@ -53 +80 @@

         public int GetNumberOfParameters(int numberOfVariables) {
    -      return 1;
    +      return scaleParameter.Fixed ? 0 : 1;
         }

         public void SetParameter(double[] hyp) {
    -      this.sf2 = Math.Exp(2 * hyp[0]);
    -    }
    -    public void SetData(double[,] x) {
    -      // nothing to do
    +      if (!scaleParameter.Fixed && hyp.Length == 1) {
    +        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
    +      } else {
    +        throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceConst", "hyp");
    +      }
         }

    -
    -    public void SetData(double[,] x, double[,] xt) {
    -      // nothing to do
    +    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      return scale;
         }

    -    public double GetCovariance(int i, int j) {
    -      return sf2;
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      yield return 2.0 * scale;
         }

    -    public double GetGradient(int i, int j, int k) {
    -      if (k != 0) throw new ArgumentException("CovarianceConst has only one hyperparameters", "k");
    -      return 2 * sf2;
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    +      return scale;
         }
       }
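
    Note: the hyperparameter vector handed to SetParameter is on a log scale; the stored scale is recovered as Math.Exp(2 * hyp[0]), so the optimizer can search an unconstrained space while the covariance always sees a positive value. A minimal standalone sketch of that convention (illustrative names, not HeuristicLab API):

        // Illustrative only: LM-BFGS searches over an unconstrained raw value h;
        // the covariance function stores the positive scale exp(2*h).
        using System;

        static class LogScaleDemo {
          static void Main() {
            double h = 0.5;                  // raw (log-scale) hyperparameter
            double scale = Math.Exp(2 * h);  // value pushed into the Scale parameter
            Console.WriteLine(scale);        // e^1 ≈ 2.718..., always > 0
          }
        }
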
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs (r8477 → r8742)

    @@ -21 +21 @@

     using System;
    +using System.Collections.Generic;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    @@ -28 +29 @@
       [StorableClass]
       [Item(Name = "CovarianceLinear", Description = "Linear covariance function for Gaussian processes.")]
    -  public class CovarianceLinear : Item, ICovarianceFunction {
    -    [Storable]
    -    private double[,] x;
    -    [Storable]
    -    private double[,] xt;
    -
    -    private double[,] k;
    -    private bool symmetric;
    -
    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return 0;
    -    }
    +  public sealed class CovarianceLinear : Item, ICovarianceFunction {
         [StorableConstructor]
    -    protected CovarianceLinear(bool deserializing) : base(deserializing) { }
    -    protected CovarianceLinear(CovarianceLinear original, Cloner cloner)
    +    private CovarianceLinear(bool deserializing) : base(deserializing) { }
    +    private CovarianceLinear(CovarianceLinear original, Cloner cloner)
           : base(original, cloner) {
    -      if (original.x != null) {
    -        this.x = new double[original.x.GetLength(0), original.x.GetLength(1)];
    -        Array.Copy(original.x, this.x, x.Length);
    -
    -        this.xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
    -        Array.Copy(original.xt, this.xt, xt.Length);
    -
    -        this.k = new double[original.k.GetLength(0), original.k.GetLength(1)];
    -        Array.Copy(original.k, this.k, k.Length);
    -      }
    -      this.symmetric = original.symmetric;
         }
         public CovarianceLinear()
    @@ -64 +43 @@
         }

    +    public int GetNumberOfParameters(int numberOfVariables) {
    +      return 0;
    +    }
    +
         public void SetParameter(double[] hyp) {
           if (hyp.Length > 0) throw new ArgumentException("No hyperparameters are allowed for the linear covariance function.");
    -      k = null;
         }

    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    -      this.symmetric = true;
    +    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      return Util.ScalarProd(x, i, j, 1, columnIndices);
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      this.x = x;
    -      this.xt = xt;
    -      this.symmetric = false;
    -
    -      k = null;
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      yield break;
         }

    -    public double GetCovariance(int i, int j) {
    -      if (k == null) CalculateInnerProduct();
    -      return k[i, j];
    -    }
    -
    -    public double GetGradient(int i, int j, int k) {
    -      throw new NotSupportedException("CovarianceLinear does not have hyperparameters.");
    -    }
    -
    -
    -    private void CalculateInnerProduct() {
    -      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      int cols = xt.GetLength(0);
    -      k = new double[rows, cols];
    -      if (symmetric) {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = i; j < cols; j++) {
    -            k[i, j] = Util.ScalarProd(Util.GetRow(x, i),
    -                                      Util.GetRow(x, j));
    -            k[j, i] = k[i, j];
    -          }
    -        }
    -      } else {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = 0; j < cols; j++) {
    -            k[i, j] = Util.ScalarProd(Util.GetRow(x, i),
    -                                      Util.GetRow(xt, j));
    -          }
    -        }
    -      }
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    +      return Util.ScalarProd(x, i, xt, j);
         }
       }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceNoise.cs (r8477 → r8742)

    @@ -21 +21 @@

     using System;
    +using System.Collections.Generic;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    @@ -29 +31 @@
       [Item(Name = "CovarianceNoise",
         Description = "Noise covariance function for Gaussian processes.")]
    -  public class CovarianceNoise : Item, ICovarianceFunction {
    +  public sealed class CovarianceNoise : ParameterizedNamedItem, ICovarianceFunction {
    +
    +
         [Storable]
         private double sf2;
    -    public double Scale { get { return sf2; } }
    +    [Storable]
    +    private readonly HyperParameter<DoubleValue> scaleParameter;
    +    public IValueParameter<DoubleValue> ScaleParameter {
    +      get { return scaleParameter; }
    +    }

         [StorableConstructor]
    -    protected CovarianceNoise(bool deserializing)
    +    private CovarianceNoise(bool deserializing)
           : base(deserializing) {
         }

    -    protected CovarianceNoise(CovarianceNoise original, Cloner cloner)
    +    private CovarianceNoise(CovarianceNoise original, Cloner cloner)
           : base(original, cloner) {
    +      this.scaleParameter = cloner.Clone(original.scaleParameter);
           this.sf2 = original.sf2;
    +      RegisterEvents();
         }

         public CovarianceNoise()
           : base() {
    +      Name = ItemName;
    +      Description = ItemDescription;
    +
    +      this.scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of noise.");
    +      Parameters.Add(this.scaleParameter);
    +
    +      RegisterEvents();
         }

    @@ -52 +69 @@
         }

    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
    +    }
    +
    +    private void RegisterEvents() {
    +      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { sf2 = scaleParameter.Value.Value; });
    +    }
    +
         public int GetNumberOfParameters(int numberOfVariables) {
    -      return 1;
    +      return scaleParameter.Fixed ? 0 : 1;
         }

         public void SetParameter(double[] hyp) {
    -      this.sf2 = Math.Exp(2 * hyp[0]);
    -    }
    -    public void SetData(double[,] x) {
    -      // nothing to do
    +      if (!scaleParameter.Fixed) {
    +        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[0])));
    +      } else {
    +        if (hyp.Length > 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovarianceNoise", "hyp");
    +      }
         }

    -
    -    public void SetData(double[,] x, double[,] xt) {
    -      // nothing to do
    +    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      return sf2;
         }

    -    public double GetCovariance(int i, int j) {
    -      if (i == j) return sf2;
    -      else return 0.0;
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      yield return 2 * sf2;
         }

    -    public double GetGradient(int i, int j, int k) {
    -      if (k != 0) throw new ArgumentException("CovarianceConst has only one hyperparameters", "k");
    -      if (i == j)
    -        return 2 * sf2;
    -      else
    -        return 0.0;
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    +      return 0.0;
         }
       }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovariancePeriodic.cs (r8477 → r8742)

    @@ -21 +21 @@

     using System;
    +using System.Collections.Generic;
    +using System.Linq;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    @@ -28 +31 @@
       [StorableClass]
       [Item(Name = "CovariancePeriodic", Description = "Periodic covariance function for Gaussian processes.")]
    -  public class CovariancePeriodic : Item, ICovarianceFunction {
    +  public sealed class CovariancePeriodic : ParameterizedNamedItem, ICovarianceFunction {
    +
         [Storable]
    -    private double[,] x;
    +    private double scale;
         [Storable]
    -    private double[,] xt;
    +    private readonly HyperParameter<DoubleValue> scaleParameter;
    +    public IValueParameter<DoubleValue> ScaleParameter {
    +      get { return scaleParameter; }
    +    }
    +
         [Storable]
    -    private double sf2;
    -    public double Scale { get { return sf2; } }
    +    private double inverseLength;
         [Storable]
    -    private double l;
    -    public double Length { get { return l; } }
    +    private readonly HyperParameter<DoubleValue> inverseLengthParameter;
    +    public IValueParameter<DoubleValue> InverseLengthParameter {
    +      get { return inverseLengthParameter; }
    +    }
    +
         [Storable]
    -    private double p;
    -    public double Period { get { return p; } }
    +    private double period;
    +    [Storable]
    +    private readonly HyperParameter<DoubleValue> periodParameter;
    +    public IValueParameter<DoubleValue> PeriodParameter {
    +      get { return periodParameter; }
    +    }

    -    private bool symmetric;

    -    private double[,] sd;
    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return 3;
    +    [StorableConstructor]
    +    private CovariancePeriodic(bool deserializing) : base(deserializing) { }
    +    private CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
    +      : base(original, cloner) {
    +      this.scaleParameter = cloner.Clone(original.scaleParameter);
    +      this.inverseLengthParameter = cloner.Clone(original.inverseLengthParameter);
    +      this.periodParameter = cloner.Clone(original.periodParameter);
    +      this.scale = original.scale;
    +      this.inverseLength = original.inverseLength;
    +      this.period = original.period;
    +
    +      RegisterEvents();
         }
    -    [StorableConstructor]
    -    protected CovariancePeriodic(bool deserializing) : base(deserializing) { }
    -    protected CovariancePeriodic(CovariancePeriodic original, Cloner cloner)
    -      : base(original, cloner) {
    -      if (original.x != null) {
    -        x = new double[original.x.GetLength(0), original.x.GetLength(1)];
    -        Array.Copy(original.x, x, x.Length);
    -        xt = new double[original.xt.GetLength(0), original.xt.GetLength(1)];
    -        Array.Copy(original.xt, xt, xt.Length);
    -      }
    -      sf2 = original.sf2;
    -      l = original.l;
    -      p = original.p;
    -      symmetric = original.symmetric;
    -    }
    +
         public CovariancePeriodic()
           : base() {
    +      Name = ItemName;
    +      Description = ItemDescription;
    +
    +      scaleParameter = new HyperParameter<DoubleValue>("Scale", "The scale of the periodic covariance function.");
    +      inverseLengthParameter = new HyperParameter<DoubleValue>("InverseLength", "The inverse length parameter for the periodic covariance function.");
    +      periodParameter = new HyperParameter<DoubleValue>("Period", "The period parameter for the periodic covariance function.");
    +      Parameters.Add(scaleParameter);
    +      Parameters.Add(inverseLengthParameter);
    +      Parameters.Add(periodParameter);
    +
    +      RegisterEvents();
    +    }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
         }

    @@ -72 +96 @@
         }

    -    public void SetParameter(double[] hyp) {
    -      if (hyp.Length != 3) throw new ArgumentException();
    -      this.l = Math.Exp(hyp[0]);
    -      this.p = Math.Exp(hyp[1]);
    -      this.sf2 = Math.Exp(2 * hyp[2]);
    -      // sf2 = Math.Min(10E6, sf2); // upper limit for the scale
    -
    -      sd = null;
    -    }
    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    -      this.symmetric = true;
    +    // caching
    +    private void RegisterEvents() {
    +      Util.AttachValueChangeHandler<DoubleValue, double>(scaleParameter, () => { scale = scaleParameter.Value.Value; });
    +      Util.AttachValueChangeHandler<DoubleValue, double>(inverseLengthParameter, () => { inverseLength = inverseLengthParameter.Value.Value; });
    +      Util.AttachValueChangeHandler<DoubleValue, double>(periodParameter, () => { period = periodParameter.Value.Value; });
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      this.x = x;
    -      this.xt = xt;
    -      this.symmetric = false;
    -
    -      sd = null;
    +    public int GetNumberOfParameters(int numberOfVariables) {
    +      return
    +        (new[] { scaleParameter, inverseLengthParameter, periodParameter }).Count(p => !p.Fixed);
         }

    -    public double GetCovariance(int i, int j) {
    -      if (sd == null) CalculateSquaredDistances();
    -      double k = sd[i, j];
    -      k = Math.PI * k / p;
    -      k = Math.Sin(k) / l;
    +    public void SetParameter(double[] hyp) {
    +      int i = 0;
    +      if (!inverseLengthParameter.Fixed) {
    +        inverseLengthParameter.SetValue(new DoubleValue(1.0 / Math.Exp(hyp[i])));
    +        i++;
    +      }
    +      if (!periodParameter.Fixed) {
    +        periodParameter.SetValue(new DoubleValue(Math.Exp(hyp[i])));
    +        i++;
    +      }
    +      if (!scaleParameter.Fixed) {
    +        scaleParameter.SetValue(new DoubleValue(Math.Exp(2 * hyp[i])));
    +        i++;
    +      }
    +      if (hyp.Length != i) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for CovariancePeriod", "hyp");
    +    }
    +
    +    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      double k = i == j ? 0.0 : GetDistance(x, x, i, j, columnIndices);
    +      k = Math.PI * k / period;
    +      k = Math.Sin(k) * inverseLength;
           k = k * k;

    -      return sf2 * Math.Exp(-2.0 * k);
    +      return scale * Math.Exp(-2.0 * k);
         }

    -    public double GetGradient(int i, int j, int k) {
    -      double v = Math.PI * sd[i, j] / p;
    -      switch (k) {
    -        case 0: {
    -            double newK = Math.Sin(v) / l;
    -            newK = newK * newK;
    -            return 4 * sf2 * Math.Exp(-2 * newK) * newK;
    -          }
    -        case 1: {
    -            double r = Math.Sin(v) / l;
    -            return 4 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
    -          }
    -        case 2: {
    -            double newK = Math.Sin(v) / l;
    -            newK = newK * newK;
    -            return 2 * sf2 * Math.Exp(-2 * newK);
    -
    -          }
    -        default: {
    -            throw new ArgumentException("CovariancePeriodic only has three hyperparameters.", "k");
    -          }
    -      }
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      double v = i == j ? 0.0 : Math.PI * GetDistance(x, x, i, j, columnIndices) / period;
    +      double gradient = Math.Sin(v) * inverseLength;
    +      gradient *= gradient;
    +      yield return 4.0 * scale * Math.Exp(-2.0 * gradient) * gradient;
    +      double r = Math.Sin(v) * inverseLength;
    +      yield return 4.0 * scale * inverseLength * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
    +      yield return 2.0 * scale * Math.Exp(-2 * gradient);
         }

    -    private void CalculateSquaredDistances() {
    -      if (x.GetLength(1) != xt.GetLength(1)) throw new InvalidOperationException();
    -      int rows = x.GetLength(0);
    -      int cols = xt.GetLength(0);
    -      sd = new double[rows, cols];
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    +      double k = GetDistance(x, xt, i, j, columnIndices);
    +      k = Math.PI * k / period;
    +      k = Math.Sin(k) * inverseLength;
    +      k = k * k;

    -      if (symmetric) {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = i; j < cols; j++) {
    -            sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(x, j)));
    -            sd[j, i] = sd[i, j];
    -          }
    -        }
    -      } else {
    -        for (int i = 0; i < rows; i++) {
    -          for (int j = 0; j < cols; j++) {
    -            sd[i, j] = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, j)));
    -          }
    -        }
    -      }
    +      return scale * Math.Exp(-2.0 * k);
    +    }
    +
    +    private double GetDistance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    +      return Math.Sqrt(Util.SqrDist(x, i, xt, j, 1, columnIndices));
         }
       }
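
    Note: read together, GetCovariance and GetCrossCovariance above implement the standard periodic kernel, with the length scale stored as its inverse; in this class's notation:

        k(x_i, x_j) = scale * exp(-2 * (sin(pi * d(x_i, x_j) / period) * inverseLength)^2)

    where d is the Euclidean distance over the selected columns. The three yield statements in GetGradient return the partial derivatives with respect to the (log-scale) inverse length, period, and scale parameters, in the same order SetParameter consumes them.
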
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs (r8477 → r8742)

    @@ -31 +31 @@
       [Item(Name = "CovarianceSum",
         Description = "Sum covariance function for Gaussian processes.")]
    -  public class CovarianceSum : Item, ICovarianceFunction {
    +  public sealed class CovarianceSum : Item, ICovarianceFunction {
         [Storable]
         private ItemList<ICovarianceFunction> terms;
    @@ -42 +42 @@

         [StorableConstructor]
    -    protected CovarianceSum(bool deserializing)
    +    private CovarianceSum(bool deserializing)
           : base(deserializing) {
         }

    -    protected CovarianceSum(CovarianceSum original, Cloner cloner)
    +    private CovarianceSum(CovarianceSum original, Cloner cloner)
           : base(original, cloner) {
           this.terms = cloner.Clone(original.terms);
           this.numberOfVariables = original.numberOfVariables;
    -      AttachEventHandlers();
         }

    @@ -56 +55 @@
           : base() {
           this.terms = new ItemList<ICovarianceFunction>();
    -      AttachEventHandlers();
    -    }
    -
    -    private void AttachEventHandlers() {
    -      this.terms.CollectionReset += (sender, args) => ClearCache();
    -      this.terms.ItemsAdded += (sender, args) => ClearCache();
    -      this.terms.ItemsRemoved += (sender, args) => ClearCache();
    -      this.terms.ItemsReplaced += (sender, args) => ClearCache();
    -      this.terms.ItemsMoved += (sender, args) => ClearCache();
         }

    @@ -77 +67 @@

         public void SetParameter(double[] hyp) {
    +      if (terms.Count == 0) throw new ArgumentException("At least one term is needed for sum covariance function.");
           int offset = 0;
           foreach (var t in terms) {
    @@ -84 +75 @@
           }
         }
    -    public void SetData(double[,] x) {
    -      SetData(x, x);
    +
    +    public double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      return terms.Select(t => t.GetCovariance(x, i, j, columnIndices)).Sum();
         }

    -    public void SetData(double[,] x, double[,] xt) {
    -      foreach (var t in terms) {
    -        t.SetData(x, xt);
    -      }
    +    public IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices) {
    +      return terms.Select(t => t.GetGradient(x, i, j, columnIndices)).Aggregate(Enumerable.Concat);
         }

    -    public double GetCovariance(int i, int j) {
    -      return terms.Select(t => t.GetCovariance(i, j)).Sum();
    -    }
    -
    -    private Dictionary<int, Tuple<int, int>> cachedParameterMap;
    -    public double GetGradient(int i, int j, int k) {
    -      if (cachedParameterMap == null) {
    -        CalculateParameterMap();
    -      }
    -      int ti = cachedParameterMap[k].Item1;
    -      k = cachedParameterMap[k].Item2;
    -      return terms[ti].GetGradient(i, j, k);
    -    }
    -    private void ClearCache() {
    -      cachedParameterMap = null;
    -    }
    -
    -    private void CalculateParameterMap() {
    -      cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
    -      int k = 0;
    -      for (int ti = 0; ti < terms.Count; ti++) {
    -        for (int ti_k = 0; ti_k < terms[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
    -          cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
    -        }
    -      }
    +    public double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices) {
    +      return terms.Select(t => t.GetCrossCovariance(x, xt, i, j, columnIndices)).Sum();
         }
       }
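
    Note: GetGradient now concatenates the per-term gradient sequences lazily instead of maintaining the removed parameter-map cache; Aggregate(Enumerable.Concat) folds the sequences together in term order. A small self-contained sketch of that LINQ pattern:

        // Illustrative only: flattening per-term gradient sequences in term order.
        using System;
        using System.Collections.Generic;
        using System.Linq;

        static class ConcatDemo {
          static void Main() {
            var perTermGradients = new List<IEnumerable<double>> {
              new[] { 1.0, 2.0 },  // term 0 contributes two gradients
              new double[0],       // term 1 has no free parameters
              new[] { 3.0 }        // term 2 contributes one gradient
            };
            var flat = perTermGradients.Aggregate(Enumerable.Concat);
            Console.WriteLine(string.Join(", ", flat));  // 1, 2, 3
          }
        }
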
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs (r8477 → r8742)

    @@ -91 +91 @@
           var rand = RandomParameter.ActualValue;
           for (int i = 0; i < r.Length; i++)
    -        r[i] = rand.NextDouble() * 2 - 1;
    +        r[i] = rand.NextDouble() * 10 - 5;

           HyperparameterParameter.ActualValue = r;
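
    Note: because most hyperparameters are consumed on a log scale (see the Math.Exp calls in the covariance and mean functions), widening the initialization from rand * 2 - 1 to rand * 10 - 5 changes the initial search range from roughly e^±1 ≈ [0.37, 2.72] to e^±5 ≈ [0.0067, 148] on the natural scale.
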
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs (r8477 → r8742)

    @@ -42 +42 @@

         [Storable]
    +    private double[] hyperparameterGradients;
    +    public double[] HyperparameterGradients {
    +      get {
    +        var copy = new double[hyperparameterGradients.Length];
    +        Array.Copy(hyperparameterGradients, copy, copy.Length);
    +        return copy;
    +      }
    +    }
    +
    +    [Storable]
         private ICovarianceFunction covarianceFunction;
         public ICovarianceFunction CovarianceFunction {
    @@ -66 +76 @@
         [Storable]
         private double sqrSigmaNoise;
    +    public double SigmaNoise {
    +      get { return Math.Sqrt(sqrSigmaNoise); }
    +    }

         [Storable]
    @@ -124 +137 @@
           l = new double[n, n];

    -      meanFunction.SetData(x);
    -      covarianceFunction.SetData(x);
    -
           // calculate means and covariances
           double[] m = meanFunction.GetMean(x);
           for (int i = 0; i < n; i++) {
             for (int j = i; j < n; j++) {
    -          l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
    +          l[j, i] = covarianceFunction.GetCovariance(x, i, j) / sqrSigmaNoise;
               if (j == i) l[j, i] += 1.0;
             }
    @@ -153 +163 @@
             alpha[i] = alpha[i] / sqrSigmaNoise;
           negativeLogLikelihood = 0.5 * Util.ScalarProd(ym, alpha) + diagSum + (n / 2.0) * Math.Log(2.0 * Math.PI * sqrSigmaNoise);
    -    }
    -
    -    public double[] GetHyperparameterGradients() {
    +
           // derivatives
    -      int n = x.GetLength(0);
           int nAllowedVariables = x.GetLength(1);

    -      int info;
           alglib.matinvreport matInvRep;
           double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
    @@ -183 +189 @@
           if (covGradients.Length > 0) {
             for (int i = 0; i < n; i++) {
    +          for (int j = 0; j < i; j++) {
    +            var g = covarianceFunction.GetGradient(x, i, j).ToArray();
    +            for (int k = 0; k < covGradients.Length; k++) {
    +              covGradients[k] += lCopy[i, j] * g[k];
    +            }
    +          }
    +
    +          var gDiag = covarianceFunction.GetGradient(x, i, i).ToArray();
               for (int k = 0; k < covGradients.Length; k++) {
    -            for (int j = 0; j < i; j++) {
    -              covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
    -            }
    -            covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
    +            // diag
    +            covGradients[k] += 0.5 * lCopy[i, i] * gDiag[k];
               }
             }
           }

    -      return
    +      hyperparameterGradients =
             meanGradients
             .Concat(covGradients)
             .Concat(new double[] { noiseGradient }).ToArray();
    +
         }

    @@ -208 +221 @@
         }
         public GaussianProcessRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    -      return new GaussianProcessRegressionSolution(this, problemData);
    +      return new GaussianProcessRegressionSolution(this, new RegressionProblemData(problemData));
         }
         IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
    @@ -214 +227 @@
         }
         #endregion
    +

         private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
    @@ -219 +233 @@
           int newN = newX.GetLength(0);
           int n = x.GetLength(0);
    -      // var predMean = new double[newN];
    -      // predVar = new double[newN];
    -
    -
    -
    -      // var kss = new double[newN];
           var Ks = new double[newN, n];
    -      //double[,] sWKs = new double[n, newN];
    -      // double[,] v;
    -
    -
    -      // for stddev
    -      //covarianceFunction.SetParameter(covHyp, newX);
    -      //kss = covarianceFunction.GetDiagonalCovariances();
    -
    -      covarianceFunction.SetData(x, newX);
    -      meanFunction.SetData(newX);
           var ms = meanFunction.GetMean(newX);
           for (int i = 0; i < newN; i++) {
             for (int j = 0; j < n; j++) {
    -          Ks[i, j] = covarianceFunction.GetCovariance(j, i);
    -          //sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
    -        }
    -      }
    -
    -      // for stddev
    -      // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);
    +          Ks[i, j] = covarianceFunction.GetCrossCovariance(x, newX, j, i);
    +        }
    +      }

           return Enumerable.Range(0, newN)
             .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
    -      //for (int i = 0; i < newN; i++) {
    -      //  // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
    -      //  // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
    -      //  // predVar[i] = kss[i] - sumV2;
    -      //}
    -
         }

    @@ -266 +254 @@

           // for stddev
    -      covarianceFunction.SetData(newX);
           for (int i = 0; i < newN; i++)
    -        kss[i] = covarianceFunction.GetCovariance(i, i);
    -
    -      covarianceFunction.SetData(x, newX);
    +        kss[i] = covarianceFunction.GetCovariance(newX, i, i);
    +
           for (int i = 0; i < newN; i++) {
             for (int j = 0; j < n; j++) {
    -          sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
    +          sWKs[j, i] = covarianceFunction.GetCrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
             }
           }

           // for stddev
    -      int info;
    -      alglib.densesolverreport denseSolveRep;
    -      double[,] v;
    -
    -      alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);
    +      alglib.ablas.rmatrixlefttrsm(n, newN, l, 0, 0, false, false, 0, ref sWKs, 0, 0);

           for (int i = 0; i < newN; i++) {
    -        var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
    +        var sumV = Util.ScalarProd(Util.GetCol(sWKs, i), Util.GetCol(sWKs, i));
             kss[i] -= sumV;
             if (kss[i] < 0) kss[i] = 0;
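
    Note: the variance computation now exploits that l holds a Cholesky factor: instead of a general dense solve (rmatrixsolvem) it uses a triangular solve (rmatrixlefttrsm) to compute v = L⁻¹k* in place, after which the predictive variance is kss[i] − vᵀv. A plain forward-substitution sketch of the same triangular solve (illustrative, not the alglib call):

        // Illustrative only: solving L v = b for one column b when L is
        // lower triangular, as in the predictive-variance computation above.
        static double[] ForwardSolve(double[,] L, double[] b) {
          int n = b.Length;
          var v = new double[n];
          for (int i = 0; i < n; i++) {
            double s = b[i];
            for (int j = 0; j < i; j++) s -= L[i, j] * v[j];
            v[i] = s / L[i, i];  // L must have a nonzero diagonal
          }
          return v;
        }
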
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs (r8477 → r8742)

    @@ -22 +22 @@

     using System;
    -using System.Collections.Generic;
    -using System.Linq;
     using HeuristicLab.Algorithms.GradientDescent;
     using HeuristicLab.Common;
    @@ -32 +30 @@
     using HeuristicLab.Parameters;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    -using HeuristicLab.PluginInfrastructure;
     using HeuristicLab.Problems.DataAnalysis;

    @@ -59 +56 @@

         #region parameter properties
    -    public IConstrainedValueParameter<IMeanFunction> MeanFunctionParameter {
    -      get { return (IConstrainedValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
    +    public IValueParameter<IMeanFunction> MeanFunctionParameter {
    +      get { return (IValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
         }
    -    public IConstrainedValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    -      get { return (IConstrainedValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
    +    public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
    +      get { return (IValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
         }
         public IValueParameter<IntValue> MinimizationIterationsParameter {
    @@ -104 +101 @@
           Problem = new RegressionProblem();

    -      List<IMeanFunction> meanFunctions = ApplicationManager.Manager.GetInstances<IMeanFunction>().ToList();
    -      List<ICovarianceFunction> covFunctions = ApplicationManager.Manager.GetInstances<ICovarianceFunction>().ToList();
    -
    -      Parameters.Add(new ConstrainedValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.",
    -        new ItemSet<IMeanFunction>(meanFunctions), meanFunctions.OfType<MeanConst>().First()));
    -      Parameters.Add(new ConstrainedValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.",
    -        new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.OfType<CovarianceSEiso>().First()));
    +      Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
    +      Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
           Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
           Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs (r8477 → r8742)

    @@ -65 +65 @@
             ModelParameter.ActualValue = model;
             NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
    -        HyperparameterGradientsParameter.ActualValue = new RealVector(model.GetHyperparameterGradients());
    +        HyperparameterGradientsParameter.ActualValue = new RealVector(model.HyperparameterGradients);
             return base.Apply();
           }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolution.cs (r8477 → r8742)

    @@ -34 +34 @@
       [StorableClass]
       public sealed class GaussianProcessRegressionSolution : RegressionSolution, IGaussianProcessSolution {
    +    private new readonly Dictionary<int, double> evaluationCache;

         public new IGaussianProcessModel Model {
    @@ -41 +42 @@

         [StorableConstructor]
    -    private GaussianProcessRegressionSolution(bool deserializing) : base(deserializing) { }
    +    private GaussianProcessRegressionSolution(bool deserializing)
    +      : base(deserializing) {
    +      evaluationCache = new Dictionary<int, double>();
    +
    +    }
         private GaussianProcessRegressionSolution(GaussianProcessRegressionSolution original, Cloner cloner)
           : base(original, cloner) {
    +      evaluationCache = new Dictionary<int, double>(original.evaluationCache);
         }
         public GaussianProcessRegressionSolution(IGaussianProcessModel model, IRegressionProblemData problemData)
           : base(model, problemData) {
    +
    +      evaluationCache = new Dictionary<int, double>(problemData.Dataset.Rows);
           RecalculateResults();
         }
    @@ -65 +73 @@

         public IEnumerable<double> GetEstimatedVariance(IEnumerable<int> rows) {
    -      return Model.GetEstimatedVariance(ProblemData.Dataset, rows);
    +      var rowsToEvaluate = rows.Except(evaluationCache.Keys);
    +      var rowsEnumerator = rowsToEvaluate.GetEnumerator();
    +      var valuesEnumerator = Model.GetEstimatedVariance(ProblemData.Dataset, rowsToEvaluate).GetEnumerator();
    +
    +      while (rowsEnumerator.MoveNext() & valuesEnumerator.MoveNext()) {
    +        evaluationCache.Add(rowsEnumerator.Current, valuesEnumerator.Current);
    +      }
    +
    +      return rows.Select(row => evaluationCache[row]);
    +    }
    +
    +    protected override void OnModelChanged() {
    +      evaluationCache.Clear();
    +      base.OnModelChanged();
    +    }
    +    protected override void OnProblemDataChanged() {
    +      evaluationCache.Clear();
    +      base.OnProblemDataChanged();
         }
       }
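
    Note: GetEstimatedVariance now memoizes per-row variances: only rows missing from evaluationCache are evaluated, and the single '&' (rather than '&&') in the while condition is presumably deliberate so both enumerators are advanced in lock-step even on the terminating check. A compact sketch of the same caching idea (illustrative, outside HeuristicLab):

        // Illustrative only: evaluate a costly per-row function once and
        // serve repeated requests from a dictionary cache.
        using System;
        using System.Collections.Generic;
        using System.Linq;

        static class MemoDemo {
          static readonly Dictionary<int, double> cache = new Dictionary<int, double>();

          static IEnumerable<double> Evaluate(IEnumerable<int> rows, Func<int, double> costly) {
            foreach (var r in rows.Except(cache.Keys).ToList()) cache[r] = costly(r);
            return rows.Select(r => cache[r]);
          }

          static void Main() {
            // second request for row 1 is served from the cache
            Console.WriteLine(string.Join(", ", Evaluate(new[] { 1, 2, 1 }, r => r * 0.5)));
          }
        }
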
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs (r8477 → r8742)

    @@ -75 +75 @@

         public override IOperation Apply() {
    -      var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
    -      var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
    -      var s = new GaussianProcessRegressionSolution(m, data);
    +      if (ModelParameter.ActualValue != null) {
    +        var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
    +        var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
    +        var s = new GaussianProcessRegressionSolution(m, data);


    -      SolutionParameter.ActualValue = s;
    -      var results = ResultsParameter.ActualValue;
    -      if (!results.ContainsKey(SolutionParameterName)) {
    -        results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
    -        results.Add(new Result(TrainingRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the training partition.", new DoubleValue(s.TrainingRSquared)));
    -        results.Add(new Result(TestRSquaredResultName, "The Pearson's R² of the Gaussian process solution on the test partition.", new DoubleValue(s.TestRSquared)));
    -      } else {
    -        results[SolutionParameterName].Value = s;
    -        results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
    -        results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
    +        SolutionParameter.ActualValue = s;
    +        var results = ResultsParameter.ActualValue;
    +        if (!results.ContainsKey(SolutionParameterName)) {
    +          results.Add(new Result(SolutionParameterName, "The Gaussian process regression solution", s));
    +          results.Add(new Result(TrainingRSquaredResultName,
    +                                 "The Pearson's R² of the Gaussian process solution on the training partition.",
    +                                 new DoubleValue(s.TrainingRSquared)));
    +          results.Add(new Result(TestRSquaredResultName,
    +                                 "The Pearson's R² of the Gaussian process solution on the test partition.",
    +                                 new DoubleValue(s.TestRSquared)));
    +        } else {
    +          results[SolutionParameterName].Value = s;
    +          results[TrainingRSquaredResultName].Value = new DoubleValue(s.TrainingRSquared);
    +          results[TestRSquaredResultName].Value = new DoubleValue(s.TestRSquared);
    +        }
           }
           return base.Apply();
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs (r8477 → r8742)

    @@ -20 +20 @@
     #endregion

    +using System.Collections.Generic;
     using HeuristicLab.Core;

    @@ -26 +27 @@
         int GetNumberOfParameters(int numberOfVariables);
         void SetParameter(double[] hyp);
    -    void SetData(double[,] x);
    -    void SetData(double[,] x, double[,] xt);
    -
    -    double GetCovariance(int i, int j);
    -    double GetGradient(int i, int j, int k);
    +    double GetCovariance(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
    +    IEnumerable<double> GetGradient(double[,] x, int i, int j, IEnumerable<int> columnIndices = null);
    +    double GetCrossCovariance(double[,] x, double[,] xt, int i, int j, IEnumerable<int> columnIndices = null);
       }
     }
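
    Note: the interface is now stateless with respect to the data: callers pass the data matrix to every call instead of priming the function with SetData, and columnIndices defaults to all columns. A minimal sketch of how a caller might build a Gram matrix against the new signatures (illustrative helper, not part of the changeset):

        // Illustrative only: computing a symmetric Gram matrix with the
        // stateless interface; omitting columnIndices uses all columns.
        static double[,] Gram(ICovarianceFunction cov, double[,] x) {
          int n = x.GetLength(0);
          var K = new double[n, n];
          for (int i = 0; i < n; i++)
            for (int j = i; j < n; j++)
              K[i, j] = K[j, i] = cov.GetCovariance(x, i, j);
          return K;
        }
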
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/IMeanFunction.cs (r8416 → r8742)

    @@ -25 +25 @@
         int GetNumberOfParameters(int numberOfVariables);
         void SetParameter(double[] hyp);
    -    void SetData(double[,] x);
         double[] GetMean(double[,] x);
         double[] GetGradients(int k, double[,] x);
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanConst.cs (r8477 → r8742)

    @@ -24 +24 @@
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    @@ -29 +30 @@
       [StorableClass]
       [Item(Name = "MeanConst", Description = "Constant mean function for Gaussian processes.")]
    -  public class MeanConst : Item, IMeanFunction {
    +  public sealed class MeanConst : ParameterizedNamedItem, IMeanFunction {
         [Storable]
         private double c;
    -    public double Value { get { return c; } }
    +    [Storable]
    +    private readonly HyperParameter<DoubleValue> valueParameter;
    +    public IValueParameter<DoubleValue> ValueParameter { get { return valueParameter; } }

    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return 1;
    -    }
         [StorableConstructor]
    -    protected MeanConst(bool deserializing) : base(deserializing) { }
    -    protected MeanConst(MeanConst original, Cloner cloner)
    +    private MeanConst(bool deserializing) : base(deserializing) { }
    +    private MeanConst(MeanConst original, Cloner cloner)
           : base(original, cloner) {
           this.c = original.c;
    +      this.valueParameter = cloner.Clone(original.valueParameter);
    +      RegisterEvents();
         }
         public MeanConst()
           : base() {
    +      this.name = ItemName;
    +      this.description = ItemDescription;
    +
    +      this.valueParameter = new HyperParameter<DoubleValue>("Value", "The constant value for the constant mean function.");
    +      Parameters.Add(valueParameter);
    +      RegisterEvents();
    +    }
    +
    +    public override IDeepCloneable Clone(Cloner cloner) {
    +      return new MeanConst(this, cloner);
    +    }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
    +    }
    +
    +    private void RegisterEvents() {
    +      Util.AttachValueChangeHandler<DoubleValue, double>(valueParameter, () => { c = valueParameter.Value.Value; });
    +    }
    +
    +    public int GetNumberOfParameters(int numberOfVariables) {
    +      return valueParameter.Fixed ? 0 : 1;
         }

         public void SetParameter(double[] hyp) {
    -      if (hyp.Length != 1) throw new ArgumentException("Only one hyper-parameter allowed for constant mean function.", "hyp");
    -      this.c = hyp[0];
    -    }
    -    public void SetData(double[,] x) {
    -      // nothing to do
    +      if (!valueParameter.Fixed) {
    +        valueParameter.SetValue(new DoubleValue(hyp[0]));
    +      } else if (hyp.Length > 0)
    +        throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the constant mean function.", "hyp");
         }

    @@ -63 +87 @@
           return Enumerable.Repeat(1.0, x.GetLength(0)).ToArray();
         }
    -
    -    public override IDeepCloneable Clone(Cloner cloner) {
    -      return new MeanConst(this, cloner);
    -    }
       }
     }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanLinear.cs (r8477 → r8742)

    @@ -19 +19 @@
      */
     #endregion
    +
     using System;
     using System.Linq;
     using HeuristicLab.Common;
     using HeuristicLab.Core;
    +using HeuristicLab.Data;
     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    @@ -28 +30 @@
       [StorableClass]
       [Item(Name = "MeanLinear", Description = "Linear mean function for Gaussian processes.")]
    -  public class MeanLinear : Item, IMeanFunction {
    +  public sealed class MeanLinear : ParameterizedNamedItem, IMeanFunction {
         [Storable]
    -    private double[] alpha;
    -    public double[] Weights {
    -      get {
    -        if (alpha == null) return new double[0];
    -        var copy = new double[alpha.Length];
    -        Array.Copy(alpha, copy, copy.Length);
    -        return copy;
    +    private double[] weights;
    +    [Storable]
    +    private readonly HyperParameter<DoubleArray> weightsParameter;
    +    public IValueParameter<DoubleArray> WeightsParameter { get { return weightsParameter; } }
    +
    +    [StorableConstructor]
    +    private MeanLinear(bool deserializing) : base(deserializing) { }
    +    private MeanLinear(MeanLinear original, Cloner cloner)
    +      : base(original, cloner) {
    +      if (original.weights != null) {
    +        this.weights = new double[original.weights.Length];
    +        Array.Copy(original.weights, weights, original.weights.Length);
           }
    -    }
    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return numberOfVariables;
    -    }
    -    [StorableConstructor]
    -    protected MeanLinear(bool deserializing) : base(deserializing) { }
    -    protected MeanLinear(MeanLinear original, Cloner cloner)
    -      : base(original, cloner) {
    -      if (original.alpha != null) {
    -        this.alpha = new double[original.alpha.Length];
    -        Array.Copy(original.alpha, alpha, original.alpha.Length);
    -      }
    +      weightsParameter = cloner.Clone(original.weightsParameter);
    +      RegisterEvents();
         }
         public MeanLinear()
           : base() {
    +      this.weightsParameter = new HyperParameter<DoubleArray>("Weights", "The weights parameter for the linear mean function.");
    +      Parameters.Add(weightsParameter);
    +      RegisterEvents();
    +    }
    +
    +    public override IDeepCloneable Clone(Cloner cloner) {
    +      return new MeanLinear(this, cloner);
    +    }
    +
    +    [StorableHook(HookType.AfterDeserialization)]
    +    private void AfterDeserialization() {
    +      RegisterEvents();
    +    }
    +
    +    private void RegisterEvents() {
    +      Util.AttachArrayChangeHandler<DoubleArray, double>(weightsParameter, () => {
    +        weights = weightsParameter.Value.ToArray();
    +      });
    +    }
    +
    +    public int GetNumberOfParameters(int numberOfVariables) {
    +      return weightsParameter.Fixed ? 0 : numberOfVariables;
         }

         public void SetParameter(double[] hyp) {
    -      this.alpha = new double[hyp.Length];
    -      Array.Copy(hyp, alpha, hyp.Length);
    -    }
    -    public void SetData(double[,] x) {
    -      // nothing to do
    +      if (!weightsParameter.Fixed) {
    +        weightsParameter.SetValue(new DoubleArray(hyp));
    +      } else if (hyp.Length != 0) throw new ArgumentException("The length of the parameter vector does not match the number of free parameters for the linear mean function.", "hyp");
         }

         public double[] GetMean(double[,] x) {
           // sanity check
    -      if (alpha.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function.");
    +      if (weights.Length != x.GetLength(1)) throw new ArgumentException("The number of hyperparameters must match the number of variables for the linear mean function.");
           int cols = x.GetLength(1);
           int n = x.GetLength(0);
           return (from i in Enumerable.Range(0, n)
    -              let rowVector = from j in Enumerable.Range(0, cols)
    -                              select x[i, j]
    -              select Util.ScalarProd(alpha, rowVector))
    +              let rowVector = Enumerable.Range(0, cols).Select(j => x[i, j])
    +              select Util.ScalarProd(weights, rowVector))
             .ToArray();
         }
    @@ -79 +95 @@
           int n = x.GetLength(0);
           if (k > cols) throw new ArgumentException();
    -      return (from r in Enumerable.Range(0, n)
    -              select x[r, k]).ToArray();
    -    }
    -
    -    public override IDeepCloneable Clone(Cloner cloner) {
    -      return new MeanLinear(this, cloner);
    +      return (Enumerable.Range(0, n).Select(r => x[r, k])).ToArray();
         }
       }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanSum.cs (r8477 → r8742)

    @@ -27 +27 @@
       [StorableClass]
       [Item(Name = "MeanSum", Description = "Sum of mean functions for Gaussian processes.")]
    -  public class MeanSum : Item, IMeanFunction {
    +  public sealed class MeanSum : Item, IMeanFunction {
         [Storable]
         private ItemList<IMeanFunction> terms;
    @@ -37 +37 @@
         }

    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      this.numberOfVariables = numberOfVariables;
    -      return terms.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
    -    }
         [StorableConstructor]
    -    protected MeanSum(bool deserializing) : base(deserializing) { }
    -    protected MeanSum(MeanSum original, Cloner cloner)
    +    private MeanSum(bool deserializing) : base(deserializing) { }
    +    private MeanSum(MeanSum original, Cloner cloner)
           : base(original, cloner) {
           this.terms = cloner.Clone(original.terms);
    @@ -50 +46 @@
         public MeanSum() {
           this.terms = new ItemList<IMeanFunction>();
    +    }
    +
    +    public override IDeepCloneable Clone(Cloner cloner) {
    +      return new MeanSum(this, cloner);
    +    }
    +
    +    public int GetNumberOfParameters(int numberOfVariables) {
    +      this.numberOfVariables = numberOfVariables;
    +      return terms.Select(t => t.GetNumberOfParameters(numberOfVariables)).Sum();
         }

    @@ -59 +64 @@
             offset += numberOfParameters;
           }
    -    }
    -
    -    public void SetData(double[,] x) {
    -      foreach (var t in terms) t.SetData(x);
         }

    @@ -82 +83 @@
           return terms[i].GetGradients(k, x);
         }
    -
    -    public override IDeepCloneable Clone(Cloner cloner) {
    -      return new MeanSum(this, cloner);
    -    }
       }
     }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanZero.cs (r8416 → r8742)

    @@ -28 +28 @@
       [StorableClass]
       [Item(Name = "MeanZero", Description = "Constant zero mean function for Gaussian processes.")]
    -  public class MeanZero : Item, IMeanFunction {
    -    public int GetNumberOfParameters(int numberOfVariables) {
    -      return 0;
    -    }
    +  public sealed class MeanZero : Item, IMeanFunction {
         [StorableConstructor]
    -    protected MeanZero(bool deserializing) : base(deserializing) { }
    -    protected MeanZero(MeanZero original, Cloner cloner)
    +    private MeanZero(bool deserializing) : base(deserializing) { }
    +    private MeanZero(MeanZero original, Cloner cloner)
           : base(original, cloner) {
         }
    @@ -40 +37 @@
         }

    +    public override IDeepCloneable Clone(Cloner cloner) {
    +      return new MeanZero(this, cloner);
    +    }
    +
    +    public int GetNumberOfParameters(int numberOfVariables) {
    +      return 0;
    +    }
    +
         public void SetParameter(double[] hyp) {
           if (hyp.Length > 0) throw new ArgumentException("No hyper-parameters allowed for zero mean function.", "hyp");
    -    }
    -
    -    public void SetData(double[,] x) {
    -      // do nothing
         }

    @@ -56 +57 @@
           return Enumerable.Repeat(0.0, x.GetLength(0)).ToArray();
         }
    -    public override IDeepCloneable Clone(Cloner cloner) {
    -      return new MeanZero(this, cloner);
    -    }
       }
     }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs

    r8477 r8742  
    2020#endregion
    2121
     22using System;
    2223using System.Collections.Generic;
    2324using System.Linq;
     25using HeuristicLab.Core;
     26using HeuristicLab.Data;
    2427
    2528namespace HeuristicLab.Algorithms.DataAnalysis {
    26   public static class Util {
     29  internal static class Util {
    2730    public static double ScalarProd(IEnumerable<double> v, IEnumerable<double> u) {
    2831      return v.Zip(u, (vi, ui) => vi * ui).Sum();
     32    }
     33
     34    public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
     35      return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum();
    2936    }
    3037
     
    3441    }
    3542
    36     public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
    37       return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum();
     43    public static double SqrDist(double[,] x, int i, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
     44      return SqrDist(x, i, x, j, scale, columnIndices);
     45    }
     46
     47    public static double SqrDist(double[,] x, int i, double[,] xt, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
     48      double ss = 0.0;
     49      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
     50      foreach (int k in columnIndices) {
     51        double d = x[i, k] - xt[j, k];
     52        ss += d * d;
     53      }
     54      return scale * scale * ss;
     55    }
     56
     57    public static double SqrDist(double[,] x, int i, int j, double[] scale, IEnumerable<int> columnIndices = null) {
      58      return SqrDist(x, i, x, j, scale, columnIndices);
     59    }
     60
     61    public static double SqrDist(double[,] x, int i, double[,] xt, int j, double[] scale, IEnumerable<int> columnIndices = null) {
     62      double ss = 0.0;
     63      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
     64      foreach (int k in columnIndices) {
     65        double d = x[i, k] - xt[j, k];
     66        ss += d * d * scale[k] * scale[k];
     67      }
     68      return ss;
     69    }
     70    public static double ScalarProd(double[,] x, int i, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
     71      return ScalarProd(x, i, x, j, scale, columnIndices);
     72    }
     73
     74    public static double ScalarProd(double[,] x, int i, double[,] xt, int j, double scale = 1.0, IEnumerable<int> columnIndices = null) {
     75      double sum = 0.0;
     76      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
     77      foreach (int k in columnIndices) {
     78        sum += x[i, k] * xt[j, k];
     79      }
     80      return scale * scale * sum;
     81    }
     82    public static double ScalarProd(double[,] x, int i, int j, double[] scale, IEnumerable<int> columnIndices = null) {
     83      return ScalarProd(x, i, x, j, scale, columnIndices);
     84    }
     85
     86    public static double ScalarProd(double[,] x, int i, double[,] xt, int j, double[] scale, IEnumerable<int> columnIndices = null) {
     87      double sum = 0.0;
     88      if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
     89      foreach (int k in columnIndices) {
     90        sum += x[i, k] * scale[k] * xt[j, k] * scale[k];
     91      }
     92      return sum;
    3893    }
    3994
     
    46101      return Enumerable.Range(0, rows).Select(r => x[r, c]);
    47102    }
     103
     104
     105    public static void AttachValueChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
     106      where T : ValueTypeValue<U>
     107      where U : struct {
     108      parameter.ValueChanged += (sender, args) => {
     109        if (parameter.Value != null) {
     110          parameter.Value.ValueChanged += (s, a) => action();
     111          action();
     112        }
     113      };
     114      if (parameter.Value != null) {
     115        parameter.Value.ValueChanged += (s, a) => action();
     116      }
     117    }
     118
     119    public static void AttachArrayChangeHandler<T, U>(IValueParameter<T> parameter, Action action)
     120      where T : ValueTypeArray<U>
     121      where U : struct {
     122      parameter.ValueChanged += (sender, args) => {
     123        if (parameter.Value != null) {
     124          parameter.Value.ItemChanged += (s, a) => action();
     125          parameter.Value.Reset += (s, a) => action();
     126          action();
     127        }
     128      };
     129      if (parameter.Value != null) {
     130        parameter.Value.ItemChanged += (s, a) => action();
     131        parameter.Value.Reset += (s, a) => action();
     132      }
     133    }
    48134  }
    49135}
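
The new helpers above all follow one scheme: iterate over a column subset (all columns when columnIndices is null) and accumulate a squared distance or scalar product, either with one global scale or with a per-column scale array; the per-column variants are the basis for ARD-style covariance functions. A small usage sketch against a stand-in with the same signature (not the internal Util class itself):

using System;
using System.Collections.Generic;
using System.Linq;

internal static class UtilSketch {
  // squared distance between row i of x and row j of xt over the selected
  // columns, each dimension weighted by its own length scale
  public static double SqrDist(double[,] x, int i, double[,] xt, int j,
                               double[] scale, IEnumerable<int> columnIndices = null) {
    if (columnIndices == null) columnIndices = Enumerable.Range(0, x.GetLength(1));
    double ss = 0.0;
    foreach (int k in columnIndices) {
      double d = x[i, k] - xt[j, k];
      ss += d * d * scale[k] * scale[k];
    }
    return ss;
  }

  private static void Main() {
    var x = new double[,] { { 1.0, 2.0, 3.0 }, { 2.0, 0.0, 7.0 } };
    // weight the first column twice as strongly and ignore the third column
    double d = SqrDist(x, 0, x, 1, new[] { 2.0, 1.0, 1.0 }, new[] { 0, 1 });
    Console.WriteLine(d); // (1-2)^2 * 2^2 + (2-0)^2 * 1^2 = 8
  }
}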
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r8477 r8742  
    105105      <Private>False</Private>
    106106    </Reference>
    107     <Reference Include="HeuristicLab.Algorithms.Benchmarks-3.3">
    108       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.Benchmarks-3.3.dll</HintPath>
    109     </Reference>
    110     <Reference Include="HeuristicLab.Algorithms.GradientDescent-3.3">
    111       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.GradientDescent-3.3.dll</HintPath>
    112     </Reference>
    113     <Reference Include="HeuristicLab.Analysis-3.3">
    114       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Analysis-3.3.dll</HintPath>
    115     </Reference>
    116     <Reference Include="HeuristicLab.Collections-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    117       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Collections-3.3.dll</HintPath>
    118     </Reference>
    119     <Reference Include="HeuristicLab.Common-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    120       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Common-3.3.dll</HintPath>
    121     </Reference>
    122     <Reference Include="HeuristicLab.Common.Resources-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    123       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Common.Resources-3.3.dll</HintPath>
    124     </Reference>
    125     <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    126       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Core-3.3.dll</HintPath>
    127     </Reference>
    128     <Reference Include="HeuristicLab.Data-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    129       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Data-3.3.dll</HintPath>
    130     </Reference>
    131     <Reference Include="HeuristicLab.Encodings.RealVectorEncoding-3.3">
    132       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Encodings.RealVectorEncoding-3.3.dll</HintPath>
    133     </Reference>
    134     <Reference Include="HeuristicLab.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    135       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Operators-3.3.dll</HintPath>
    136     </Reference>
    137     <Reference Include="HeuristicLab.Optimization-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    138       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Optimization-3.3.dll</HintPath>
    139     </Reference>
    140     <Reference Include="HeuristicLab.Parameters-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    141       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Parameters-3.3.dll</HintPath>
    142     </Reference>
    143     <Reference Include="HeuristicLab.Persistence-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    144       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Persistence-3.3.dll</HintPath>
    145     </Reference>
    146     <Reference Include="HeuristicLab.PluginInfrastructure-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    147       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.PluginInfrastructure-3.3.dll</HintPath>
    148     </Reference>
    149     <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Classification-3.4">
    150       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Problems.DataAnalysis.Symbolic.Classification-3.4.dll</HintPath>
    151     </Reference>
    152     <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4">
    153       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4.dll</HintPath>
    154     </Reference>
    155     <Reference Include="HeuristicLab.Problems.Instances-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    156       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances-3.3.dll</HintPath>
    157     </Reference>
    158     <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    159       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Random-3.3.dll</HintPath>
    160     </Reference>
    161     <Reference Include="LibSVM-1.6.3, Version=1.6.3.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    162       <HintPath>..\..\..\..\trunk\sources\bin\LibSVM-1.6.3.dll</HintPath>
     107    <Reference Include="HeuristicLab.Algorithms.GradientDescent-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     108    <Reference Include="HeuristicLab.Analysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     109    <Reference Include="HeuristicLab.Collections-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     110    <Reference Include="HeuristicLab.Common-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     111    <Reference Include="HeuristicLab.Common.Resources-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     112    <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     113    <Reference Include="HeuristicLab.Data-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     114    <Reference Include="HeuristicLab.Encodings.RealVectorEncoding-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     115    <Reference Include="HeuristicLab.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     116    <Reference Include="HeuristicLab.Optimization-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     117    <Reference Include="HeuristicLab.Parameters-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     118    <Reference Include="HeuristicLab.Persistence-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     119    <Reference Include="HeuristicLab.PluginInfrastructure-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     120    <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Classification-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     121    <Reference Include="HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     122    <Reference Include="HeuristicLab.Problems.Instances-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     123    <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
     124    <Reference Include="LibSVM-3.12, Version=3.12.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     125      <HintPath>..\..\..\..\trunk\sources\bin\LibSVM-3.12.dll</HintPath>
    163126      <Private>False</Private>
    164127    </Reference>
     
    174137    </Compile>
    175138    <Compile Include="FixedDataAnalysisAlgorithm.cs" />
    176     <Compile Include="GaussianProcess\CovarianceRQiso.cs" />
     139    <Compile Include="GaussianProcess\CovarianceMask.cs" />
     140    <Compile Include="GaussianProcess\GaussianProcessClassificationSolutionCreator.cs" />
     141    <Compile Include="GaussianProcess\GaussianProcessClassificationModelCreator.cs" />
     142    <Compile Include="GaussianProcess\GaussianProcessClassification.cs" />
     143    <Compile Include="GaussianProcess\CovarianceProduct.cs" />
     144    <Compile Include="GaussianProcess\CovarianceScale.cs" />
     145    <Compile Include="GaussianProcess\CovarianceRationalQuadraticArd.cs" />
     146    <Compile Include="GaussianProcess\CovarianceRationalQuadraticIso.cs" />
     147    <Compile Include="GaussianProcess\CovarianceSquaredExponentialArd.cs" />
     148    <Compile Include="GaussianProcess\CovarianceSquaredExponentialIso.cs" />
     149    <Compile Include="GaussianProcess\HyperParameter.cs" />
     150    <Compile Include="GaussianProcess\CovarianceMaternIso.cs" />
     151    <Compile Include="GaussianProcess\CovarianceLinearArd.cs" />
    177152    <Compile Include="GaussianProcess\CovarianceNoise.cs" />
    178153    <Compile Include="GaussianProcess\CovarianceConst.cs" />
    179     <Compile Include="GaussianProcess\MeanProd.cs" />
     154    <Compile Include="GaussianProcess\MeanProduct.cs" />
    180155    <Compile Include="GaussianProcess\MeanSum.cs" />
    181     <Compile Include="GaussianProcess\CovarianceProd.cs" />
    182156    <Compile Include="GaussianProcess\CovarianceSum.cs" />
    183157    <Compile Include="GaussianProcess\CovariancePeriodic.cs" />
     
    194168    <Compile Include="GaussianProcess\MeanConst.cs" />
    195169    <Compile Include="GaussianProcess\IMeanFunction.cs" />
    196     <Compile Include="GaussianProcess\CovarianceSEard.cs" />
    197     <Compile Include="GaussianProcess\CovarianceSEiso.cs" />
    198170    <Compile Include="GaussianProcess\GaussianProcessModel.cs" />
    199171    <Compile Include="GaussianProcess\GaussianProcessRegression.cs" />
     
    295267      <Name>HeuristicLab.Problems.DataAnalysis-3.4</Name>
    296268    </ProjectReference>
    297     <ProjectReference Include="..\..\HeuristicLab.Random\3.3\HeuristicLab.Random-3.3.csproj">
    298       <Project>{F4539FB6-4708-40C9-BE64-0A1390AEA197}</Project>
    299       <Name>HeuristicLab.Random-3.3</Name>
    300       <Private>False</Private>
    301     </ProjectReference>
    302269  </ItemGroup>
    303270  <ItemGroup>
     
    332299  -->
    333300  <PropertyGroup>
    334     <PreBuildEvent>set Path=%25Path%25;$(ProjectDir);$(SolutionDir)
     301    <PreBuildEvent Condition=" '$(OS)' == 'Windows_NT' ">set Path=%25Path%25;$(ProjectDir);$(SolutionDir)
    335302set ProjectDir=$(ProjectDir)
    336303set SolutionDir=$(SolutionDir)
     
    339306call PreBuildEvent.cmd
    340307</PreBuildEvent>
     308    <PreBuildEvent Condition=" '$(OS)' != 'Windows_NT' ">
     309export ProjectDir=$(ProjectDir)
     310export SolutionDir=$(SolutionDir)
     311
     312$SolutionDir/PreBuildEvent.sh
     313</PreBuildEvent>
    341314  </PropertyGroup>
    342315</Project>
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs

    r8477 r8742  
    2929  public interface IGaussianProcessModel : IRegressionModel {
    3030    double NegativeLogLikelihood { get; }
     31    double SigmaNoise { get; }
    3132    IMeanFunction MeanFunction { get; }
    3233    ICovarianceFunction CovarianceFunction { get; }
    33     double[] GetHyperparameterGradients();
     34    double[] HyperparameterGradients { get; }
    3435
    3536    IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows);
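
The interface change is small but affects every caller: the noise level is newly exposed as SigmaNoise, and the hyperparameter gradients move from a method call (GetHyperparameterGradients()) to a read-only property. A hedged consumer sketch; the model instance is assumed to come from a trained Gaussian process:

using System;

static class GaussianProcessModelInspection {
  static void Print(IGaussianProcessModel model) {
    Console.WriteLine("negative log likelihood: " + model.NegativeLogLikelihood);
    Console.WriteLine("sigma noise: " + model.SigmaNoise);
    double[] gradients = model.HyperparameterGradients; // was GetHyperparameterGradients()
    Console.WriteLine("gradient components: " + gradients.Length);
  }
}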
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/ISupportVectorMachineModel.cs

    r7259 r8742  
    2020#endregion
    2121
    22 using HeuristicLab.Optimization;
    2322using HeuristicLab.Problems.DataAnalysis;
    24 using HeuristicLab.Core;
    25 using System.Collections.Generic;
     23using LibSVM;
    2624
    2725namespace HeuristicLab.Algorithms.DataAnalysis {
     
    3028  /// </summary>
    3129  public interface ISupportVectorMachineModel : IDataAnalysisModel, IRegressionModel, IClassificationModel {
    32     SVM.Model Model { get; }
    33     SVM.RangeTransform RangeTransform { get; }
     30    svm_model Model { get; }
     31    RangeTransform RangeTransform { get; }
    3432    Dataset SupportVectors { get; }
    3533  }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs

    r8430 r8742  
    111111      IClassificationProblemData problemData,
    112112      IEnumerable<int> rows) {
    113       return new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter);
     113      var model = new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter, new AccuracyMaximizationThresholdCalculator());
     114      model.RecalculateModelParameters(problemData, rows);
     115      return model;
    114116    }
    115117  }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitClassificationSolution.cs

    r7259 r8742  
    4545    public MultinomialLogitClassificationSolution(IClassificationProblemData problemData, MultinomialLogitModel logitModel)
    4646      : base(logitModel, problemData) {
    47       RecalculateResults();
    4847    }
    4948
     
    5150      return new MultinomialLogitClassificationSolution(this, cloner);
    5251    }
    53 
    54     protected override void RecalculateResults() {
    55       CalculateResults();
    56     }
    5752  }
    5853}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs

    r7259 r8742  
    109109
    110110    public MultinomialLogitClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    111       return new MultinomialLogitClassificationSolution(problemData, this);
     111      return new MultinomialLogitClassificationSolution(new ClassificationProblemData(problemData), this);
    112112    }
    113113    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
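
The same one-line change recurs in NcaModel, NearestNeighbourModel, NeuralNetworkModel, RandomForestModel and SupportVectorMachineModel below: instead of handing the caller's problem data directly to the new solution, the model wraps it in a fresh problem-data instance, so later mutations of the caller's object cannot silently alter the solution. A self-contained illustration of copy versus alias, with a stand-in class rather than the HeuristicLab type:

using System;

class ProblemDataSketch {
  public int TrainingEnd;
  public ProblemDataSketch() { }
  public ProblemDataSketch(ProblemDataSketch other) { TrainingEnd = other.TrainingEnd; }
}

static class CopyVersusAlias {
  static void Main() {
    var data = new ProblemDataSketch { TrainingEnd = 100 };
    var aliased = data;                        // old behaviour: shared instance
    var copied = new ProblemDataSketch(data);  // new behaviour: independent copy
    data.TrainingEnd = 10;
    Console.WriteLine(aliased.TrainingEnd);    // 10  - follows the caller's change
    Console.WriteLine(copied.TrainingEnd);     // 100 - the solution's view is stable
  }
}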
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Matrix.cs

    r8471 r8742  
    7070
    7171    public Matrix Transpose() {
    72       var result = new Matrix(Transpose(values, Columns, Rows), Columns, Rows);
    73       return result;
     72      return new Matrix(Transpose(values, Columns, Rows), Columns, Rows);
    7473    }
    7574
     
    125124    }
    126125
    127     public double VectorLength() {
    128       return Math.Sqrt(SquaredVectorLength());
    129     }
    130 
    131     public double SquaredVectorLength() {
    132       if (Rows != 1) throw new ArgumentException("Length only works on vectors.");
     126    public double EuclideanNorm() {
     127      return Math.Sqrt(SumOfSquares());
     128    }
     129
     130    public double SumOfSquares() {
    133131      return values.Sum(x => x * x);
    134132    }
     
    137135      if (Rows != 1 || other.Rows != 1) throw new ArgumentException("OuterProduct can only be applied to vectors.");
    138136      return Transpose().Multiply(other);
     137    }
     138
     139    public IEnumerable<double> ColumnSums() {
     140      return Transpose().RowSums();
     141    }
     142
     143    public IEnumerable<double> RowSums() {
     144      var sum = 0.0;
     145      int counter = 0;
     146      foreach (var v in values) {
     147        sum += v;
     148        counter++;
     149        if (counter == Rows) {
     150          yield return sum;
     151          sum = 0.0;
     152          counter = 0;
     153        }
     154      }
    139155    }
    140156
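
Beyond the renames, the Matrix edit relaxes a restriction: SquaredVectorLength rejected anything but a row vector, whereas SumOfSquares accepts any shape, so EuclideanNorm is the 2-norm for row vectors and the Frobenius norm for general matrices. A flat-array sketch of the two helpers (a stand-in, not the internal Matrix class):

using System;
using System.Linq;

static class MatrixNormSketch {
  // sum of squared entries; shape-independent, so it also serves matrices
  public static double SumOfSquares(double[] values) {
    return values.Sum(v => v * v);
  }

  // 2-norm for row vectors, Frobenius norm for matrices
  public static double EuclideanNorm(double[] values) {
    return Math.Sqrt(SumOfSquares(values));
  }

  static void Main() {
    Console.WriteLine(EuclideanNorm(new[] { 3.0, 4.0 })); // 5
  }
}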
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs

    r8471 r8742  
    3535
    3636namespace HeuristicLab.Algorithms.DataAnalysis {
    37   internal delegate void Reporter(double quality, double[] coefficients);
     37  internal delegate void Reporter(double quality, double[] coefficients, double[] gradients);
    3838  /// <summary>
    3939  /// Neighborhood Components Analysis
    4040  /// </summary>
    41   [Item("Neighborhood Components Analysis (NCA)", "Implementation of Neighborhood Components Analysis based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")]
     41  [Item("Neighborhood Components Analysis (NCA)", @"Implementation of Neighborhood Components Analysis
     42based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005.
     43Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520
     44with additional regularizations described in Z. Yang, J. Laaksonen. 2007.
     45Regularized Neighborhood Component Analysis. Lecture Notes in Computer Science, 4522. pp. 253-262.")]
    4246  [Creatable("Data Analysis")]
    4347  [StorableClass]
     
    5963      get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
    6064    }
     65    public IFixedValueParameter<DoubleValue> RegularizationParameter {
     66      get { return (IFixedValueParameter<DoubleValue>)Parameters["Regularization"]; }
     67    }
    6168    #endregion
    6269
    6370    #region Properties
    64     private int K {
     71    public int K {
    6572      get { return KParameter.Value.Value; }
    6673      set { KParameter.Value.Value = value; }
    6774    }
    68     private int Dimensions {
     75    public int Dimensions {
    6976      get { return DimensionsParameter.Value.Value; }
    7077      set { DimensionsParameter.Value.Value = value; }
    7178    }
    72     private int NeighborSamples {
     79    public int NeighborSamples {
    7380      get { return NeighborSamplesParameter.Value.Value; }
    7481      set { NeighborSamplesParameter.Value.Value = value; }
    7582    }
    76     private int Iterations {
     83    public int Iterations {
    7784      get { return IterationsParameter.Value.Value; }
    7885      set { IterationsParameter.Value.Value = value; }
     86    }
     87    public double Regularization {
     88      get { return RegularizationParameter.Value.Value; }
     89      set { RegularizationParameter.Value.Value = value; }
    7990    }
    8091    #endregion
     
    8596    public NcaAlgorithm()
    8697      : base() {
    87       Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(1)));
     98      Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(3)));
    8899      Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
    89100      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
    90       Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(50)));
    91       Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optima has already been reached.", new IntValue(20)));
     101      Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(60)));
     102      Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optima has already been reached.", new IntValue(50)));
     103      Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative paramter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
    92104
    93105      INCAInitializer defaultInitializer = null;
     
    105117    }
    106118
     119    [StorableHook(HookType.AfterDeserialization)]
     120    private void AfterDeserialization() {
     121      if (!Parameters.ContainsKey("Regularization")) {
     122        Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative paramter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
     123      }
     124    }
     125
    107126    public override void Prepare() {
    108127      if (Problem != null) base.Prepare();
     
    113132
    114133      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
    115       var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
    116       Results.Add(new Result("ClassificationSolution", "The classification solution.", model.CreateClassificationSolution(clonedProblem)));
    117     }
    118 
    119     public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
     134      var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Regularization, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
     135      var solution = model.CreateClassificationSolution(clonedProblem);
     136      if (!Results.ContainsKey("ClassificationSolution"))
     137        Results.Add(new Result("ClassificationSolution", "The classification solution.", solution));
     138      else Results["ClassificationSolution"].Value = solution;
     139    }
     140
     141    public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
    120142      var clonedProblem = (IClassificationProblemData)data.Clone();
    121       var model = Train(clonedProblem, k, dimensions, neighborSamples, iterations, initializer);
     143      var model = Train(clonedProblem, k, dimensions, neighborSamples, regularization, iterations, initializer);
    122144      return model.CreateClassificationSolution(clonedProblem);
    123145    }
    124146
    125     public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
    126       return Train(problemData, k, dimensions, neighborSamples, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
    127     }
    128 
    129     public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, int iterations, double[,] initalMatrix) {
     147    public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
     148      return Train(problemData, k, dimensions, neighborSamples, regularization, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
     149    }
     150
     151    public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, double regularization, int iterations, double[,] initalMatrix) {
    130152      var matrix = new double[initalMatrix.Length];
    131153      for (int i = 0; i < initalMatrix.GetLength(0); i++)
    132154        for (int j = 0; j < initalMatrix.GetLength(1); j++)
    133155          matrix[i * initalMatrix.GetLength(1) + j] = initalMatrix[i, j];
    134       return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, iterations, matrix, null, CancellationToken.None);
    135     }
    136 
    137     private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
     156      return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, regularization, iterations, matrix, null, CancellationToken.None);
     157    }
     158
     159    private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
    138160      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
    139161      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
     
    146168      alglib.mincgsetcond(state, 0, 0, 0, iterations);
    147169      alglib.mincgsetxrep(state, true);
     170      //alglib.mincgsetgradientcheck(state, 0.01);
    148171      int neighborSampleSize = neighborSamples;
    149       Optimize(state, scaledData, classes, dimensions, neighborSampleSize, cancellation, reporter);
     172      Optimize(state, scaledData, classes, dimensions, neighborSampleSize, regularization, cancellation, reporter);
    150173      alglib.mincgresults(state, out matrix, out rep);
     174      if (rep.terminationtype == -7) throw new InvalidOperationException("Gradient verification failed.");
    151175
    152176      var transformationMatrix = new double[attributes, dimensions];
     
    159183    }
    160184
    161     private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
     185    private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda, CancellationToken cancellation, Reporter reporter) {
    162186      while (alglib.mincgiteration(state)) {
    163187        if (cancellation.IsCancellationRequested) break;
    164188        if (state.needfg) {
    165           Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize);
     189          Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize, lambda);
    166190          continue;
    167191        }
    168192        if (state.innerobj.xupdated) {
    169193          if (reporter != null)
    170             reporter(state.innerobj.f, state.innerobj.x);
     194            reporter(state.innerobj.f, state.innerobj.x, state.innerobj.g);
    171195          continue;
    172196        }
     
    175199    }
    176200
    177     private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize) {
     201    private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda) {
    178202      var instances = data.GetLength(0);
    179203      var attributes = data.GetLength(1);
     
    192216          }
    193217          var kVector = new Matrix(GetRow(data, k));
    194           transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength());
    195         }
    196         var sample = transformedDistances.OrderByDescending(x => x.Value).Take(neighborSampleSize).ToArray();
    197         var normalization = sample.Sum(x => x.Value);
    198         if (normalization > 0) {
    199           foreach (var s in sample) {
    200             if (s.Value <= 0) break;
    201             alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
    202           }
     218          transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SumOfSquares());
     219        }
     220        var normalization = transformedDistances.Sum(x => x.Value);
     221        if (normalization <= 0) continue;
     222        foreach (var s in transformedDistances.Where(x => x.Value > 0).OrderByDescending(x => x.Value).Take(neighborSampleSize)) {
     223          alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
    203224        }
    204225      }
     
    224245      }
    225246
    226       func = -pi.Sum();
     247      func = -pi.Sum() + lambda * AMatrix.SumOfSquares();
    227248
    228249      r = 0;
    229250      var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
    230251      foreach (var g in newGrad) {
    231         grad[r++] = g;
    232       }
    233     }
    234 
    235     private void ReportQuality(double func, double[] coefficients) {
     252        grad[r] = g + lambda * 2 * A[r];
     253        r++;
     254      }
     255    }
     256
     257    private void ReportQuality(double func, double[] coefficients, double[] gradients) {
    236258      var instances = Problem.ProblemData.TrainingIndices.Count();
    237259      DataTable qualities;
     
    243265      qualities.Rows["Quality"].Values.Add(-func / instances);
    244266
     267      string[] attributNames = Problem.ProblemData.AllowedInputVariables.ToArray();
     268      if (gradients != null) {
     269        DataTable grads;
     270        if (!Results.ContainsKey("Gradients")) {
     271          grads = new DataTable("Gradients");
     272          for (int i = 0; i < gradients.Length; i++)
     273            grads.Rows.Add(new DataRow(attributNames[i / Dimensions] + "-" + (i % Dimensions), string.Empty));
     274          Results.Add(new Result("Gradients", grads));
     275        } else grads = (DataTable)Results["Gradients"].Value;
     276        for (int i = 0; i < gradients.Length; i++)
     277          grads.Rows[attributNames[i / Dimensions] + "-" + (i % Dimensions)].Values.Add(gradients[i]);
     278      }
     279
    245280      if (!Results.ContainsKey("Quality")) {
    246281        Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
    247282      } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
     283
     284      var attributes = attributNames.Length;
     285      var transformationMatrix = new double[attributes, Dimensions];
     286      var counter = 0;
     287      for (var i = 0; i < attributes; i++)
     288        for (var j = 0; j < Dimensions; j++)
     289          transformationMatrix[i, j] = coefficients[counter++];
     290
     291      var scaling = new Scaling(Problem.ProblemData.Dataset, attributNames, Problem.ProblemData.TrainingIndices);
     292      var model = new NcaModel(K, transformationMatrix, Problem.ProblemData.Dataset, Problem.ProblemData.TrainingIndices, Problem.ProblemData.TargetVariable, attributNames, scaling, Problem.ProblemData.ClassValues.ToArray());
     293
     294      IClassificationSolution solution = model.CreateClassificationSolution(Problem.ProblemData);
     295      if (!Results.ContainsKey("ClassificationSolution")) {
     296        Results.Add(new Result("ClassificationSolution", solution));
     297      } else {
     298        Results["ClassificationSolution"].Value = solution;
     299      }
    248300    }
    249301
     
    252304        yield return data[row, i];
    253305    }
     306
    254307  }
    255308}
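
The regularization threaded through Train, Optimize and Gradient above enters the computation in exactly two places, visible in the hunks: the objective becomes func = -pi.Sum() + lambda * AMatrix.SumOfSquares(), i.e. f(A) = -Σ_i p_i + λ·‖A‖², and every gradient component gains the matching penalty term, grad[r] = g + lambda * 2 * A[r]. With λ = 0 this reduces to plain NCA as proposed by Goldberger et al., exactly as the parameter description says. A minimal sketch of just the penalty step, where func and grad are assumed to already hold the unregularized NCA quantities:

static void AddL2Penalty(double[] A, double lambda, ref double func, double[] grad) {
  double sumOfSquares = 0.0;
  for (int r = 0; r < A.Length; r++) {
    sumOfSquares += A[r] * A[r];
    grad[r] += 2.0 * lambda * A[r]; // derivative of lambda * ||A||^2 w.r.t. A[r]
  }
  func += lambda * sumOfSquares;   // penalize large entries of the transformation
}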
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaClassificationSolution.cs

    r8471 r8742  
    4242    public NcaClassificationSolution(IClassificationProblemData problemData, INcaModel ncaModel)
    4343      : base(ncaModel, problemData) {
    44       RecalculateResults();
    4544    }
    4645
     
    4847      return new NcaClassificationSolution(this, cloner);
    4948    }
    50 
    51     protected override void RecalculateResults() {
    52       CalculateResults();
    53     }
    5449  }
    5550}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaModel.cs

    r8471 r8742  
    8282
    8383    public INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    84       return new NcaClassificationSolution(problemData, this);
     84      return new NcaClassificationSolution(new ClassificationProblemData(problemData), this);
    8585    }
    8686
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassificationSolution.cs

    r7259 r8742  
    4545    public NearestNeighbourClassificationSolution(IClassificationProblemData problemData, INearestNeighbourModel nnModel)
    4646      : base(nnModel, problemData) {
    47       RecalculateResults();
    4847    }
    4948
     
    5150      return new NearestNeighbourClassificationSolution(this, cloner);
    5251    }
    53 
    54     protected override void RecalculateResults() {
    55       CalculateResults();
    56     }
    5752  }
    5853}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs

    r8477 r8742  
    202202
    203203    public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    204       return new NearestNeighbourRegressionSolution(problemData, this);
     204      return new NearestNeighbourRegressionSolution(new RegressionProblemData(problemData), this);
    205205    }
    206206    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    208208    }
    209209    public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    210       return new NearestNeighbourClassificationSolution(problemData, this);
     210      return new NearestNeighbourClassificationSolution(new ClassificationProblemData(problemData), this);
    211211    }
    212212    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassificationSolution.cs

    r7259 r8742  
    4545    public NeuralNetworkClassificationSolution(IClassificationProblemData problemData, INeuralNetworkModel nnModel)
    4646      : base(nnModel, problemData) {
    47       RecalculateResults();
    4847    }
    4948
     
    5150      return new NeuralNetworkClassificationSolution(this, cloner);
    5251    }
    53     protected override void RecalculateResults() {
    54       CalculateResults();
    55     }
     52
    5653  }
    5754}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassificationSolution.cs

    r7259 r8742  
    4545    public NeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, INeuralNetworkEnsembleModel nnModel)
    4646      : base(nnModel, problemData) {
    47       RecalculateResults();
    4847    }
    4948
     
    5150      return new NeuralNetworkEnsembleClassificationSolution(this, cloner);
    5251    }
    53 
    54     protected override void RecalculateResults() {
    55       CalculateResults();
    56     }
    5752  }
    5853}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs

    r7694 r8742  
    130130
    131131    public INeuralNetworkEnsembleRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    132       return new NeuralNetworkEnsembleRegressionSolution(problemData, this);
     132      return new NeuralNetworkEnsembleRegressionSolution(new RegressionEnsembleProblemData(problemData), this);
    133133    }
    134134    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    136136    }
    137137    public INeuralNetworkEnsembleClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    138       return new NeuralNetworkEnsembleClassificationSolution(problemData, this);
     138      return new NeuralNetworkEnsembleClassificationSolution(new ClassificationEnsembleProblemData(problemData), this);
    139139    }
    140140    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs

    r7259 r8742  
    138138
    139139    public INeuralNetworkRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    140       return new NeuralNetworkRegressionSolution(problemData, this);
     140      return new NeuralNetworkRegressionSolution(new RegressionProblemData(problemData), this);
    141141    }
    142142    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    144144    }
    145145    public INeuralNetworkClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    146       return new NeuralNetworkClassificationSolution(problemData, this);
     146      return new NeuralNetworkClassificationSolution(new ClassificationProblemData(problemData), this);
    147147    }
    148148    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Plugin.cs.frame

    r8477 r8742  
    4646  [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Classification", "3.4")]
    4747  [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Regression", "3.4")]
    48   [PluginDependency("HeuristicLab.LibSVM", "1.6.3")]
     48  [PluginDependency("HeuristicLab.LibSVM", "3.12")]
    4949  [PluginDependency("HeuristicLab.Random", "3.3")]
    5050  public class HeuristicLabAlgorithmsDataAnalysisPlugin : PluginBase {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestClassificationSolution.cs

    r7259 r8742  
    4545    public RandomForestClassificationSolution(IClassificationProblemData problemData, IRandomForestModel randomForestModel)
    4646      : base(randomForestModel, problemData) {
    47       RecalculateResults();
    4847    }
    4948
     
    5150      return new RandomForestClassificationSolution(this, cloner);
    5251    }
    53 
    54     protected override void RecalculateResults() {
    55       CalculateResults();
    56     }
    5752  }
    5853}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs

    r7259 r8742  
    132132
    133133    public IRandomForestRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    134       return new RandomForestRegressionSolution(problemData, this);
     134      return new RandomForestRegressionSolution(new RegressionProblemData(problemData), this);
    135135    }
    136136    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    138138    }
    139139    public IRandomForestClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    140       return new RandomForestClassificationSolution(problemData, this);
     140      return new RandomForestClassificationSolution(new ClassificationProblemData(problemData), this);
    141141    }
    142142    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassification.cs

    r8430 r8742  
    3030using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3131using HeuristicLab.Problems.DataAnalysis;
     32using LibSVM;
    3233
    3334namespace HeuristicLab.Algorithms.DataAnalysis {
     
    4445    private const string NuParameterName = "Nu";
    4546    private const string GammaParameterName = "Gamma";
     47    private const string DegreeParameterName = "Degree";
    4648
    4749    #region parameter properties
     
    6062    public IValueParameter<DoubleValue> GammaParameter {
    6163      get { return (IValueParameter<DoubleValue>)Parameters[GammaParameterName]; }
     64    }
     65    public IValueParameter<IntValue> DegreeParameter {
     66      get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
    6267    }
    6368    #endregion
     
    7984    public DoubleValue Gamma {
    8085      get { return GammaParameter.Value; }
     86    }
     87    public IntValue Degree {
     88      get { return DegreeParameter.Value; }
    8189    }
    8290    #endregion
     
    103111      Parameters.Add(new ValueParameter<DoubleValue>(CostParameterName, "The value of the C (cost) parameter of C-SVC.", new DoubleValue(1.0)));
    104112      Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
     113      Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
    105114    }
    106115    [StorableHook(HookType.AfterDeserialization)]
    107     private void AfterDeserialization() { }
     116    private void AfterDeserialization() {
     117      #region backwards compatibility (change with 3.4)
     118      if (!Parameters.ContainsKey(DegreeParameterName))
     119        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
     120      #endregion
     121    }
    108122
    109123    public override IDeepCloneable Clone(Cloner cloner) {
     
    118132      int nSv;
    119133      var solution = CreateSupportVectorClassificationSolution(problemData, selectedInputVariables,
    120         SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value,
     134        SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Degree.Value,
    121135        out trainingAccuracy, out testAccuracy, out nSv);
    122136
    123137      Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution));
    124138      Results.Add(new Result("Training accuracy", "The accuracy of the SVR solution on the training partition.", new DoubleValue(trainingAccuracy)));
    125       Results.Add(new Result("Test ", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
     139      Results.Add(new Result("Test accuracy", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
    126140      Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
    127141    }
    128142
    129143    public static SupportVectorClassificationSolution CreateSupportVectorClassificationSolution(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
    130       string svmType, string kernelType, double cost, double nu, double gamma,
     144      string svmType, string kernelType, double cost, double nu, double gamma, int degree,
    131145      out double trainingAccuracy, out double testAccuracy, out int nSv) {
    132146      Dataset dataset = problemData.Dataset;
     
    135149
    136150      //extract SVM parameters from scope and set them
    137       SVM.Parameter parameter = new SVM.Parameter();
    138       parameter.SvmType = (SVM.SvmType)Enum.Parse(typeof(SVM.SvmType), svmType, true);
    139       parameter.KernelType = (SVM.KernelType)Enum.Parse(typeof(SVM.KernelType), kernelType, true);
     151      svm_parameter parameter = new svm_parameter();
     152      parameter.svm_type = GetSvmType(svmType);
     153      parameter.kernel_type = GetKernelType(kernelType);
    140154      parameter.C = cost;
    141       parameter.Nu = nu;
    142       parameter.Gamma = gamma;
    143       parameter.CacheSize = 500;
    144       parameter.Probability = false;
    145 
     155      parameter.nu = nu;
     156      parameter.gamma = gamma;
     157      parameter.cache_size = 500;
     158      parameter.probability = 0;
     159      parameter.eps = 0.001;
     160      parameter.degree = degree;
     161      parameter.shrinking = 1;
     162      parameter.coef0 = 0;
     163
     164
     165      var weightLabels = new List<int>();
     166      var weights = new List<double>();
    146167      foreach (double c in problemData.ClassValues) {
    147168        double wSum = 0.0;
     
    151172          }
    152173        }
    153         parameter.Weights.Add((int)c, wSum);
     174        weightLabels.Add((int)c);
     175        weights.Add(wSum);
    154176      }
    155 
    156 
    157       SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    158       SVM.RangeTransform rangeTransform = SVM.RangeTransform.Compute(problem);
    159       SVM.Problem scaledProblem = SVM.Scaling.Scale(rangeTransform, problem);
    160       var svmModel = SVM.Training.Train(scaledProblem, parameter);
     177      parameter.weight_label = weightLabels.ToArray();
     178      parameter.weight = weights.ToArray();
     179
     180
     181      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     182      RangeTransform rangeTransform = RangeTransform.Compute(problem);
     183      svm_problem scaledProblem = rangeTransform.Scale(problem);
     184      var svmModel = svm.svm_train(scaledProblem, parameter);
    161185      var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
    162186      var solution = new SupportVectorClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
    163187
    164       nSv = svmModel.SupportVectorCount;
     188      nSv = svmModel.SV.Length;
    165189      trainingAccuracy = solution.TrainingAccuracy;
    166190      testAccuracy = solution.TestAccuracy;
    167191
    168192      return solution;
     193    }
     194
     195    private static int GetSvmType(string svmType) {
     196      if (svmType == "NU_SVC") return svm_parameter.NU_SVC;
     197      if (svmType == "C_SVC") return svm_parameter.C_SVC;
     198      throw new ArgumentException("Unknown SVM type");
     199    }
     200
     201    private static int GetKernelType(string kernelType) {
     202      if (kernelType == "LINEAR") return svm_parameter.LINEAR;
     203      if (kernelType == "POLY") return svm_parameter.POLY;
     204      if (kernelType == "SIGMOID") return svm_parameter.SIGMOID;
     205      if (kernelType == "RBF") return svm_parameter.RBF;
     206      throw new ArgumentException("Unknown kernel type");
    169207    }
    170208    #endregion
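
For readers porting similar code, the change above is a mechanical migration from the retired SVM.NET wrapper to LibSVM 3.12: PascalCase properties become lowercase svm_parameter fields, boolean options become 0/1 integers, and scaling plus training move to RangeTransform.Scale and svm.svm_train. A condensed side-by-side sketch; the svm_problem is assumed to come from SupportVectorMachineUtil.CreateSvmProblem as above, and the per-class weight setup (weight_label/weight) is omitted for brevity:

using LibSVM;

static class LibSvmMigrationSketch {
  static svm_model Train(svm_problem problem) {
    var parameter = new svm_parameter();
    parameter.svm_type = svm_parameter.C_SVC;   // was: parameter.SvmType = SvmType.C_SVC
    parameter.kernel_type = svm_parameter.RBF;  // was: parameter.KernelType = KernelType.RBF
    parameter.C = 1.0;
    parameter.nu = 0.5;                         // was: parameter.Nu
    parameter.gamma = 1.0;                      // was: parameter.Gamma
    parameter.cache_size = 500;                 // was: parameter.CacheSize
    parameter.probability = 0;                  // was: parameter.Probability = false
    parameter.eps = 0.001;
    parameter.degree = 3;
    parameter.shrinking = 1;
    parameter.coef0 = 0;

    RangeTransform rangeTransform = RangeTransform.Compute(problem);
    svm_problem scaled = rangeTransform.Scale(problem);  // was: SVM.Scaling.Scale(...)
    svm_model model = svm.svm_train(scaled, parameter);  // was: SVM.Training.Train(...)
    // support vector count is now model.SV.Length (was: model.SupportVectorCount)
    return model;
  }
}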
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassificationSolution.cs

    r7259 r8742  
    4545    public SupportVectorClassificationSolution(SupportVectorMachineModel model, IClassificationProblemData problemData)
    4646      : base(model, problemData) {
    47       RecalculateResults();
    4847    }
    4948
     
    5150      return new SupportVectorClassificationSolution(this, cloner);
    5251    }
    53 
    54     protected override void RecalculateResults() {
    55       CalculateResults();
    56     }
    5752  }
    5853}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineModel.cs

    r8430 r8742  
    2929using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3030using HeuristicLab.Problems.DataAnalysis;
    31 using SVM;
     31using LibSVM;
    3232
    3333namespace HeuristicLab.Algorithms.DataAnalysis {
     
    3939  public sealed class SupportVectorMachineModel : NamedItem, ISupportVectorMachineModel {
    4040
    41     private SVM.Model model;
     41    private svm_model model;
    4242    /// <summary>
    4343    /// Gets or sets the SVM model.
    4444    /// </summary>
    45     public SVM.Model Model {
     45    public svm_model Model {
    4646      get { return model; }
    4747      set {
     
    5757    /// Gets or sets the range transformation for the model.
    5858    /// </summary>
    59     private SVM.RangeTransform rangeTransform;
    60     public SVM.RangeTransform RangeTransform {
     59    private RangeTransform rangeTransform;
     60    public RangeTransform RangeTransform {
    6161      get { return rangeTransform; }
    6262      set {
     
    7171    public Dataset SupportVectors {
    7272      get {
    73         var data = new double[Model.SupportVectorCount, allowedInputVariables.Count()];
    74         for (int i = 0; i < Model.SupportVectorCount; i++) {
    75           var sv = Model.SupportVectors[i];
     73        var data = new double[Model.SV.Length, allowedInputVariables.Count()];
     74        for (int i = 0; i < Model.SV.Length; i++) {
     75          var sv = Model.SV[i];
    7676          for (int j = 0; j < sv.Length; j++) {
    77             data[i, j] = sv[j].Value;
     77            data[i, j] = sv[j].value;
    7878          }
    7979        }
     
    101101        this.classValues = (double[])original.classValues.Clone();
    102102    }
    103     public SupportVectorMachineModel(SVM.Model model, SVM.RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> classValues)
     103    public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> classValues)
    104104      : this(model, rangeTransform, targetVariable, allowedInputVariables) {
    105105      this.classValues = classValues.ToArray();
    106106    }
    107     public SupportVectorMachineModel(SVM.Model model, SVM.RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables)
     107    public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables)
    108108      : base() {
    109109      this.name = ItemName;
     
    124124    }
    125125    public SupportVectorRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    126       return new SupportVectorRegressionSolution(this, problemData);
     126      return new SupportVectorRegressionSolution(this, new RegressionProblemData(problemData));
    127127    }
    128128    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    153153
    154154    public SupportVectorClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    155       return new SupportVectorClassificationSolution(this, problemData);
     155      return new SupportVectorClassificationSolution(this, new ClassificationProblemData(problemData));
    156156    }
    157157    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
     
    161161    private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
    162162      // calculate predictions for the currently requested rows
    163       SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    164       SVM.Problem scaledProblem = SVM.Scaling.Scale(RangeTransform, problem);
    165 
    166       for (int i = 0; i < scaledProblem.Count; i++) {
    167         yield return SVM.Prediction.Predict(Model, scaledProblem.X[i]);
     163      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     164      svm_problem scaledProblem = rangeTransform.Scale(problem);
     165
     166      for (int i = 0; i < scaledProblem.l; i++) {
     167        yield return svm.svm_predict(Model, scaledProblem.x[i]);
    168168      }
    169169    }
     
    183183      get {
    184184        using (MemoryStream stream = new MemoryStream()) {
    185           SVM.Model.Write(stream, Model);
     185          svm.svm_save_model(new StreamWriter(stream), Model);
    186186          stream.Seek(0, System.IO.SeekOrigin.Begin);
    187187          StreamReader reader = new StreamReader(stream);
     
    191191      set {
    192192        using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(value))) {
    193           model = SVM.Model.Read(stream);
     193          model = svm.svm_load_model(new StreamReader(stream));
    194194        }
    195195      }
     
    199199      get {
    200200        using (MemoryStream stream = new MemoryStream()) {
    201           SVM.RangeTransform.Write(stream, RangeTransform);
     201          RangeTransform.Write(stream, RangeTransform);
    202202          stream.Seek(0, System.IO.SeekOrigin.Begin);
    203203          StreamReader reader = new StreamReader(stream);
     
    207207      set {
    208208        using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(value))) {
    209           RangeTransform = SVM.RangeTransform.Read(stream);
     209          RangeTransform = RangeTransform.Read(stream);
    210210        }
    211211      }
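
    The persistence properties above stream the model through svm_save_model/svm_load_model instead of SVM.Model.Write/Read. A minimal sketch of that string round-trip, assuming the svm_save_model(StreamWriter, svm_model) and svm_load_model(StreamReader) overloads shown in the diff; the explicit writer.Flush() is an addition here, since StreamWriter buffers its output and the stream is read back without the writer being closed:

    using System.IO;
    using System.Text;
    using LibSVM;

    public static class SvmModelStringSketch {
      // Serialize a trained model to its textual LibSVM representation.
      public static string ToModelString(svm_model model) {
        using (var stream = new MemoryStream()) {
          var writer = new StreamWriter(stream);
          svm.svm_save_model(writer, model);
          writer.Flush();                      // push buffered output into the stream before reading
          stream.Seek(0, SeekOrigin.Begin);
          return new StreamReader(stream).ReadToEnd();
        }
      }

      // Restore a model from the textual representation.
      public static svm_model FromModelString(string modelString) {
        using (var stream = new MemoryStream(Encoding.ASCII.GetBytes(modelString))) {
          return svm.svm_load_model(new StreamReader(stream));
        }
      }
    }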
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineUtil.cs

    r7259 r8742  
    2323using System.Linq;
    2424using HeuristicLab.Problems.DataAnalysis;
     25using LibSVM;
    2526
    2627namespace HeuristicLab.Algorithms.DataAnalysis {
     
    3233    /// <param name="rowIndices">The rows of the dataset that should be contained in the resulting SVM-problem</param>
    3334    /// <returns>A problem data type that can be used to train a support vector machine.</returns>
    34     public static SVM.Problem CreateSvmProblem(Dataset dataset, string targetVariable, IEnumerable<string> inputVariables, IEnumerable<int> rowIndices) {
     35    public static svm_problem CreateSvmProblem(Dataset dataset, string targetVariable, IEnumerable<string> inputVariables, IEnumerable<int> rowIndices) {
    3536      double[] targetVector =
    3637        dataset.GetDoubleValues(targetVariable, rowIndices).ToArray();
    3738
    38       SVM.Node[][] nodes = new SVM.Node[targetVector.Length][];
    39       List<SVM.Node> tempRow;
     39      svm_node[][] nodes = new svm_node[targetVector.Length][];
     40      List<svm_node> tempRow;
    4041      int maxNodeIndex = 0;
    4142      int svmProblemRowIndex = 0;
    4243      List<string> inputVariablesList = inputVariables.ToList();
    4344      foreach (int row in rowIndices) {
    44         tempRow = new List<SVM.Node>();
     45        tempRow = new List<svm_node>();
    4546        int colIndex = 1; // ensure the smallest node index in the SVM problem is 1
    4647        foreach (var inputVariable in inputVariablesList) {
     
    4950          // => don't add NaN values in the dataset to the sparse SVM matrix representation
    5051          if (!double.IsNaN(value)) {
    51             tempRow.Add(new SVM.Node(colIndex, value)); // nodes must be sorted in ascending order by column index
     52            tempRow.Add(new svm_node() { index = colIndex, value = value }); // nodes must be sorted in ascending order by column index
    5253            if (colIndex > maxNodeIndex) maxNodeIndex = colIndex;
    5354          }
     
    5758      }
    5859
    59       return new SVM.Problem(targetVector.Length, targetVector, nodes, maxNodeIndex);
     60      return new svm_problem() { l = targetVector.Length, y = targetVector, x = nodes };
    6061    }
    6162  }
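
    CreateSvmProblem above builds the sparse svm_node[][] layout row by row: node indices are 1-based, rows stay in ascending index order, and NaN cells are simply omitted from a row. A condensed sketch of the same conversion for a plain dense matrix; the class and method names (SvmProblemSketch, FromDense) and the targets/inputs parameters are hypothetical:

    using System.Collections.Generic;
    using LibSVM;

    public static class SvmProblemSketch {
      // Convert a dense matrix (possibly containing NaN cells) into the
      // sparse svm_node[][] representation expected by LibSVM.
      public static svm_problem FromDense(double[] targets, double[,] inputs) {
        int rows = inputs.GetLength(0), cols = inputs.GetLength(1);
        var nodes = new svm_node[rows][];
        for (int i = 0; i < rows; i++) {
          var row = new List<svm_node>();
          for (int j = 0; j < cols; j++) {
            double value = inputs[i, j];
            if (!double.IsNaN(value))          // NaN cells are left out of the sparse row
              row.Add(new svm_node() { index = j + 1, value = value }); // 1-based, ascending order
          }
          nodes[i] = row.ToArray();
        }
        return new svm_problem() { l = targets.Length, y = targets, x = nodes };
      }
    }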
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorRegression.cs

    r8430 r8742  
    3030using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3131using HeuristicLab.Problems.DataAnalysis;
     32using LibSVM;
    3233
    3334namespace HeuristicLab.Algorithms.DataAnalysis {
     
    4546    private const string GammaParameterName = "Gamma";
    4647    private const string EpsilonParameterName = "Epsilon";
     48    private const string DegreeParameterName = "Degree";
    4749
    4850    #region parameter properties
     
    6466    public IValueParameter<DoubleValue> EpsilonParameter {
    6567      get { return (IValueParameter<DoubleValue>)Parameters[EpsilonParameterName]; }
     68    }
     69    public IValueParameter<IntValue> DegreeParameter {
     70      get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
    6671    }
    6772    #endregion
     
    8691    public DoubleValue Epsilon {
    8792      get { return EpsilonParameter.Value; }
     93    }
     94    public IntValue Degree {
     95      get { return DegreeParameter.Value; }
    8896    }
    8997    #endregion
     
    111119      Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
    112120      Parameters.Add(new ValueParameter<DoubleValue>(EpsilonParameterName, "The value of the epsilon parameter for epsilon-SVR.", new DoubleValue(0.1)));
     121      Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
    113122    }
    114123    [StorableHook(HookType.AfterDeserialization)]
    115     private void AfterDeserialization() { }
     124    private void AfterDeserialization() {
     125      #region backwards compatibility (change with 3.4)
     126      if (!Parameters.ContainsKey(DegreeParameterName))
     127        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
     128      #endregion
     129    }
    116130
    117131    public override IDeepCloneable Clone(Cloner cloner) {
     
    126140      int nSv;
    127141      var solution = CreateSupportVectorRegressionSolution(problemData, selectedInputVariables, SvmType.Value,
    128         KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value,
     142        KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value,
    129143        out trainR2, out testR2, out nSv);
    130144
     
    136150
    137151    public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
    138       string svmType, string kernelType, double cost, double nu, double gamma, double epsilon,
     152      string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
    139153      out double trainingR2, out double testR2, out int nSv) {
    140154      Dataset dataset = problemData.Dataset;
     
    143157
    144158      //extract SVM parameters from scope and set them
    145       SVM.Parameter parameter = new SVM.Parameter();
    146       parameter.SvmType = (SVM.SvmType)Enum.Parse(typeof(SVM.SvmType), svmType, true);
    147       parameter.KernelType = (SVM.KernelType)Enum.Parse(typeof(SVM.KernelType), kernelType, true);
     159      svm_parameter parameter = new svm_parameter();
     160      parameter.svm_type = GetSvmType(svmType);
     161      parameter.kernel_type = GetKernelType(kernelType);
    148162      parameter.C = cost;
    149       parameter.Nu = nu;
    150       parameter.Gamma = gamma;
    151       parameter.P = epsilon;
    152       parameter.CacheSize = 500;
    153       parameter.Probability = false;
    154 
    155 
    156       SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    157       SVM.RangeTransform rangeTransform = SVM.RangeTransform.Compute(problem);
    158       SVM.Problem scaledProblem = SVM.Scaling.Scale(rangeTransform, problem);
    159       var svmModel = SVM.Training.Train(scaledProblem, parameter);
    160       nSv = svmModel.SupportVectorCount;
     163      parameter.nu = nu;
     164      parameter.gamma = gamma;
     165      parameter.p = epsilon;
     166      parameter.cache_size = 500;
     167      parameter.probability = 0;
     168      parameter.eps = 0.001;
     169      parameter.degree = degree;
     170      parameter.shrinking = 1;
     171      parameter.coef0 = 0;
     172
     173
     174
     175      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     176      RangeTransform rangeTransform = RangeTransform.Compute(problem);
     177      svm_problem scaledProblem = rangeTransform.Scale(problem);
     178      var svmModel = svm.svm_train(scaledProblem, parameter);
     179      nSv = svmModel.SV.Length;
    161180      var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
    162181      var solution = new SupportVectorRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
     
    165184      return solution;
    166185    }
     186
     187    private static int GetSvmType(string svmType) {
     188      if (svmType == "NU_SVR") return svm_parameter.NU_SVR;
     189      if (svmType == "EPSILON_SVR") return svm_parameter.EPSILON_SVR;
     190      throw new ArgumentException("Unknown SVM type");
     191    }
     192
     193    private static int GetKernelType(string kernelType) {
     194      if (kernelType == "LINEAR") return svm_parameter.LINEAR;
     195      if (kernelType == "POLY") return svm_parameter.POLY;
     196      if (kernelType == "SIGMOID") return svm_parameter.SIGMOID;
     197      if (kernelType == "RBF") return svm_parameter.RBF;
     198      throw new ArgumentException("Unknown kernel type");
     199    }
    167200    #endregion
    168201  }
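
    The regression trainer now fills in every svm_parameter field explicitly (cache_size = 500, eps = 0.001, shrinking on, probability estimates off) and passes the new Degree parameter through to the kernel. A self-contained sketch of an epsilon-SVR fit and prediction with those settings; the three-point dataset, the query point, and the class name are made up for illustration. Note that p is the width of the epsilon-insensitive tube (the Epsilon parameter above), while eps is the solver's stopping tolerance:

    using System;
    using LibSVM;

    public static class SvmRegressionSketch {
      public static void Main() {
        // three one-dimensional training points on the line y = x
        var problem = new svm_problem {
          l = 3,
          y = new double[] { 1.0, 2.0, 3.0 },
          x = new svm_node[][] {
            new[] { new svm_node() { index = 1, value = 1.0 } },
            new[] { new svm_node() { index = 1, value = 2.0 } },
            new[] { new svm_node() { index = 1, value = 3.0 } }
          }
        };

        var parameter = new svm_parameter {
          svm_type = svm_parameter.EPSILON_SVR,
          kernel_type = svm_parameter.LINEAR,
          C = 1.0,
          p = 0.1,              // epsilon-tube width, mapped from the Epsilon parameter above
          degree = 3,           // only used by the POLY kernel
          coef0 = 0,
          gamma = 1.0,
          cache_size = 500,
          eps = 0.001,          // stopping tolerance, distinct from the tube width p
          shrinking = 1,
          probability = 0
        };

        var model = svm.svm_train(problem, parameter);
        var query = new[] { new svm_node() { index = 1, value = 2.5 } };
        Console.WriteLine("prediction at 2.5: " + svm.svm_predict(model, query));
      }
    }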