
Timestamp:
08/13/12 16:18:37
Author:
mkommend
Message:

#1081:

  • Added an autoregressive target variable symbol (AutoregressiveTargetVariable).
  • Merged trunk changes into the branch.
Location:
branches/HeuristicLab.TimeSeries
Files:
40 edited
9 copied

Legend:

Unmodified (no prefix)
Added (+)
Removed (-)
(… marks elided, unchanged regions)
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceLinear.cs

    r8417 → r8477
        [Item(Name = "CovarianceLinear", Description = "Linear covariance function for Gaussian processes.")]
        public class CovarianceLinear : Item, ICovarianceFunction {
    -     private static readonly double[] emptyArray = new double[0];
    -
          [Storable]
          private double[,] x;
    …
          }

    -     public double[] GetGradient(int i, int j) {
    -       return emptyArray;
    +     public double GetGradient(int i, int j, int k) {
    +       throw new NotSupportedException("CovarianceLinear does not have hyperparameters.");
          }
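    A note on why the new overload only throws: the linear kernel implemented by this class has no hyperparameters, so GetNumberOfParameters reports zero and no index k is valid. In standard GP notation (consistent with the code above):

        k(x, x') = x^T x'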
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovariancePeriodic.cs

    r8417 → r8477
          [Storable]
          private double sf2;
    +     public double Scale { get { return sf2; } }
          [Storable]
          private double l;
    +     public double Length { get { return l; } }
          [Storable]
          private double p;
    +     public double Period { get { return p; } }

          private bool symmetric;
    …
            this.p = Math.Exp(hyp[1]);
            this.sf2 = Math.Exp(2 * hyp[2]);
    -
    -       sf2 = Math.Min(10E6, sf2); // upper limit for the scale
    +       // sf2 = Math.Min(10E6, sf2); // upper limit for the scale

            sd = null;
    …
          }

    -     public double[] GetDiagonalCovariances() {
    -       if (x != xt) throw new InvalidOperationException();
    -       int rows = x.GetLength(0);
    -       var cov = new double[rows];
    -       for (int i = 0; i < rows; i++) {
    -         double k = Math.Sqrt(Util.SqrDist(Util.GetRow(x, i), Util.GetRow(xt, i)));
    -         k = Math.PI * k / p;
    -         k = Math.Sin(k) / l;
    -         k = k * k;
    -         cov[i] = sf2 * Math.Exp(-2.0 * k);
    -       }
    -       return cov;
    -     }
    -
    -     public double[] GetGradient(int i, int j) {
    -
    -       var res = new double[3];
    -       double k = sd[i, j];
    -       k = Math.PI * k / p;
    -       {
    -         double newK = Math.Sin(k) / l;
    -         newK = newK * newK;
    -         res[0] = 4 * sf2 * Math.Exp(-2 * newK) * newK;
    -       }
    -       {
    -         double r = Math.Sin(k) / l;
    -         res[1] = 4 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(k) * k;
    -       }
    -       {
    -         double newK = Math.Sin(k) / l;
    -         newK = newK * newK;
    -         res[2] = 2 * sf2 * Math.Exp(-2 * newK);
    -       }
    -
    -       return res;
    +     public double GetGradient(int i, int j, int k) {
    +       double v = Math.PI * sd[i, j] / p;
    +       switch (k) {
    +         case 0: {
    +             double newK = Math.Sin(v) / l;
    +             newK = newK * newK;
    +             return 4 * sf2 * Math.Exp(-2 * newK) * newK;
    +           }
    +         case 1: {
    +             double r = Math.Sin(v) / l;
    +             return 4 * sf2 / l * Math.Exp(-2 * r * r) * r * Math.Cos(v) * v;
    +           }
    +         case 2: {
    +             double newK = Math.Sin(v) / l;
    +             newK = newK * newK;
    +             return 2 * sf2 * Math.Exp(-2 * newK);
    +           }
    +         default: {
    +             throw new ArgumentException("CovariancePeriodic only has three hyperparameters.", "k");
    +           }
    +       }
          }
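    For reference, a reading of the new code in standard GP notation (cf. Rasmussen & Williams), assuming sd[i, j] caches the distance d between inputs i and j and writing v = \pi d / p:

        k(d) = \sigma_f^2 \exp\left( -\frac{2 \sin^2(v)}{\ell^2} \right)

    With the log-scale parameterization used by SetParameter (\ell = e^{hyp_0}, p = e^{hyp_1}, \sigma_f^2 = e^{2 hyp_2}), and abbreviating r = \sin(v)/\ell, s = r^2, the three switch cases return the partial derivatives

        \partial k / \partial \log \ell = 4 \sigma_f^2 e^{-2s} s
        \partial k / \partial \log p = (4 \sigma_f^2 / \ell) \, e^{-2s} \, r \cos(v) \, v
        \partial k / \partial \log \sigma_f = 2 \sigma_f^2 e^{-2s}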
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceProd.cs

    r8416 → r8477
    #endregion

    + using System;
    + using System.Collections.Generic;
      using System.Linq;
      using HeuristicLab.Common;
    …
            this.factors = cloner.Clone(original.factors);
            this.numberOfVariables = original.numberOfVariables;
    +       AttachEventHandlers();
          }

    …
            : base() {
            this.factors = new ItemList<ICovarianceFunction>();
    +       AttachEventHandlers();
    +     }
    +
    +     private void AttachEventHandlers() {
    +       this.factors.CollectionReset += (sender, args) => ClearCache();
    +       this.factors.ItemsAdded += (sender, args) => ClearCache();
    +       this.factors.ItemsRemoved += (sender, args) => ClearCache();
    +       this.factors.ItemsReplaced += (sender, args) => ClearCache();
    +       this.factors.ItemsMoved += (sender, args) => ClearCache();
          }

    …
          }

    -     public double[] GetGradient(int i, int j) {
    -       return factors.Select(t => t.GetGradient(i, j)).SelectMany(seq => seq).ToArray();
    +     private Dictionary<int, Tuple<int, int>> cachedParameterMap;
    +     public double GetGradient(int i, int j, int k) {
    +       if (cachedParameterMap == null) {
    +         CalculateParameterMap();
    +       }
    +       int ti = cachedParameterMap[k].Item1;
    +       k = cachedParameterMap[k].Item2;
    +       double res = 1.0;
    +       for (int ii = 0; ii < factors.Count; ii++) {
    +         var f = factors[ii];
    +         if (ii == ti) {
    +           res *= f.GetGradient(i, j, k);
    +         } else {
    +           res *= f.GetCovariance(i, j);
    +         }
    +       }
    +       return res;
    +     }
    +
    +     private void ClearCache() {
    +       cachedParameterMap = null;
    +     }
    +
    +     private void CalculateParameterMap() {
    +       cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
    +       int k = 0;
    +       for (int ti = 0; ti < factors.Count; ti++) {
    +         for (int ti_k = 0; ti_k < factors[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
    +           cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
    +         }
    +       }
          }
        }
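    The cached map is the key idea here: a composite covariance exposes one flat hyperparameter index k and translates it to a (factor index, local index) pair; CovarianceSum below builds the identical map over its terms. A minimal standalone sketch of just the mapping (hypothetical demo class, not part of the changeset):

        using System;
        using System.Collections.Generic;

        class ParameterMapDemo {
          static void Main() {
            // e.g. three child covariance functions with 2, 3 and 1 hyperparameters
            int[] paramCounts = { 2, 3, 1 };
            var map = new Dictionary<int, Tuple<int, int>>();
            int k = 0;
            for (int ti = 0; ti < paramCounts.Length; ti++)
              for (int tiK = 0; tiK < paramCounts[ti]; tiK++)
                map[k++] = Tuple.Create(ti, tiK);
            // flat index 4 falls into child 1, local parameter 2
            Console.WriteLine(map[4]); // prints (1, 2)
          }
        }

    Because the map depends on the child list, every mutating collection event (reset, add, remove, replace, move) invalidates it via ClearCache.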
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEard.cs

    r8416 → r8477
          [Storable]
          private double sf2;
    +     public double Scale { get { return sf2; } }
    +
          [Storable]
          private double[] l;
    +     public double[] Length {
    +       get {
    +         if (l == null) return new double[0];
    +         var copy = new double[l.Length];
    +         Array.Copy(l, copy, copy.Length);
    +         return copy;
    +       }
    +     }

          private double[,] sd;
    …
            this.l = hyp.Take(hyp.Length - 1).Select(Math.Exp).ToArray();
            this.sf2 = Math.Exp(2 * hyp[hyp.Length - 1]);
    -       sf2 = Math.Min(10E6, sf2); // upper limit for the scale
    +       // sf2 = Math.Min(10E6, sf2); // upper limit for the scale

            sd = null;
    …
          }

    -     public double[] GetGradient(int i, int j) {
    -       var res = new double[l.Length + 1];
    -       for (int k = 0; k < l.Length; k++) {
    +     public double GetGradient(int i, int j, int k) {
    +       if (k < l.Length) {
              double sqrDist = Util.SqrDist(x[i, k] / l[k], xt[j, k] / l[k]);
    -
    -         res[k] = sf2 * Math.Exp(-sd[i, j] / 2.0) * sqrDist;
    +         return sf2 * Math.Exp(-sd[i, j] / 2.0) * sqrDist;
    +       } else if (k == l.Length) {
    +         return 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
    +       } else {
    +         throw new ArgumentException("CovarianceSEard has dimension+1 hyperparameters.", "k");
            }
    -       res[res.Length - 1] = 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
    -       return res;
          }
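    For reference, the ARD squared-exponential form implemented here, in standard notation (consistent with the code: sd[i, j] caches the scaled squared distance r^2 and the hyperparameters are log-scaled):

        k(x, x') = \sigma_f^2 \exp\left( -\frac{1}{2} \sum_{d=1}^{D} \frac{(x_d - x'_d)^2}{\ell_d^2} \right)

    For k < D the method returns \partial k / \partial \log \ell_k = \sigma_f^2 e^{-r^2/2} \left( (x_k - x'_k) / \ell_k \right)^2, and for k = D it returns \partial k / \partial \log \sigma_f = 2 \sigma_f^2 e^{-r^2/2}; hence the "dimension+1 hyperparameters" error message for any larger k.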
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSEiso.cs

    r8416 → r8477
          [Storable]
          private double sf2;
    +     public double Scale { get { return sf2; } }
          [Storable]
          private double l;
    +     public double Length { get { return l; } }
          [Storable]
          private bool symmetric;
    …
          public void SetParameter(double[] hyp) {
            this.l = Math.Exp(hyp[0]);
    -       this.sf2 = Math.Min(1E6, Math.Exp(2 * hyp[1])); // upper limit for scale
    +       this.sf2 = Math.Exp(2 * hyp[1]);
            sd = null;
          }
    …
          }

    -     public double[] GetGradient(int i, int j) {
    -       var res = new double[2];
    -       res[0] = sf2 * Math.Exp(-sd[i, j] / 2.0) * sd[i, j];
    -       res[1] = 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
    -       return res;
    +     public double GetGradient(int i, int j, int k) {
    +       switch (k) {
    +         case 0: return sf2 * Math.Exp(-sd[i, j] / 2.0) * sd[i, j];
    +         case 1: return 2.0 * sf2 * Math.Exp(-sd[i, j] / 2.0);
    +         default: throw new ArgumentException("CovarianceSEiso has two hyperparameters", "k");
    +       }
          }
    …
            int cols = xt.GetLength(0);
            sd = new double[rows, cols];
    +       double lInv = 1.0 / l;
            if (symmetric) {
              for (int i = 0; i < rows; i++) {
                for (int j = i; j < rows; j++) {
    -             sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));
    +             sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
                  sd[j, i] = sd[i, j];
                }
    …
              for (int i = 0; i < rows; i++) {
                for (int j = 0; j < cols; j++) {
    -             sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e / l), Util.GetRow(xt, j).Select(e => e / l));
    +             sd[i, j] = Util.SqrDist(Util.GetRow(x, i).Select(e => e * lInv), Util.GetRow(xt, j).Select(e => e * lInv));
                }
              }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/CovarianceSum.cs

    r8416 → r8477
    #endregion

    + using System;
    + using System.Collections.Generic;
      using System.Linq;
      using HeuristicLab.Common;
    …
            this.terms = cloner.Clone(original.terms);
            this.numberOfVariables = original.numberOfVariables;
    +       AttachEventHandlers();
          }

    …
            : base() {
            this.terms = new ItemList<ICovarianceFunction>();
    +       AttachEventHandlers();
    +     }
    +
    +     private void AttachEventHandlers() {
    +       this.terms.CollectionReset += (sender, args) => ClearCache();
    +       this.terms.ItemsAdded += (sender, args) => ClearCache();
    +       this.terms.ItemsRemoved += (sender, args) => ClearCache();
    +       this.terms.ItemsReplaced += (sender, args) => ClearCache();
    +       this.terms.ItemsMoved += (sender, args) => ClearCache();
          }

    …
          }

    -     public double[] GetGradient(int i, int j) {
    -       return terms.Select(t => t.GetGradient(i, j)).SelectMany(seq => seq).ToArray();
    +     private Dictionary<int, Tuple<int, int>> cachedParameterMap;
    +     public double GetGradient(int i, int j, int k) {
    +       if (cachedParameterMap == null) {
    +         CalculateParameterMap();
    +       }
    +       int ti = cachedParameterMap[k].Item1;
    +       k = cachedParameterMap[k].Item2;
    +       return terms[ti].GetGradient(i, j, k);
    +     }
    +     private void ClearCache() {
    +       cachedParameterMap = null;
    +     }
    +
    +     private void CalculateParameterMap() {
    +       cachedParameterMap = new Dictionary<int, Tuple<int, int>>();
    +       int k = 0;
    +       for (int ti = 0; ti < terms.Count; ti++) {
    +         for (int ti_k = 0; ti_k < terms[ti].GetNumberOfParameters(numberOfVariables); ti_k++) {
    +           cachedParameterMap[k++] = Tuple.Create(ti, ti_k);
    +         }
    +       }
          }
        }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessHyperparameterInitializer.cs

    r8401 → r8477
          private const string ProblemDataParameterName = "ProblemData";
          private const string HyperparameterParameterName = "Hyperparameter";
    +     private const string RandomParameterName = "Random";

          #region Parameter Properties
    …
          public ILookupParameter<IDataAnalysisProblemData> ProblemDataParameter {
            get { return (ILookupParameter<IDataAnalysisProblemData>)Parameters[ProblemDataParameterName]; }
    +     }
    +     public ILookupParameter<IRandom> RandomParameter {
    +       get { return (ILookupParameter<IRandom>)Parameters[RandomParameterName]; }
          }
          // out
    …
            Parameters.Add(new LookupParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function for the Gaussian process model."));
            Parameters.Add(new LookupParameter<IDataAnalysisProblemData>(ProblemDataParameterName, "The input data for the Gaussian process."));
    +       Parameters.Add(new LookupParameter<IRandom>(RandomParameterName, "The pseudo random number generator to use for initializing the hyperparameter vector."));
            // out
            Parameters.Add(new LookupParameter<RealVector>(HyperparameterParameterName, "The initial hyperparameter vector for the Gaussian process model."));
    …
            int l = 1 + MeanFunction.GetNumberOfParameters(inputVariablesCount) +
                    CovarianceFunction.GetNumberOfParameters(inputVariablesCount);
    -       HyperparameterParameter.ActualValue = new RealVector(l);
    +       var r = new RealVector(l);
    +       var rand = RandomParameter.ActualValue;
    +       for (int i = 0; i < r.Length; i++)
    +         r[i] = rand.NextDouble() * 2 - 1;
    +
    +       HyperparameterParameter.ActualValue = r;
            return base.Apply();
          }
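    The change replaces an all-zero starting vector with a uniformly random one, so repeated runs explore different likelihood basins. A minimal sketch of the effect, using System.Random as a stand-in for HeuristicLab's IRandom (all names here are illustrative only):

        using System;

        class HyperparameterInitDemo {
          static void Main() {
            int meanParams = 1, covParams = 2;
            int l = 1 + meanParams + covParams;   // +1 for the noise hyperparameter
            var rand = new Random(0);             // stand-in for RandomParameter.ActualValue
            var r = new double[l];
            for (int i = 0; i < r.Length; i++)
              r[i] = rand.NextDouble() * 2 - 1;   // uniform in [-1, 1), as in Apply() above
            Console.WriteLine(string.Join(" ", r));
          }
        }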
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r8416 → r8477
          private double[,] x;
          [Storable]
    -     private Scaling scaling;
    +     private Scaling inputScaling;


    …
            this.meanFunction = cloner.Clone(original.meanFunction);
            this.covarianceFunction = cloner.Clone(original.covarianceFunction);
    -       this.scaling = cloner.Clone(original.scaling);
    +       this.inputScaling = cloner.Clone(original.inputScaling);
            this.negativeLogLikelihood = original.negativeLogLikelihood;
            this.targetVariable = original.targetVariable;
    …
            this.allowedInputVariables = allowedInputVariables.ToArray();

    -       sqrSigmaNoise = Math.Exp(2.0 * hyp.First());
    -       sqrSigmaNoise = Math.Max(10E-6, sqrSigmaNoise); // lower limit for the noise level

            int nVariables = this.allowedInputVariables.Length;
    -       this.meanFunction.SetParameter(hyp.Skip(1)
    +       this.meanFunction.SetParameter(hyp
              .Take(this.meanFunction.GetNumberOfParameters(nVariables))
              .ToArray());
    -       this.covarianceFunction.SetParameter(hyp.Skip(1 + this.meanFunction.GetNumberOfParameters(nVariables))
    +       this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
              .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
              .ToArray());
    +       sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());

            CalculateModel(ds, rows);
    …

          private void CalculateModel(Dataset ds, IEnumerable<int> rows) {
    -       scaling = new Scaling(ds, allowedInputVariables, rows);
    -       x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling);
    -
    -       var y = ds.GetDoubleValues(targetVariable, rows).ToArray();
    +       inputScaling = new Scaling(ds, allowedInputVariables, rows);
    +       x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
    +       var y = ds.GetDoubleValues(targetVariable, rows);

            int n = x.GetLength(0);
    …
            double[] m = meanFunction.GetMean(x);
            for (int i = 0; i < n; i++) {
    -
              for (int j = i; j < n; j++) {
                l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
    …

            var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
    -       if (!res) throw new InvalidOperationException("Matrix is not positive semidefinite");
    +       if (!res) throw new ArgumentException("Matrix is not positive semidefinite");

            // calculate sum of diagonal elements for likelihood
    …
            int n = x.GetLength(0);
            int nAllowedVariables = x.GetLength(1);
    -       double[,] q = new double[n, n];
    -       double[,] eye = new double[n, n];
    -       for (int i = 0; i < n; i++) eye[i, i] = 1.0;

            int info;
    -       alglib.densesolverreport denseSolveRep;
    -
    -       alglib.spdmatrixcholeskysolvem(l, n, false, eye, n, out info, out denseSolveRep, out q);
    -       // double[,] a2 = outerProd(alpha, alpha);
    +       alglib.matinvreport matInvRep;
    +       double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
    +       Array.Copy(l, lCopy, lCopy.Length);
    +
    +       alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
    +       if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients.");
            for (int i = 0; i < n; i++) {
    -         for (int j = 0; j < n; j++)
    -           q[i, j] = q[i, j] / sqrSigmaNoise - alpha[i] * alpha[j]; // a2[i, j];
    -       }
    -
    -       double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => q[i, i]).Sum();
    +         for (int j = 0; j <= i; j++)
    +           lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
    +       }
    +
    +       double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

            double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
    …
            if (covGradients.Length > 0) {
              for (int i = 0; i < n; i++) {
    -           for (int j = 0; j < n; j++) {
    -             var covDeriv = covarianceFunction.GetGradient(i, j);
    -             for (int k = 0; k < covGradients.Length; k++) {
    -               covGradients[k] += q[i, j] * covDeriv[k];
    +           for (int k = 0; k < covGradients.Length; k++) {
    +             for (int j = 0; j < i; j++) {
    +               covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
                  }
    +             covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
                }
              }
    -         covGradients = covGradients.Select(g => g / 2.0).ToArray();
    -       }
    -
    -       return new double[] { noiseGradient }
    -         .Concat(meanGradients)
    -         .Concat(covGradients).ToArray();
    +       }
    +
    +       return
    +         meanGradients
    +         .Concat(covGradients)
    +         .Concat(new double[] { noiseGradient }).ToArray();
          }

    …

          private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
    -       var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, scaling);
    +       var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
            int newN = newX.GetLength(0);
            int n = x.GetLength(0);
    …
            // var kss = new double[newN];
            var Ks = new double[newN, n];
    -       double[,] sWKs = new double[n, newN];
    +       //double[,] sWKs = new double[n, newN];
            // double[,] v;

    …
            var ms = meanFunction.GetMean(newX);
            for (int i = 0; i < newN; i++) {
    -
              for (int j = 0; j < n; j++) {
                Ks[i, j] = covarianceFunction.GetCovariance(j, i);
    -           sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
    +           //sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
              }
            }
    …
            // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);

    -
    -       for (int i = 0; i < newN; i++) {
    -         // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
    -         yield return ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha);
    -         // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
    -         // predVar[i] = kss[i] - sumV2;
    -       }
    -
    +       return Enumerable.Range(0, newN)
    +         .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
    +       //for (int i = 0; i < newN; i++) {
    +       //  // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
    +       //  // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
    +       //  // predVar[i] = kss[i] - sumV2;
    +       //}
    +
    +     }
    +
    +     public IEnumerable<double> GetEstimatedVariance(Dataset dataset, IEnumerable<int> rows) {
    +       var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
    +       int newN = newX.GetLength(0);
    +       int n = x.GetLength(0);
    +
    +       var kss = new double[newN];
    +       double[,] sWKs = new double[n, newN];
    +
    +       // for stddev
    +       covarianceFunction.SetData(newX);
    +       for (int i = 0; i < newN; i++)
    +         kss[i] = covarianceFunction.GetCovariance(i, i);
    +
    +       covarianceFunction.SetData(x, newX);
    +       for (int i = 0; i < newN; i++) {
    +         for (int j = 0; j < n; j++) {
    +           sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
    +         }
    +       }
    +
    +       // for stddev
    +       int info;
    +       alglib.densesolverreport denseSolveRep;
    +       double[,] v;
    +
    +       alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);
    +
    +       for (int i = 0; i < newN; i++) {
    +         var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
    +         kss[i] -= sumV;
    +         if (kss[i] < 0) kss[i] = 0;
    +       }
    +       return kss;
          }
        }
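    Two things change shape in this file: the hyperparameter vector layout (the noise term moves from hyp.First() to hyp.Last(), so the order is now mean parameters, then covariance parameters, then noise, and GetHyperparameterGradients returns its entries in the same order), and the gradient computation (an explicit Cholesky-based inverse of the lower triangle replaces the solve against an identity matrix). A minimal sketch of the new vector split, mirroring the Take/Skip calls above with made-up sizes:

        using System;
        using System.Linq;

        class HyperparameterLayoutDemo {
          static void Main() {
            // layout after this changeset: [ mean | covariance | noise ]
            double[] hyp = { 0.1, 0.2, 0.3, 0.4 }; // e.g. 1 mean, 2 covariance, 1 noise
            int nMean = 1, nCov = 2;
            double[] meanHyp = hyp.Take(nMean).ToArray();
            double[] covHyp = hyp.Skip(nMean).Take(nCov).ToArray();
            double sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
            Console.WriteLine("mean: {0}  cov: {1} {2}  noise: {3}",
              meanHyp[0], covHyp[0], covHyp[1], sqrSigmaNoise);
          }
        }

    Callers such as the hyperparameter initializer and the BFGS gradient consumer must agree on this ordering, which is presumably why both sides changed in the same commit.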
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs

    r8401 → r8477
          private const string MinimizationIterationsParameterName = "Iterations";
          private const string ApproximateGradientsParameterName = "ApproximateGradients";
    +     private const string SeedParameterName = "Seed";
    +     private const string SetSeedRandomlyParameterName = "SetSeedRandomly";

          #region parameter properties
    …
          public IValueParameter<IntValue> MinimizationIterationsParameter {
            get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }
    +     }
    +     public IValueParameter<IntValue> SeedParameter {
    +       get { return (IValueParameter<IntValue>)Parameters[SeedParameterName]; }
    +     }
    +     public IValueParameter<BoolValue> SetSeedRandomlyParameter {
    +       get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
          }
          #endregion
    …
            get { return MinimizationIterationsParameter.Value.Value; }
          }
    +     public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }
    +     public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }
          #endregion
    +
          [StorableConstructor]
          private GaussianProcessRegression(bool deserializing) : base(deserializing) { }
    …

            Parameters.Add(new ConstrainedValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.",
    -         new ItemSet<IMeanFunction>(meanFunctions), meanFunctions.First()));
    +         new ItemSet<IMeanFunction>(meanFunctions), meanFunctions.OfType<MeanConst>().First()));
            Parameters.Add(new ConstrainedValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.",
    -         new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.First()));
    +         new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.OfType<CovarianceSEiso>().First()));
            Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
    +       Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    +       Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
    +
            Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
            Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed

    +       var randomCreator = new HeuristicLab.Random.RandomCreator();
            var gpInitializer = new GaussianProcessHyperparameterInitializer();
            var bfgsInitializer = new LbfgsInitializer();
    …
            var solutionCreator = new GaussianProcessRegressionSolutionCreator();

    -       OperatorGraph.InitialOperator = gpInitializer;
    +       OperatorGraph.InitialOperator = randomCreator;
    +       randomCreator.SeedParameter.ActualName = SeedParameterName;
    +       randomCreator.SeedParameter.Value = null;
    +       randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
    +       randomCreator.SetSeedRandomlyParameter.Value = null;
    +       randomCreator.Successor = gpInitializer;

            gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    …
            gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
            gpInitializer.HyperparameterParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    +       gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
            gpInitializer.Successor = bfgsInitializer;

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs

    r8401 → r8477
    #endregion

    + using System;
      using System.Linq;
      using HeuristicLab.Common;
    …

          public override IOperation Apply() {
    -       var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
    -       ModelParameter.ActualValue = model;
    -       NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
    -       HyperparameterGradientsParameter.ActualValue = new RealVector(model.GetHyperparameterGradients());
    +       try {
    +         var model = Create(ProblemData, Hyperparameter.ToArray(), MeanFunction, CovarianceFunction);
    +         ModelParameter.ActualValue = model;
    +         NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(model.NegativeLogLikelihood);
    +         HyperparameterGradientsParameter.ActualValue = new RealVector(model.GetHyperparameterGradients());
    +         return base.Apply();
    +       }
    +       catch (ArgumentException) { }
    +       catch (alglib.alglibexception) { }
    +       NegativeLogLikelihoodParameter.ActualValue = new DoubleValue(1E300);
    +       HyperparameterGradientsParameter.ActualValue = new RealVector(Hyperparameter.Count());
            return base.Apply();
          }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolution.cs

    r8371 → r8477
    #endregion

    + using System.Collections.Generic;
    + using System.Linq;
      using HeuristicLab.Common;
      using HeuristicLab.Core;
    …
            return new GaussianProcessRegressionSolution(this, cloner);
          }
    +
    +     public IEnumerable<double> EstimatedVariance {
    +       get { return GetEstimatedVariance(Enumerable.Range(0, ProblemData.Dataset.Rows)); }
    +     }
    +     public IEnumerable<double> EstimatedTrainingVariance {
    +       get { return GetEstimatedVariance(ProblemData.TrainingIndices); }
    +     }
    +     public IEnumerable<double> EstimatedTestVariance {
    +       get { return GetEstimatedVariance(ProblemData.TestIndices); }
    +     }
    +
    +     public IEnumerable<double> GetEstimatedVariance(IEnumerable<int> rows) {
    +       return Model.GetEstimatedVariance(ProblemData.Dataset, rows);
    +     }
        }
      }
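    A hypothetical usage fragment for the new accessors (solution is assumed to be a trained GaussianProcessRegressionSolution, and EstimatedTestValues is assumed to be the existing point-prediction accessor inherited from RegressionSolution), pairing predicted means with predictive variances for rough two-sigma error bars:

        var means = solution.EstimatedTestValues.ToArray();
        var variances = solution.EstimatedTestVariance.ToArray();
        for (int i = 0; i < means.Length; i++)
          Console.WriteLine("{0} +/- {1}", means[i], 2 * Math.Sqrt(variances[i]));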
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs

    r8416 → r8477

          public override IOperation Apply() {
    -       var m = ModelParameter.ActualValue;
    -       var data = ProblemDataParameter.ActualValue;
    +       var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
    +       var data = (IRegressionProblemData)ProblemDataParameter.ActualValue.Clone();
            var s = new GaussianProcessRegressionSolution(m, data);

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/ICovarianceFunction.cs

    r8416 → r8477

        double GetCovariance(int i, int j);
    -   double[] GetGradient(int i, int j);
    +   double GetGradient(int i, int j, int k);
      }
    }
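    The interface change replaces the per-pair gradient array with a scalar lookup per hyperparameter index. A sketch of how a caller adapts (cov, q, n and nVariables are placeholders standing in for the surrounding context, cf. the gradient loop in GaussianProcessModel above):

        double[] covGradients = new double[cov.GetNumberOfParameters(nVariables)];
        for (int i = 0; i < n; i++)
          for (int j = 0; j < n; j++)
            for (int k = 0; k < covGradients.Length; k++)
              covGradients[k] += q[i, j] * cov.GetGradient(i, j, k); // was: cov.GetGradient(i, j)[k]

    The practical benefit is avoiding one double[] allocation per (i, j) pair inside an O(n^2) loop.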
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanConst.cs

    r8416 → r8477
          [Storable]
          private double c;
    +     public double Value { get { return c; } }
    +
          public int GetNumberOfParameters(int numberOfVariables) {
            return 1;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/MeanLinear.cs

    r8416 → r8477
          [Storable]
          private double[] alpha;
    +     public double[] Weights {
    +       get {
    +         if (alpha == null) return new double[0];
    +         var copy = new double[alpha.Length];
    +         Array.Copy(alpha, copy, copy.Length);
    +         return copy;
    +       }
    +     }
          public int GetNumberOfParameters(int numberOfVariables) {
            return numberOfVariables;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/Util.cs

    r8401 → r8477
    #endregion

    - using System;
      using System.Collections.Generic;
      using System.Linq;
    …
          public static double SqrDist(double x, double y) {
            double d = x - y;
    -       return Math.Max(d * d, 0.0);
    +       return d * d;
          }

          public static double SqrDist(IEnumerable<double> x, IEnumerable<double> y) {
    -       return x.Zip(y, SqrDist).Sum();
    +       return x.Zip(y, (a, b) => (a - b) * (a - b)).Sum();
          }

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r8430 → r8477
      </PropertyGroup>
      <ItemGroup>
    -   <Reference Include="ALGLIB-3.5.0, Version=3.5.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    -     <HintPath>..\..\..\..\trunk\sources\bin\ALGLIB-3.5.0.dll</HintPath>
    +   <Reference Include="ALGLIB-3.6.0, Version=3.6.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    +     <HintPath>..\..\..\..\trunk\sources\bin\ALGLIB-3.6.0.dll</HintPath>
          <Private>False</Private>
        </Reference>
    …
        </Compile>
        <Compile Include="FixedDataAnalysisAlgorithm.cs" />
    +   <Compile Include="GaussianProcess\CovarianceRQiso.cs" />
    +   <Compile Include="GaussianProcess\CovarianceNoise.cs" />
    +   <Compile Include="GaussianProcess\CovarianceConst.cs" />
    +   <Compile Include="GaussianProcess\MeanProd.cs" />
    +   <Compile Include="GaussianProcess\MeanSum.cs" />
        <Compile Include="GaussianProcess\CovarianceProd.cs" />
        <Compile Include="GaussianProcess\CovarianceSum.cs" />
    …
        <Compile Include="Interfaces\IGaussianProcessModel.cs" />
        <Compile Include="Interfaces\IGaussianProcessSolution.cs" />
    +   <Compile Include="Interfaces\INcaClassificationSolution.cs" />
    +   <Compile Include="Interfaces\INcaModel.cs" />
        <Compile Include="Interfaces\INearestNeighbourClassificationSolution.cs" />
        <Compile Include="Interfaces\INearestNeighbourRegressionSolution.cs" />
    …
        <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" />
        <Compile Include="Linear\MultinomialLogitModel.cs" />
    +   <Compile Include="Nca\Initialization\INcaInitializer.cs" />
    +   <Compile Include="Nca\Initialization\LdaInitializer.cs" />
    +   <Compile Include="Nca\Initialization\PcaInitializer.cs" />
    +   <Compile Include="Nca\Initialization\RandomInitializer.cs" />
    +   <Compile Include="Nca\Matrix.cs" />
    +   <Compile Include="Nca\NcaAlgorithm.cs" />
    +   <Compile Include="Nca\NcaClassificationSolution.cs" />
    +   <Compile Include="Nca\NcaModel.cs" />
        <Compile Include="NearestNeighbour\NearestNeighbourClassification.cs" />
        <Compile Include="NearestNeighbour\NearestNeighbourClassificationSolution.cs" />
    …
          <Name>HeuristicLab.Problems.DataAnalysis-3.4</Name>
        </ProjectReference>
    +   <ProjectReference Include="..\..\HeuristicLab.Random\3.3\HeuristicLab.Random-3.3.csproj">
    +     <Project>{F4539FB6-4708-40C9-BE64-0A1390AEA197}</Project>
    +     <Name>HeuristicLab.Random-3.3</Name>
    +     <Private>False</Private>
    +   </ProjectReference>
      </ItemGroup>
      <ItemGroup>
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/IGaussianProcessModel.cs

    r8430 → r8477
    #endregion

    + using System.Collections.Generic;
      using HeuristicLab.Problems.DataAnalysis;

    …
        ICovarianceFunction CovarianceFunction { get; }
        double[] GetHyperparameterGradients();
    +
    +   IEnumerable<double> GetEstimatedVariance(Dataset ds, IEnumerable<int> rows);
      }
    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/Scaling.cs

    r8430 → r8477
            return ds.GetDoubleValues(variable, rows).Select(x => (x - min) / (max - min));
          }
    +
    +     public void GetScalingParameters(string variable, out double min, out double max) {
    +       min = scalingParameters[variable].Item1;
    +       max = scalingParameters[variable].Item2;
    +     }
        }
      }
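    A hypothetical fragment showing what the new accessor enables: recovering a variable's training min/max to invert the (x - min) / (max - min) mapping used above ("x1" and scaledValue are illustrative names, not from the changeset):

        double min, max;
        scaling.GetScalingParameters("x1", out min, out max);
        double original = scaledValue * (max - min) + min;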
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassification.cs

    r8430 → r8477

      using System;
    - using System.Collections.Generic;
      using System.Linq;
      using HeuristicLab.Common;
      using HeuristicLab.Core;
      using HeuristicLab.Data;
    - using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
      using HeuristicLab.Optimization;
    + using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
      using HeuristicLab.Problems.DataAnalysis;
    - using HeuristicLab.Problems.DataAnalysis.Symbolic;
    - using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    - using HeuristicLab.Parameters;

    namespace HeuristicLab.Algorithms.DataAnalysis {
    …

          public static IClassificationSolution CreateNearestNeighbourClassificationSolution(IClassificationProblemData problemData, int k) {
    -       Dataset dataset = problemData.Dataset;
    -       string targetVariable = problemData.TargetVariable;
    -       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    -       IEnumerable<int> rows = problemData.TrainingIndices;
    -       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    -       if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    -         throw new NotSupportedException("Nearest neighbour classification does not support NaN or infinity values in the input dataset.");
    +       var problemDataClone = (IClassificationProblemData)problemData.Clone();
    +       return new NearestNeighbourClassificationSolution(problemDataClone, Train(problemDataClone, k));
    +     }

    -       alglib.nearestneighbor.kdtree kdtree = new alglib.nearestneighbor.kdtree();
    -
    -       int nRows = inputMatrix.GetLength(0);
    -       int nFeatures = inputMatrix.GetLength(1) - 1;
    -       double[] classValues = dataset.GetDoubleValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
    -       int nClasses = classValues.Count();
    -       // map original class values to values [0..nClasses-1]
    -       Dictionary<double, double> classIndices = new Dictionary<double, double>();
    -       for (int i = 0; i < nClasses; i++) {
    -         classIndices[classValues[i]] = i;
    -       }
    -       for (int row = 0; row < nRows; row++) {
    -         inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
    -       }
    -       alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdtree);
    -       var problemDataClone = (IClassificationProblemData)problemData.Clone();
    -       return new NearestNeighbourClassificationSolution(problemDataClone, new NearestNeighbourModel(kdtree, k, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()));
    +     public static INearestNeighbourModel Train(IClassificationProblemData problemData, int k) {
    +       return new NearestNeighbourModel(problemData.Dataset,
    +         problemData.TrainingIndices,
    +         k,
    +         problemData.TargetVariable,
    +         problemData.AllowedInputVariables,
    +         problemData.ClassValues.ToArray());
          }
          #endregion
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs

    r7294 → r8477
      /// </summary>
      [StorableClass]
    - [Item("NearestNeighbourModel", "Represents a neural network for regression and classification.")]
    + [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
      public sealed class NearestNeighbourModel : NamedItem, INearestNeighbourModel {

    …
        [Storable]
        private int k;
    +
        [StorableConstructor]
        private NearestNeighbourModel(bool deserializing)
    …
            this.classValues = (double[])original.classValues.Clone();
        }
    -   public NearestNeighbourModel(alglib.nearestneighbor.kdtree kdTree, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    -     : base() {
    -     this.name = ItemName;
    -     this.description = ItemDescription;
    -     this.kdTree = kdTree;
    +   public NearestNeighbourModel(Dataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) {
    +     Name = ItemName;
    +     Description = ItemDescription;
          this.k = k;
          this.targetVariable = targetVariable;
          this.allowedInputVariables = allowedInputVariables.ToArray();
    -     if (classValues != null)
    +
    +     var inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
    +                                  allowedInputVariables.Concat(new string[] { targetVariable }),
    +                                  rows);
    +
    +     if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    +       throw new NotSupportedException(
    +         "Nearest neighbour classification does not support NaN or infinity values in the input dataset.");
    +
    +     this.kdTree = new alglib.nearestneighbor.kdtree();
    +
    +     var nRows = inputMatrix.GetLength(0);
    +     var nFeatures = inputMatrix.GetLength(1) - 1;
    +
    +     if (classValues != null) {
            this.classValues = (double[])classValues.Clone();
    +       int nClasses = classValues.Length;
    +       // map original class values to values [0..nClasses-1]
    +       var classIndices = new Dictionary<double, double>();
    +       for (int i = 0; i < nClasses; i++)
    +         classIndices[classValues[i]] = i;
    +
    +       for (int row = 0; row < nRows; row++) {
    +         inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
    +       }
    +     }
    +     alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
        }

    …

        public IEnumerable<double> GetEstimatedClassValues(Dataset dataset, IEnumerable<int> rows) {
    +     if (classValues == null) throw new InvalidOperationException("No class values are defined.");
          double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

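    The constructor now owns the class-value remapping that previously lived in NearestNeighbourClassification: original labels are replaced by indices 0..nClasses-1 in the matrix's last column before the kd-tree is built. A standalone sketch of just that mapping (hypothetical demo class, not part of the changeset):

        using System;
        using System.Collections.Generic;

        class ClassIndexDemo {
          static void Main() {
            double[] classValues = { 2.0, 5.0, 7.0 }; // original labels, sorted
            var classIndices = new Dictionary<double, double>();
            for (int i = 0; i < classValues.Length; i++)
              classIndices[classValues[i]] = i;
            Console.WriteLine(classIndices[5.0]); // prints 1 (label 5.0 -> class index 1)
          }
        }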
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegression.cs

    r8430 → r8477

      using System;
    - using System.Collections.Generic;
    - using System.Linq;
      using HeuristicLab.Common;
      using HeuristicLab.Core;
      using HeuristicLab.Data;
    - using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
      using HeuristicLab.Optimization;
    + using HeuristicLab.Parameters;
      using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
      using HeuristicLab.Problems.DataAnalysis;
    - using HeuristicLab.Problems.DataAnalysis.Symbolic;
    - using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    - using HeuristicLab.Parameters;

    namespace HeuristicLab.Algorithms.DataAnalysis {
    …

          public static IRegressionSolution CreateNearestNeighbourRegressionSolution(IRegressionProblemData problemData, int k) {
    -       Dataset dataset = problemData.Dataset;
    -       string targetVariable = problemData.TargetVariable;
    -       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    -       IEnumerable<int> rows = problemData.TrainingIndices;
    -       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    -       if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    -         throw new NotSupportedException("Nearest neighbour regression does not support NaN or infinity values in the input dataset.");
    +       var clonedProblemData = (IRegressionProblemData)problemData.Clone();
    +       return new NearestNeighbourRegressionSolution(clonedProblemData, Train(problemData, k));
    +     }

    -       alglib.nearestneighbor.kdtree kdtree = new alglib.nearestneighbor.kdtree();
    -
    -       int nRows = inputMatrix.GetLength(0);
    -
    -       alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdtree);
    -
    -       return new NearestNeighbourRegressionSolution((IRegressionProblemData)problemData.Clone(), new NearestNeighbourModel(kdtree, k, targetVariable, allowedInputVariables));
    +     public static INearestNeighbourModel Train(IRegressionProblemData problemData, int k) {
    +       return new NearestNeighbourModel(problemData.Dataset,
    +         problemData.TrainingIndices,
    +         k,
    +         problemData.TargetVariable,
    +         problemData.AllowedInputVariables);
          }
          #endregion
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/Plugin.cs.frame

    r8430 → r8477
      [Plugin("HeuristicLab.Algorithms.DataAnalysis", "Provides wrappers for data analysis algorithms implemented in external libraries (linear regression, linear discriminant analysis, k-means clustering, support vector classification and regression)", "3.4.3.$WCREV$")]
      [PluginFile("HeuristicLab.Algorithms.DataAnalysis-3.4.dll", PluginFileType.Assembly)]
    - [PluginDependency("HeuristicLab.ALGLIB", "3.5.0")]
    + [PluginDependency("HeuristicLab.ALGLIB", "3.6.0")]
      [PluginDependency("HeuristicLab.Algorithms.GradientDescent", "3.3")]
      [PluginDependency("HeuristicLab.Analysis", "3.3")]
    …
      [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Regression", "3.4")]
      [PluginDependency("HeuristicLab.LibSVM", "1.6.3")]
    + [PluginDependency("HeuristicLab.Random", "3.3")]
      public class HeuristicLabAlgorithmsDataAnalysisPlugin : PluginBase {
      }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Encodings.SymbolicExpressionTreeEncoding

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Encodings.SymbolicExpressionTreeEncoding/3.4/SymbolicExpressionGrammar.cs

    r7842 → r8477

        #region IStatefulItem methods
    -   void IStatefulItem.InitializeState()
    -   {
    +   void IStatefulItem.InitializeState() {
          ReadOnly = false;
        }
    …
            var groupSymbol = s as GroupSymbol;
            if (groupSymbol != null) RegisterGroupSymbolEvents(groupSymbol);
    -       else symbol.Changed += new EventHandler(Symbol_Changed);
    +       else s.Changed += new EventHandler(Symbol_Changed);
          }
        }
    …
            var groupSymbol = s as GroupSymbol;
            if (groupSymbol != null) DeregisterGroupSymbolEvents(groupSymbol);
    -       else symbol.Changed -= new EventHandler(Symbol_Changed);
    +       else s.Changed -= new EventHandler(Symbol_Changed);
          }
        }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Encodings.SymbolicExpressionTreeEncoding/3.4/SymbolicExpressionTreeTerminalNode.cs

    r7268 → r8477
      public abstract class SymbolicExpressionTreeTerminalNode : SymbolicExpressionTreeNode {
        public override IEnumerable<ISymbolicExpressionTreeNode> Subtrees {
    -     get {
    -       return Enumerable.Empty<ISymbolicExpressionTreeNode>();
    -     }
    +     get { return Enumerable.Empty<ISymbolicExpressionTreeNode>(); }
        }

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis/3.4/SingleObjective/SymbolicTimeSeriesPrognosisSingleObjectiveProblem.cs

    r8430 → r8477
          var grammar = SymbolicExpressionTreeGrammar as TypeCoherentExpressionGrammar;
          if (grammar != null) grammar.ConfigureAsDefaultTimeSeriesPrognosisGrammar();
    +     UpdateGrammar();
    +   }
    +   protected override void UpdateGrammar() {
    +     base.UpdateGrammar();
    +     foreach (var autoregressiveSymbol in SymbolicExpressionTreeGrammar.Symbols.OfType<AutoregressiveTargetVariable>()) {
    +       if (!autoregressiveSymbol.Fixed) autoregressiveSymbol.VariableNames = ProblemData.TargetVariable.ToEnumerable();
    +     }
        }

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.Views

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.Views/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic.Views-3.4.csproj

    r7886 → r8477
      </PropertyGroup>
      <ItemGroup>
    -   <Reference Include="ALGLIB-3.5.0, Version=3.5.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
    -   <Reference Include="HeuristicLab.ALGLIB-3.5.0, Version=3.5.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
    +   <Reference Include="ALGLIB-3.6.0, Version=3.6.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    +     <HintPath>..\..\..\..\trunk\sources\bin\ALGLIB-3.6.0.dll</HintPath>
    +     <Private>False</Private>
    +   </Reference>
        <Reference Include="HeuristicLab.Collections-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
        <Reference Include="HeuristicLab.Common-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
    …
        <Reference Include="HeuristicLab.MainForm-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
        <Reference Include="HeuristicLab.MainForm.WindowsForms-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
    -   <Reference Include="HeuristicLab.MathJax-1.1, Version=1.1.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
        <Reference Include="HeuristicLab.Optimization-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
        <Reference Include="HeuristicLab.PluginInfrastructure-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL" />
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.Views/3.4/Plugin.cs.frame

    r8430 → r8477
      [PluginFile("HeuristicLab.Problems.DataAnalysis.Symbolic.Views-3.4.dll", PluginFileType.Assembly)]
      [PluginFile("displayModelFrame.html", PluginFileType.Data)]
    - [PluginDependency("HeuristicLab.ALGLIB", "3.5")]
    + [PluginDependency("HeuristicLab.ALGLIB", "3.6.0")]
      [PluginDependency("HeuristicLab.Collections", "3.3")]
      [PluginDependency("HeuristicLab.Common", "3.3")]
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic.Views/3.4/Symbols/VariableView.cs

    r8430 → r8477
        protected override void SetEnabledStateOfControls() {
          base.SetEnabledStateOfControls();
    +     enabledCheckBox.Enabled = Content != null && Content.VariableNames.Any() && !Locked && !ReadOnly;
          weightInitializationMuTextBox.Enabled = Content != null;
          weightInitializationMuTextBox.ReadOnly = ReadOnly;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Grammars/FullFunctionalExpressionGrammar.cs

    r7842 → r8477
          var laggedVariable = new LaggedVariable();
          laggedVariable.InitialFrequency = 0.0;
    +     var autoregressiveVariable = new AutoregressiveTargetVariable();
    +     autoregressiveVariable.InitialFrequency = 0.0;
    +     autoregressiveVariable.Enabled = false;

          var allSymbols = new List<Symbol>() { add, sub, mul, div, mean, sin, cos, tan, log, square, pow, sqrt, root, exp,
            airyA, airyB, bessel, cosineIntegral, dawson, erf, expIntegralEi, fresnelCosineIntegral, fresnelSineIntegral, gamma, hypCosineIntegral, hypSineIntegral, norm, psi, sineIntegral,
    -       @if, gt, lt, and, or, not, timeLag, integral, derivative, constant, variableSymbol, laggedVariable, variableCondition };
    +       @if, gt, lt, and, or, not, timeLag, integral, derivative, constant, variableSymbol, laggedVariable, autoregressiveVariable, variableCondition };
          var unaryFunctionSymbols = new List<Symbol>() { square, sqrt, sin, cos, tan, log, exp, not, timeLag, integral, derivative,
            airyA, airyB, bessel, cosineIntegral, dawson, erf, expIntegralEi, fresnelCosineIntegral, fresnelSineIntegral, gamma, hypCosineIntegral, hypSineIntegral, norm, psi, sineIntegral
    …
          var binaryFunctionSymbols = new List<Symbol>() { pow, root, gt, lt, variableCondition };
          var ternarySymbols = new List<Symbol>() { add, sub, mul, div, mean, and, or };
    -     var terminalSymbols = new List<Symbol>() { variableSymbol, constant, laggedVariable };
    +     var terminalSymbols = new List<Symbol>() { variableSymbol, constant, laggedVariable, autoregressiveVariable };

          foreach (var symb in allSymbols)
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Grammars/TypeCoherentExpressionGrammar.cs

    r8114 r8477  
       var variableSymbol = new Variable();
       var laggedVariable = new LaggedVariable();
+      var autoregressiveVariable = new AutoregressiveTargetVariable();
       #endregion

…
       var conditionalSymbols = new GroupSymbol(ConditionalSymbolsName, new List<ISymbol> { conditionSymbols, comparisonSymbols, booleanOperationSymbols });

-      var timeSeriesSymbols = new GroupSymbol(TimeSeriesSymbolsName, new List<ISymbol> { timeLag, integral, derivative, laggedVariable });
+      var timeSeriesSymbols = new GroupSymbol(TimeSeriesSymbolsName, new List<ISymbol> { timeLag, integral, derivative, laggedVariable, autoregressiveVariable });
       #endregion

…
       SetSubtreeCount(derivative, 1, 1);
       SetSubtreeCount(laggedVariable, 0, 0);
+      SetSubtreeCount(autoregressiveVariable, 0, 0);
       #endregion

…

     public void ConfigureAsDefaultTimeSeriesPrognosisGrammar() {
-      Symbols.First(s => s is Variable).Enabled = false;
+      Symbols.First(s => s is Average).Enabled = false;
       Symbols.First(s => s.Name == TrigonometricFunctionsName).Enabled = false;
       Symbols.First(s => s.Name == PowerFunctionsName).Enabled = false;
       Symbols.First(s => s.Name == ConditionalSymbolsName).Enabled = false;
       Symbols.First(s => s.Name == SpecialFunctionsName).Enabled = false;
+
+      Symbols.First(s => s.Name == TimeSeriesSymbolsName).Enabled = true;
+      Symbols.First(s => s is Derivative).Enabled = false;
+      Symbols.First(s => s is Integral).Enabled = false;
+      Symbols.First(s => s is TimeLag).Enabled = false;
     }
   }
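
    ConfigureAsDefaultTimeSeriesPrognosisGrammar now enables the time-series group as a whole and then switches off TimeLag, Integral and Derivative individually, leaving only the lagged and autoregressive variables active; it also disables Average instead of Variable, so plain input variables stay available. A standalone sketch of the assumed group/child enabling semantics (stub types, not the HeuristicLab implementation):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    class Sym {
      public string Name;
      public bool Enabled = true;
    }

    class Program {
      static void Main() {
        bool timeSeriesGroupEnabled = true; // the group itself is switched on
        var timeSeriesGroup = new List<Sym> {
          new Sym { Name = "TimeLag", Enabled = false },
          new Sym { Name = "Integral", Enabled = false },
          new Sym { Name = "Derivative", Enabled = false },
          new Sym { Name = "LaggedVariable" },
          new Sym { Name = "AutoregressiveTargetVariable" }
        };
        // a symbol is effectively usable only if it and its group are enabled
        var active = timeSeriesGroup.Where(s => timeSeriesGroupEnabled && s.Enabled)
                                    .Select(s => s.Name);
        Console.WriteLine(string.Join(", ", active));
        // prints: LaggedVariable, AutoregressiveTargetVariable
      }
    }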
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.csproj

    r8430 r8477  
     <Compile Include="Symbols\AiryB.cs" />
     <Compile Include="Symbols\Bessel.cs" />
+    <Compile Include="Symbols\AutoregressiveVariable.cs" />
     <Compile Include="Symbols\Erf.cs" />
     <Compile Include="Symbols\Norm.cs" />
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Interpreter/OpCodes.cs

    r8436 r8477  
       { typeof(Variable), OpCodes.Variable },
       { typeof(LaggedVariable), OpCodes.LagVariable },
+      { typeof(AutoregressiveTargetVariable), OpCodes.LagVariable },
       { typeof(Constant), OpCodes.Constant },
       { typeof(Argument), OpCodes.Arg },
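
    Mapping AutoregressiveTargetVariable to the existing OpCodes.LagVariable means the interpreter evaluates it exactly like a LaggedVariable, so no new evaluation routine is needed; the symbol likewise inherits LaggedVariable's tree node (its own CreateTreeNode override was removed below). A standalone sketch of this type-to-opcode dispatch (stand-in types; the opcode value is illustrative, not the real one):

    using System;
    using System.Collections.Generic;

    class LaggedVariable { }
    class AutoregressiveTargetVariable : LaggedVariable { }

    static class OpCodes {
      public const byte LagVariable = 27; // illustrative value only
    }

    class Program {
      static readonly Dictionary<Type, byte> symbolToOpCode = new Dictionary<Type, byte> {
        { typeof(LaggedVariable), OpCodes.LagVariable },
        { typeof(AutoregressiveTargetVariable), OpCodes.LagVariable } // same opcode reused
      };

      static void Main() {
        // both symbol types dispatch to the same interpreter routine
        Console.WriteLine(symbolToOpCode[typeof(AutoregressiveTargetVariable)]); // 27
      }
    }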
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/AutoregressiveVariable.cs

    r8475 r8477  
 using HeuristicLab.Common;
 using HeuristicLab.Core;
-using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
 namespace HeuristicLab.Problems.DataAnalysis.Symbolic {
   [StorableClass]
   [Item("LaggedVariable", "Represents a variable value with a time offset.")]
-  public sealed class LaggedVariable : Variable {
-    [Storable]
-    private int minLag;
-    public int MinLag {
-      get { return minLag; }
-      set { minLag = value; }
+  public sealed class AutoregressiveTargetVariable : LaggedVariable {
+    [StorableConstructor]
+    private AutoregressiveTargetVariable(bool deserializing) : base(deserializing) { }
+    private AutoregressiveTargetVariable(AutoregressiveTargetVariable original, Cloner cloner)
+      : base(original, cloner) {
     }
-    [Storable]
-    private int maxLag;
-    public int MaxLag {
-      get { return maxLag; }
-      set { maxLag = value; }
-    }
-    [StorableConstructor]
-    private LaggedVariable(bool deserializing) : base(deserializing) { }
-    private LaggedVariable(LaggedVariable original, Cloner cloner)
-      : base(original, cloner) {
-      minLag = original.minLag;
-      maxLag = original.maxLag;
-    }
-    public LaggedVariable()
-      : base("LaggedVariable", "Represents a variable value with a time offset.") {
-      minLag = -1; maxLag = -1;
-    }
-
-    public override ISymbolicExpressionTreeNode CreateTreeNode() {
-      return new LaggedVariableTreeNode(this);
-    }
-
+    public AutoregressiveTargetVariable() : base("Autoregressive Target Variable", "Represents the target variable with a time offset.") { }
     public override IDeepCloneable Clone(Cloner cloner) {
-      return new LaggedVariable(this, cloner);
+      return new AutoregressiveTargetVariable(this, cloner);
     }
   }
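
    The new symbol follows the storable/clone conventions visible in the diff: a deserialization constructor, a copy constructor chaining to the base class, and a Clone override returning the concrete type. A self-contained illustration of why the override matters (Cloner and persistence bookkeeping omitted; these are stub types, not the HeuristicLab classes):

    using System;

    class LaggedVariable {
      public string Name;
      public int MinLag = -1, MaxLag = -1;
      public LaggedVariable(string name) { Name = name; }
      protected LaggedVariable(LaggedVariable original) {
        Name = original.Name;
        MinLag = original.MinLag;
        MaxLag = original.MaxLag;
      }
      public virtual LaggedVariable Clone() { return new LaggedVariable(this); }
    }

    class AutoregressiveTargetVariable : LaggedVariable {
      public AutoregressiveTargetVariable() : base("Autoregressive Target Variable") { }
      private AutoregressiveTargetVariable(AutoregressiveTargetVariable original) : base(original) { }
      // without this override, Clone() would silently produce a plain LaggedVariable
      public override LaggedVariable Clone() { return new AutoregressiveTargetVariable(this); }
    }

    class Program {
      static void Main() {
        LaggedVariable symbol = new AutoregressiveTargetVariable();
        Console.WriteLine(symbol.Clone().GetType().Name); // AutoregressiveTargetVariable
      }
    }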
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/LaggedVariable.cs

    r7268 r8477  
   [StorableClass]
   [Item("LaggedVariable", "Represents a variable value with a time offset.")]
-  public sealed class LaggedVariable : Variable {
+  public class LaggedVariable : Variable {
     [Storable]
     private int minLag;
…
     }
     [StorableConstructor]
-    private LaggedVariable(bool deserializing) : base(deserializing) { }
-    private LaggedVariable(LaggedVariable original, Cloner cloner)
+    protected LaggedVariable(bool deserializing) : base(deserializing) { }
+    protected LaggedVariable(LaggedVariable original, Cloner cloner)
       : base(original, cloner) {
       minLag = original.minLag;
       maxLag = original.maxLag;
     }
-    public LaggedVariable()
-      : base("LaggedVariable", "Represents a variable value with a time offset.") {
-      minLag = -1; maxLag = -1;
+    public LaggedVariable() : this("LaggedVariable", "Represents a variable value with a time offset.") { }
+    protected LaggedVariable(string name, string description)
+      : base(name, description) {
+      MinLag = -1;
+      MaxLag = -1;
     }

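
    Unsealing LaggedVariable and making its constructors protected is what allows AutoregressiveTargetVariable to derive from it: the public parameterless constructor now delegates to a new protected (name, description) overload, so subclasses can supply their own display name while reusing the lag initialization. A compilable sketch of that constructor chain (the Variable base class is replaced by a stub):

    using System;

    class Variable {
      public string Name { get; private set; }
      public string Description { get; private set; }
      protected Variable(string name, string description) {
        Name = name;
        Description = description;
      }
    }

    class LaggedVariable : Variable {
      public int MinLag { get; set; }
      public int MaxLag { get; set; }
      public LaggedVariable() : this("LaggedVariable", "Represents a variable value with a time offset.") { }
      protected LaggedVariable(string name, string description) : base(name, description) {
        MinLag = -1; // default lag range shared by all subclasses
        MaxLag = -1;
      }
    }

    class AutoregressiveTargetVariable : LaggedVariable {
      public AutoregressiveTargetVariable()
        : base("Autoregressive Target Variable", "Represents the target variable with a time offset.") { }
    }

    class Program {
      static void Main() {
        var ar = new AutoregressiveTargetVariable();
        Console.WriteLine(ar.Name + ": lags " + ar.MinLag + ".." + ar.MaxLag); // lags -1..-1
      }
    }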
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/LaggedVariableTreeNode.cs

    r7268 r8477  
     }

+    public override bool HasLocalParameters {
+      get { return true; }
+    }
+
     [StorableConstructor]
     private LaggedVariableTreeNode(bool deserializing) : base(deserializing) { }
…
       lag = original.lag;
     }
-    private LaggedVariableTreeNode() { }

     public LaggedVariableTreeNode(LaggedVariable variableSymbol) : base(variableSymbol) { }

-    public override bool HasLocalParameters {
-      get {
-        return true;
-      }
-    }

     public override void ResetLocalParameters(IRandom random) {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Symbols/Variable.cs

    r7268 r8477  
     }

+    public override bool Enabled {
+      get {
+        if (variableNames.Count == 0) return false;
+        return base.Enabled;
+      }
+      set {
+        if (variableNames.Count == 0) base.Enabled = false;
+        else base.Enabled = value;
+      }
+    }
+
     private const int minimumArity = 0;
     private const int maximumArity = 0;
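
    The new Enabled override guards against enabling a variable symbol that has no variable names to draw from; the setter silently refuses such requests, which matters for purely autoregressive setups where no input variables exist. A standalone sketch of the behavior (the symbol base class is replaced by a stub):

    using System;
    using System.Collections.Generic;

    class SymbolBase {
      private bool enabled = true;
      public virtual bool Enabled {
        get { return enabled; }
        set { enabled = value; }
      }
    }

    class Variable : SymbolBase {
      private readonly List<string> variableNames = new List<string>();
      public override bool Enabled {
        get {
          if (variableNames.Count == 0) return false; // nothing to sample from
          return base.Enabled;
        }
        set {
          if (variableNames.Count == 0) base.Enabled = false; // ignore enable requests
          else base.Enabled = value;
        }
      }
    }

    class Program {
      static void Main() {
        var v = new Variable();       // no variable names configured
        v.Enabled = true;             // request is silently dropped
        Console.WriteLine(v.Enabled); // False
      }
    }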
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/TimeSeriesPrognosis/TimeSeriesPrognosisProblemData.cs

    r8460 r8477  
 using System;
 using System.Collections.Generic;
+using System.Linq;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
…
       defaultDataset = new Dataset(new string[] { "x" }, mackey_glass_17);
       defaultDataset.Name = "Mackey-Glass (t=17) Time Series Benchmark Dataset";
-      defaultAllowedInputVariables = new List<string>() { "x" };
+      defaultAllowedInputVariables = Enumerable.Empty<string>();
       defaultTargetVariable = "x";
     }
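
    With the autoregressive target symbol available, the Mackey-Glass default no longer lists "x" as an allowed input variable: models reference past values of the target directly. A toy example (not HeuristicLab code; series and coefficients are made up) of such a purely autoregressive prediction:

    using System;
    using System.Linq;

    class Program {
      static void Main() {
        double[] x = { 1.20, 1.10, 0.90, 0.95, 1.05 }; // toy stand-in for the target series
        // toy AR(2) model: x(t) = 0.7 * x(t-1) + 0.25 * x(t-2)
        var forecasts = Enumerable.Range(2, x.Length - 2)
                                  .Select(t => 0.7 * x[t - 1] + 0.25 * x[t - 2]);
        Console.WriteLine(string.Join(", ", forecasts.Select(f => f.ToString("F3"))));
      }
    }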