
Timestamp:
08/13/12 16:18:37 (12 years ago)
Author:
mkommend
Message:

#1081:

  • Added autoregressive target variable Symbol
  • Merged trunk changes into the branch.
Location:
branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis
Files:
2 edited

Legend:

Unchanged lines are shown without a prefix; removed lines are prefixed with "-", added lines with "+".
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r8416 → r8477
    private double[,] x;
    [Storable]
-   private Scaling scaling;
+   private Scaling inputScaling;
     
      this.meanFunction = cloner.Clone(original.meanFunction);
      this.covarianceFunction = cloner.Clone(original.covarianceFunction);
-     this.scaling = cloner.Clone(original.scaling);
+     this.inputScaling = cloner.Clone(original.inputScaling);
      this.negativeLogLikelihood = original.negativeLogLikelihood;
      this.targetVariable = original.targetVariable;
     
      this.allowedInputVariables = allowedInputVariables.ToArray();

-     sqrSigmaNoise = Math.Exp(2.0 * hyp.First());
-     sqrSigmaNoise = Math.Max(10E-6, sqrSigmaNoise); // lower limit for the noise level

      int nVariables = this.allowedInputVariables.Length;
-     this.meanFunction.SetParameter(hyp.Skip(1)
+     this.meanFunction.SetParameter(hyp
        .Take(this.meanFunction.GetNumberOfParameters(nVariables))
        .ToArray());
-     this.covarianceFunction.SetParameter(hyp.Skip(1 + this.meanFunction.GetNumberOfParameters(nVariables))
+     this.covarianceFunction.SetParameter(hyp.Skip(this.meanFunction.GetNumberOfParameters(nVariables))
        .Take(this.covarianceFunction.GetNumberOfParameters(nVariables))
        .ToArray());
+     sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());

      CalculateModel(ds, rows);
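With this revision the noise hyperparameter is read from the end of the vector (hyp.Last()) instead of the front, so the layout is presumably [mean parameters | covariance parameters | log noise]. A minimal standalone sketch of that slicing order; the parameter counts and values below are invented for illustration and are not part of the changeset:

    // Hypothetical hyperparameter layout after this revision:
    // [mean parameters | covariance parameters | log(sigma_noise)]
    using System;
    using System.Linq;

    class HypLayoutSketch {
      static void Main() {
        // made-up counts and values, only to show the slicing order
        int nMeanParams = 1, nCovParams = 3;
        double[] hyp = { 0.1, 0.2, 0.3, 0.4, Math.Log(0.05) };

        double[] meanHyp = hyp.Take(nMeanParams).ToArray();
        double[] covHyp = hyp.Skip(nMeanParams).Take(nCovParams).ToArray();
        double sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());   // noise term is read last

        Console.WriteLine($"mean: [{string.Join(", ", meanHyp)}]");
        Console.WriteLine($"cov:  [{string.Join(", ", covHyp)}]");
        Console.WriteLine($"sigma^2 = {sqrSigmaNoise}");
      }
    }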
     
    private void CalculateModel(Dataset ds, IEnumerable<int> rows) {
-     scaling = new Scaling(ds, allowedInputVariables, rows);
-     x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, scaling);
-
-     var y = ds.GetDoubleValues(targetVariable, rows).ToArray();
+     inputScaling = new Scaling(ds, allowedInputVariables, rows);
+     x = AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputVariables, rows, inputScaling);
+     var y = ds.GetDoubleValues(targetVariable, rows);

      int n = x.GetLength(0);
     
      double[] m = meanFunction.GetMean(x);
      for (int i = 0; i < n; i++) {
-
        for (int j = i; j < n; j++) {
          l[j, i] = covarianceFunction.GetCovariance(i, j) / sqrSigmaNoise;
     
      var res = alglib.trfac.spdmatrixcholesky(ref l, n, false);
-     if (!res) throw new InvalidOperationException("Matrix is not positive semidefinite");
+     if (!res) throw new ArgumentException("Matrix is not positive semidefinite");

      // calculate sum of diagonal elements for likelihood
     
      int n = x.GetLength(0);
      int nAllowedVariables = x.GetLength(1);
-     double[,] q = new double[n, n];
-     double[,] eye = new double[n, n];
-     for (int i = 0; i < n; i++) eye[i, i] = 1.0;

      int info;
-     alglib.densesolverreport denseSolveRep;
-
-     alglib.spdmatrixcholeskysolvem(l, n, false, eye, n, out info, out denseSolveRep, out q);
-     // double[,] a2 = outerProd(alpha, alpha);
+     alglib.matinvreport matInvRep;
+     double[,] lCopy = new double[l.GetLength(0), l.GetLength(1)];
+     Array.Copy(l, lCopy, lCopy.Length);
+
+     alglib.spdmatrixcholeskyinverse(ref lCopy, n, false, out info, out matInvRep);
+     if (info != 1) throw new ArgumentException("Can't invert matrix to calculate gradients.");
      for (int i = 0; i < n; i++) {
-       for (int j = 0; j < n; j++)
-         q[i, j] = q[i, j] / sqrSigmaNoise - alpha[i] * alpha[j]; // a2[i, j];
-     }
-
-     double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => q[i, i]).Sum();
+       for (int j = 0; j <= i; j++)
+         lCopy[i, j] = lCopy[i, j] / sqrSigmaNoise - alpha[i] * alpha[j];
+     }
+
+     double noiseGradient = sqrSigmaNoise * Enumerable.Range(0, n).Select(i => lCopy[i, i]).Sum();

      double[] meanGradients = new double[meanFunction.GetNumberOfParameters(nAllowedVariables)];
     
      if (covGradients.Length > 0) {
        for (int i = 0; i < n; i++) {
-         for (int j = 0; j < n; j++) {
-           var covDeriv = covarianceFunction.GetGradient(i, j);
-           for (int k = 0; k < covGradients.Length; k++) {
-             covGradients[k] += q[i, j] * covDeriv[k];
+         for (int k = 0; k < covGradients.Length; k++) {
+           for (int j = 0; j < i; j++) {
+             covGradients[k] += lCopy[i, j] * covarianceFunction.GetGradient(i, j, k);
            }
+           covGradients[k] += 0.5 * lCopy[i, i] * covarianceFunction.GetGradient(i, i, k);
          }
        }
-       covGradients = covGradients.Select(g => g / 2.0).ToArray();
-     }
-
-     return new double[] { noiseGradient }
-       .Concat(meanGradients)
-       .Concat(covGradients).ToArray();
+     }
+
+     return
+       meanGradients
+       .Concat(covGradients)
+       .Concat(new double[] { noiseGradient }).ToArray();
    }
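The reworked gradient code inverts the factored matrix with spdmatrixcholeskyinverse on a copy of the Cholesky factor and then accumulates covariance gradients over the lower triangle only, weighting the diagonal by 0.5, where the old code summed over the full matrix and halved afterwards; for symmetric matrices both forms give the same result. A small self-contained check of that identity, with made-up 2×2 matrices standing in for lCopy and the covariance gradient:

    using System;

    class TriangleSumCheck {
      static void Main() {
        // q and dK are symmetric, like lCopy and the covariance gradients in the model
        double[,] q  = { { 2.0, 0.5 }, { 0.5, 3.0 } };
        double[,] dK = { { 1.0, 0.2 }, { 0.2, 4.0 } };
        int n = 2;

        // old form: full double sum, divided by 2 afterwards
        double full = 0.0;
        for (int i = 0; i < n; i++)
          for (int j = 0; j < n; j++)
            full += q[i, j] * dK[i, j];

        // new form: strict lower triangle plus half the diagonal
        double tri = 0.0;
        for (int i = 0; i < n; i++) {
          for (int j = 0; j < i; j++) tri += q[i, j] * dK[i, j];
          tri += 0.5 * q[i, i] * dK[i, i];
        }

        Console.WriteLine($"{full / 2.0} == {tri}");   // both evaluate to 7.1
      }
    }

The returned gradient vector is also reordered (mean, covariance, noise last), which lines up with the new hyperparameter layout noted above.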
     
    private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
-     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, scaling);
+     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
      int newN = newX.GetLength(0);
      int n = x.GetLength(0);
     
      // var kss = new double[newN];
      var Ks = new double[newN, n];
-     double[,] sWKs = new double[n, newN];
+     //double[,] sWKs = new double[n, newN];
      // double[,] v;
     
      var ms = meanFunction.GetMean(newX);
      for (int i = 0; i < newN; i++) {
-
        for (int j = 0; j < n; j++) {
          Ks[i, j] = covarianceFunction.GetCovariance(j, i);
-         sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
+         //sWKs[j, i] = Ks[i, j] / Math.Sqrt(sqrSigmaNoise);
        }
      }
     
      // alglib.rmatrixsolvem(l, n, sWKs, newN, true, out info, out denseSolveRep, out v);

-
+     return Enumerable.Range(0, newN)
+       .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
+     //for (int i = 0; i < newN; i++) {
+     //  // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
+     //  // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
+     //  // predVar[i] = kss[i] - sumV2;
+     //}
+
+   }
+
+   public IEnumerable<double> GetEstimatedVariance(Dataset dataset, IEnumerable<int> rows) {
+     var newX = AlglibUtil.PrepareAndScaleInputMatrix(dataset, allowedInputVariables, rows, inputScaling);
+     int newN = newX.GetLength(0);
+     int n = x.GetLength(0);
+
+     var kss = new double[newN];
+     double[,] sWKs = new double[n, newN];
+
+     // for stddev
+     covarianceFunction.SetData(newX);
+     for (int i = 0; i < newN; i++)
+       kss[i] = covarianceFunction.GetCovariance(i, i);
+
+     covarianceFunction.SetData(x, newX);
      for (int i = 0; i < newN; i++) {
-       // predMean[i] = ms[i] + prod(GetRow(Ks, i), alpha);
-       yield return ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha);
-       // var sumV2 = prod(GetCol(v, i), GetCol(v, i));
-       // predVar[i] = kss[i] - sumV2;
-     }
-
+       for (int j = 0; j < n; j++) {
+         sWKs[j, i] = covarianceFunction.GetCovariance(j, i) / Math.Sqrt(sqrSigmaNoise);
+       }
+     }
+
+     // for stddev
+     int info;
+     alglib.densesolverreport denseSolveRep;
+     double[,] v;
+
+     alglib.rmatrixsolvem(l, n, sWKs, newN, false, out info, out denseSolveRep, out v);
+
+     for (int i = 0; i < newN; i++) {
+       var sumV = Util.ScalarProd(Util.GetCol(v, i), Util.GetCol(v, i));
+       kss[i] -= sumV;
+       if (kss[i] < 0) kss[i] = 0;
+     }
+     return kss;
    }
  }
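The new GetEstimatedVariance method appears to follow the standard GP predictive-variance recipe: solve l · v = sWKs and report kss[i] minus the squared norm of the i-th column of v, clamped at zero. A tiny standalone sketch of that arithmetic with a single training point; all numbers are invented and the 1×1 "solve" degenerates to a division:

    using System;

    class PredictiveVarianceSketch {
      static void Main() {
        // invented scalar stand-ins for the quantities in GetEstimatedVariance
        double sqrSigmaNoise = 1.0;   // noise variance
        double l = 2.0;               // 1x1 Cholesky factor of the scaled training covariance
        double kss = 1.0;             // prior variance of the test point
        double ks = 0.8;              // covariance between test and training point

        double sWKs = ks / Math.Sqrt(sqrSigmaNoise);
        double v = sWKs / l;                          // solve l * v = sWKs
        double predVar = Math.Max(0.0, kss - v * v);  // clamp negative values to zero

        Console.WriteLine(predVar);                   // prints 0.84
      }
    }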