#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System.Linq;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Problems.Instances.DataAnalysis;
using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace HeuristicLab.Algorithms.DataAnalysis_34.Tests {
  [TestClass]
  // reference values calculated with Rasmussen's GPML MATLAB package
  public class GaussianProcessModelTest {
    [TestMethod]
    [DeploymentItem(@"HeuristicLab.Algorithms.DataAnalysis-3.4/co2.txt")]
    public void GaussianProcessModelOutputTest() {
      var provider = new RegressionCSVInstanceProvider();
      var problemData = provider.ImportData("co2.txt");

      var targetVariable = "interpolated";
      var allowedInputVariables = new string[] { "decimal date" };
      var rows = Enumerable.Range(0, 401);

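      // model structure: constant mean and a composite covariance function
      // k = SEiso + SEiso * Periodic, roughly a smooth long-term trend plus a
      // (quasi-)periodic seasonal component of the CO2 series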
      var meanFunction = new MeanConst();
      var covarianceFunction = new CovarianceSum();
      covarianceFunction.Terms.Add(new CovarianceSquaredExponentialIso());
      var prod = new CovarianceProduct();
      prod.Factors.Add(new CovarianceSquaredExponentialIso());
      prod.Factors.Add(new CovariancePeriodic());
      covarianceFunction.Terms.Add(prod);

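      // case 1: all hyperparameters set to zero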
      {
        var hyp = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
        var model = new GaussianProcessModel(problemData.Dataset, targetVariable, allowedInputVariables, rows, hyp,
                                             meanFunction,
                                             covarianceFunction);
        Assert.AreEqual(4.3170e+004, model.NegativeLogLikelihood, 1);

        var dHyp = model.HyperparameterGradients;
        Assert.AreEqual(-248.7932, dHyp[0], 1E-2);
        var dHypCovExpected = new double[] { -0.5550e4, -5.5533e4, -0.2511e4, -2.7625e4, -1.3033e4, 0.0289e4, -2.7625e4 };
        AssertEqual(dHypCovExpected, dHyp.Skip(1).Take(7).ToArray(), 1);
        Assert.AreEqual(-2.0171e+003, dHyp.Last(), 1);

        var predTrain = model.GetEstimatedValues(problemData.Dataset, new int[] { 0, 400 }).ToArray();
        Assert.AreEqual(310.5930, predTrain[0], 1e-3);
        Assert.AreEqual(347.9993, predTrain[1], 1e-3);

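        // the variance estimates are only evaluated to make sure the computation
        // runs without throwing; no reference values are checked here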
        var predTrainVar = model.GetEstimatedVariance(problemData.Dataset, problemData.TrainingIndices).ToArray();
      }

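      // case 2: a fixed set of non-zero hyperparameters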
      {
        var hyp = new double[] { 0.029973094285941, 0.455535210579926, 3.438647883940457, 1.464114485889487, 3.001788584487478, 3.815289323309630, 4.374914122810222, 3.001788584487478, 0.716427415979145 };
        var model = new GaussianProcessModel(problemData.Dataset, targetVariable, allowedInputVariables, rows, hyp,
                                             meanFunction,
                                             covarianceFunction);
        Assert.AreEqual(872.8448, model.NegativeLogLikelihood, 1e-3);

        var dHyp = model.HyperparameterGradients;
        Assert.AreEqual(-0.0046, dHyp[0], 1e-3);
        var dHypCovExpected = new double[] { 0.2652, -0.2386, 0.1706, -0.1744, 0.0000, 0.0000, -0.1744 };
        AssertEqual(dHypCovExpected, dHyp.Skip(1).Take(7).ToArray(), 1e-3);
        Assert.AreEqual(0.8621, dHyp.Last(), 1e-3);

        var predTrain = model.GetEstimatedValues(problemData.Dataset, new int[] { 0, 400 }).ToArray();
        Assert.AreEqual(315.3692, predTrain[0], 1e-3);
        Assert.AreEqual(356.6076, predTrain[1], 1e-3);
      }
    }

    private void AssertEqual(double[] expected, double[] actual, double delta = 1E-3) {
      Assert.AreEqual(expected.Length, actual.Length);
      for (int i = 0; i < expected.Length; i++)
        Assert.AreEqual(expected[i], actual[i], delta);
    }
  }
}