#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.TimeSeriesPrognosis;

namespace HeuristicLab.Algorithms.DataAnalysis.TimeSeries {
  [Item("Autoregressive Modeling (AR)", "Time series modeling algorithm that creates AR-N models.")]
  [Creatable(CreatableAttribute.Categories.DataAnalysis, Priority = 130)]
  [StorableType("7D0CD322-3E9F-43C7-BF62-C6CFDCE889AE")]
  public class AutoregressiveModeling : FixedDataAnalysisAlgorithm<ITimeSeriesPrognosisProblem> {
    private const string TimeOffsetParameterName = "Maximum Time Offset";

    public IFixedValueParameter<IntValue> TimeOffsetParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[TimeOffsetParameterName]; }
    }

    public int TimeOffset {
      get { return TimeOffsetParameter.Value.Value; }
      set { TimeOffsetParameter.Value.Value = value; }
    }

    [StorableConstructor]
    protected AutoregressiveModeling(bool deserializing) : base(deserializing) { }
    protected AutoregressiveModeling(AutoregressiveModeling original, Cloner cloner) : base(original, cloner) { }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new AutoregressiveModeling(this, cloner);
    }

    public AutoregressiveModeling()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(TimeOffsetParameterName, "The maximum time offset for the model ranging from 1 to infinity.", new IntValue(1)));
      Problem = new TimeSeriesPrognosisProblem();
    }

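    // Fits the AR model on the training partition and adds the solution together with its
    // training RMSE and the cross-validation RMSE estimate to the results collection.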
    protected override void Run() {
      double rmsError, cvRmsError;
      var solution = CreateAutoRegressiveSolution(Problem.ProblemData, TimeOffset, out rmsError, out cvRmsError);
      Results.Add(new Result("Autoregressive solution", "The autoregressive time series prognosis solution.", solution));
      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the autoregressive time series prognosis solution on the training set.", new DoubleValue(rmsError)));
      Results.Add(new Result("Estimated root mean square error (cross-validation)", "The estimated root of the mean of squared errors of the autoregressive time series prognosis solution via cross validation.", new DoubleValue(cvRmsError)));
    }

    /// <summary>
    /// Calculates an AR(p) model. For further information see http://en.wikipedia.org/wiki/Autoregressive_model
    /// </summary>
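    /// <remarks>
    /// The resulting model has the form y(t) = w_1 * y(t-1) + ... + w_p * y(t-p) + c, where p is the
    /// given time offset and the weights w_i and the constant c are fitted by ordinary least squares
    /// (ALGLIB lrbuild) on the training partition of the problem data.
    /// </remarks>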
    /// <param name="problemData">The problem data which should be used for training</param>
    /// <param name="timeOffset">The parameter p of the AR(p) model specifying the maximum time offset [1,infinity]</param>
    /// <returns>The time series autoregressive solution</returns>
    public static ITimeSeriesPrognosisSolution CreateAutoRegressiveSolution(ITimeSeriesPrognosisProblemData problemData, int timeOffset) {
      double rmsError, cvRmsError;
      return CreateAutoRegressiveSolution(problemData, timeOffset, out rmsError, out cvRmsError);
    }

    private static ITimeSeriesPrognosisSolution CreateAutoRegressiveSolution(ITimeSeriesPrognosisProblemData problemData, int timeOffset, out double rmsError, out double cvRmsError) {
      string targetVariable = problemData.TargetVariable;

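      // Build the regression design matrix: row i holds the timeOffset previous target values
      // (lags 1..timeOffset) for training row i, and the last column holds the current target value.
      // Note: this assumes the training partition starts at an index >= timeOffset so that all lagged
      // values exist in the dataset.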
      double[,] inputMatrix = new double[problemData.TrainingPartition.Size, timeOffset + 1];
      var targetValues = problemData.Dataset.GetDoubleValues(targetVariable).ToList();
      for (int i = 0, row = problemData.TrainingPartition.Start; i < problemData.TrainingPartition.Size; i++, row++) {
        for (int col = 0; col < timeOffset; col++) {
          inputMatrix[i, col] = targetValues[row - col - 1];
        }
      }
      // set target values in last column
      for (int i = 0; i < inputMatrix.GetLength(0); i++)
        inputMatrix[i, timeOffset] = targetValues[i + problemData.TrainingPartition.Start];

      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Linear regression does not support NaN or infinity values in the input dataset.");
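      // Fit an ordinary least squares linear model (with intercept) via ALGLIB; the report provides
      // the RMSE on the training rows and a leave-one-out cross-validation estimate of the RMSE.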
      alglib.linearmodel lm = new alglib.linearmodel();
      alglib.lrreport ar = new alglib.lrreport();
      int nRows = inputMatrix.GetLength(0);
      int nFeatures = inputMatrix.GetLength(1) - 1;
      double[] coefficients = new double[nFeatures + 1]; // last coefficient is for the constant

      int retVal = 1;
      alglib.lrbuild(inputMatrix, nRows, nFeatures, out retVal, out lm, out ar);
      if (retVal != 1) throw new ArgumentException("Error in calculation of linear regression solution");
      rmsError = ar.rmserror;
      cvRmsError = ar.cvrmserror;

      alglib.lrunpack(lm, out coefficients, out nFeatures);
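      // Encode the fitted model as a symbolic expression tree: an addition of lagged target
      // variables (one term per AR coefficient) plus a constant node for the intercept.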
      ISymbolicExpressionTree tree = new SymbolicExpressionTree(new ProgramRootSymbol().CreateTreeNode());
      ISymbolicExpressionTreeNode startNode = new StartSymbol().CreateTreeNode();
      tree.Root.AddSubtree(startNode);
      ISymbolicExpressionTreeNode addition = new Addition().CreateTreeNode();
      startNode.AddSubtree(addition);

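      // The i-th coefficient weights the target variable lagged by (i + 1) time steps.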
      for (int i = 0; i < timeOffset; i++) {
        LaggedVariableTreeNode node = (LaggedVariableTreeNode)new LaggedVariable().CreateTreeNode();
        node.VariableName = targetVariable;
        node.Weight = coefficients[i];
        node.Lag = (i + 1) * -1;
        addition.AddSubtree(node);
      }

      ConstantTreeNode cNode = (ConstantTreeNode)new Constant().CreateTreeNode();
      cNode.Value = coefficients[coefficients.Length - 1];
      addition.AddSubtree(cNode);

      var interpreter = new SymbolicTimeSeriesPrognosisExpressionTreeInterpreter(problemData.TargetVariable);
      var model = new SymbolicTimeSeriesPrognosisModel(tree, interpreter);
      var solution = model.CreateTimeSeriesPrognosisSolution((ITimeSeriesPrognosisProblemData)problemData.Clone());
      return solution;
    }
  }
}