#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HEAL.Attic;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [Item("Generalized Additive Model (GAM)", "Generalized additive model using uni-variate penalized regression splines as base learner.")]
  [StorableType("98A887E7-73DD-4602-BD6C-2F6B9E6FBBC5")]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 600)]
  public sealed class GeneralizedAdditiveModelAlgorithm : FixedDataAnalysisAlgorithm<IRegressionProblem> {

    #region ParameterNames
    private const string IterationsParameterName = "Iterations";
    private const string LambdaParameterName = "Lambda";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string CreateSolutionParameterName = "CreateSolution";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[IterationsParameterName]; }
    }

    public IFixedValueParameter<DoubleValue> LambdaParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[LambdaParameterName]; }
    }

    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }

    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }

    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
    }
    #endregion

    #region Properties
    public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }

    public double Lambda {
      get { return LambdaParameter.Value.Value; }
      set { LambdaParameter.Value.Value = value; }
    }

    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }

    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }

    public bool CreateSolution {
      get { return CreateSolutionParameter.Value.Value; }
      set { CreateSolutionParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    private GeneralizedAdditiveModelAlgorithm(StorableConstructorFlag deserializing)
      : base(deserializing) {
    }

    private GeneralizedAdditiveModelAlgorithm(GeneralizedAdditiveModelAlgorithm original, Cloner cloner)
      : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new GeneralizedAdditiveModelAlgorithm(this, cloner);
    }

    public GeneralizedAdditiveModelAlgorithm() {
      Problem = new RegressionProblem(); // default problem

      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName,
        "Number of iterations. Try a large value and check convergence of the error over iterations. Usually, only a few iterations (e.g. 10) are needed for convergence.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(LambdaParameterName,
        "The penalty parameter for the penalized regression splines. Set to a value between -8 (weak smoothing) and 8 (strong smoothing). Usually, a value between -4 and 4 should be fine.", new DoubleValue(3)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName,
        "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName,
        "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
        "Flag that indicates if a solution should be produced at the end of the run.", new BoolValue(true)));
      Parameters[CreateSolutionParameterName].Hidden = true;
    }

    protected override void Run(CancellationToken cancellationToken) {
      // Set up the algorithm
      if (SetSeedRandomly) Seed = new System.Random().Next();
      var rand = new MersenneTwister((uint)Seed);

      // calculates a GAM model using univariate non-linear functions
      // using the backfitting algorithm (see The Elements of Statistical Learning, page 298)
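      // In each backfitting sweep every term f_j is refit against the current partial residual
      // r = y - avgY - sum_{k != j} f_k(x_k), so the fitted terms jointly approximate y - avgY.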

      // init
      var problemData = Problem.ProblemData;
      var ds = problemData.Dataset;
      var trainRows = problemData.TrainingIndices.ToArray();
      var testRows = problemData.TestIndices.ToArray();
      var avgY = problemData.TargetVariableTrainingValues.Average();
      var inputVars = problemData.AllowedInputVariables.ToArray();

      int nTerms = inputVars.Length;

      #region init results
      // Set up the results display
      var iterations = new IntValue(0);
      Results.Add(new Result("Iterations", iterations));

      var table = new DataTable("Qualities");
      var rmseRow = new DataRow("RMSE (train)");
      var rmseRowTest = new DataRow("RMSE (test)");
      table.Rows.Add(rmseRow);
      table.Rows.Add(rmseRowTest);
      Results.Add(new Result("Qualities", table));
      var curRMSE = new DoubleValue();
      var curRMSETest = new DoubleValue();
      Results.Add(new Result("RMSE (train)", curRMSE));
      Results.Add(new Result("RMSE (test)", curRMSETest));

      // calculate table with residual contributions of each term
      var rssTable = new DoubleMatrix(nTerms, 1, new string[] { "RSS" }, inputVars);
      Results.Add(new Result("RSS Values", rssTable));
      #endregion

      // start with a set of constant models = 0
      IRegressionModel[] f = new IRegressionModel[nTerms];
      for (int i = 0; i < f.Length; i++) {
        f[i] = new ConstantModel(0.0, problemData.TargetVariable);
      }
      // init res which contains the current residual vector
      double[] res = problemData.TargetVariableTrainingValues.Select(yi => yi - avgY).ToArray();
      double[] resTest = problemData.TargetVariableTestValues.Select(yi => yi - avgY).ToArray();

      curRMSE.Value = RMSE(res);
      curRMSETest.Value = RMSE(resTest);
      rmseRow.Values.Add(curRMSE.Value);
      rmseRowTest.Values.Add(curRMSETest.Value);

      double lambda = Lambda;
      var idx = Enumerable.Range(0, nTerms).ToArray();

      // Loop until iteration limit reached or canceled.
      for (int i = 0; i < Iterations && !cancellationToken.IsCancellationRequested; i++) {
        // shuffle order of terms in each iteration to remove bias on earlier terms
        idx.ShuffleInPlace(rand);
        foreach (var inputIdx in idx) {
          var inputVar = inputVars[inputIdx];
          // first remove the effect of the previous model for the inputIdx (by adding the output of the current model to the residual)
          AddInPlace(res, f[inputIdx].GetEstimatedValues(ds, trainRows));
          AddInPlace(resTest, f[inputIdx].GetEstimatedValues(ds, testRows));

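          // record the quality of this term: the mean squared error of the partial residual
          // before refitting (reported in the "RSS Values" table, even though it is an MSE)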
          rssTable[inputIdx, 0] = MSE(res);
          f[inputIdx] = RegressSpline(problemData, inputVar, res, lambda);

          SubtractInPlace(res, f[inputIdx].GetEstimatedValues(ds, trainRows));
          SubtractInPlace(resTest, f[inputIdx].GetEstimatedValues(ds, testRows));
        }

        curRMSE.Value = RMSE(res);
        curRMSETest.Value = RMSE(resTest);
        rmseRow.Values.Add(curRMSE.Value);
        rmseRowTest.Values.Add(curRMSETest.Value);
        iterations.Value = i;
      }

      // produce solution
      if (CreateSolution) {
        var model = new RegressionEnsembleModel(f.Concat(new[] { new ConstantModel(avgY, problemData.TargetVariable) }));
        model.AverageModelEstimates = false;
        var solution = model.CreateRegressionSolution((IRegressionProblemData)problemData.Clone());
        Results.Add(new Result("Ensemble solution", solution));
      }
    }

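    // helpers: mean squared error and root mean squared error of a residual vector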
    public static double MSE(IEnumerable<double> residuals) {
      var mse = residuals.Select(r => r * r).Average();
      return mse;
    }

    public static double RMSE(IEnumerable<double> residuals) {
      var mse = MSE(residuals);
      var rmse = Math.Sqrt(mse);
      return rmse;
    }

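    // Fits a univariate penalized regression spline (alglib spline1dfitpenalized) of inputVar
    // to the current partial residual; lambda is passed on as the smoothing penalty constant.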
    private IRegressionModel RegressSpline(IRegressionProblemData problemData, string inputVar, double[] target, double lambda) {
      var x = problemData.Dataset.GetDoubleValues(inputVar, problemData.TrainingIndices).ToArray();
      var y = (double[])target.Clone();
      int info;
      alglib.spline1dinterpolant s;
      alglib.spline1dfitreport rep;
      int numKnots = (int)Math.Min(50, 3 * Math.Sqrt(x.Length)); // heuristic for number of knots (Elements of Statistical Learning)

      alglib.spline1dfitpenalized(x, y, numKnots, lambda, out info, out s, out rep);

      return new Spline1dModel(s.innerobj, problemData.TargetVariable, inputVar);
    }

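    // Elementwise helpers: add / subtract a sequence of estimated values to / from the
    // residual vector a (both are assumed to have the same length).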
    private static void AddInPlace(double[] a, IEnumerable<double> enumerable) {
      int i = 0;
      foreach (var elem in enumerable) {
        a[i] += elem;
        i++;
      }
    }

    private static void SubtractInPlace(double[] a, IEnumerable<double> enumerable) {
      int i = 0;
      foreach (var elem in enumerable) {
        a[i] -= elem;
        i++;
      }
    }
  }
}