Free cookie consent management tool by TermsFeed Policy Generator

source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestRegression.cs @ 6241

Last change on this file since 6241 was 6241, checked in by gkronber, 12 years ago

#1473: implemented random forest wrapper for classification.

File size: 6.4 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2011 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Linq;
25using HeuristicLab.Common;
26using HeuristicLab.Core;
27using HeuristicLab.Data;
28using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
29using HeuristicLab.Optimization;
30using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
31using HeuristicLab.Problems.DataAnalysis;
32using HeuristicLab.Problems.DataAnalysis.Symbolic;
33using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
34using HeuristicLab.Parameters;
35
namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Random forest regression data analysis algorithm.
  /// Wraps ALGLIB's random decision forest (dfbuildrandomdecisionforest) and exposes
  /// it as a fixed (non-iterative) data analysis algorithm for regression problems.
  /// </summary>
  [Item("Random Forest Regression", "Random forest regression data analysis algorithm (wrapper for ALGLIB).")]
  [Creatable("Data Analysis")]
  [StorableClass]
  public sealed class RandomForestRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
    private const string RandomForestRegressionModelResultName = "Random forest regression solution";
    private const string NumberOfTreesParameterName = "Number of trees";
    private const string RParameterName = "R";

    #region parameter properties
    /// <summary>Parameter for the number of trees built for the forest.</summary>
    public IValueParameter<IntValue> NumberOfTreesParameter {
      get { return (IValueParameter<IntValue>)Parameters[NumberOfTreesParameterName]; }
    }
    /// <summary>Parameter for the ratio of the training set used to build each individual tree (0 &lt; r &lt;= 1).</summary>
    public IValueParameter<DoubleValue> RParameter {
      get { return (IValueParameter<DoubleValue>)Parameters[RParameterName]; }
    }
    #endregion

    #region properties
    /// <summary>Convenience accessor for the number of trees in the forest.</summary>
    public int NumberOfTrees {
      get { return NumberOfTreesParameter.Value.Value; }
      set { NumberOfTreesParameter.Value.Value = value; }
    }
    /// <summary>Convenience accessor for the per-tree training subset ratio R.</summary>
    public double R {
      get { return RParameter.Value.Value; }
      set { RParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    private RandomForestRegression(bool deserializing) : base(deserializing) { }
    private RandomForestRegression(RandomForestRegression original, Cloner cloner)
      : base(original, cloner) {
    }
    /// <summary>Creates a new instance with default parameter values (50 trees, R = 0.3) and an empty regression problem.</summary>
    public RandomForestRegression()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(NumberOfTreesParameterName, "The number of trees in the forest. Should be between 50 and 100", new IntValue(50)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(RParameterName, "The ratio of the training set that will be used in the construction of individual trees (0<r<=1). Should be adjusted depending on the noise level in the dataset in the range from 0.66 (low noise) to 0.05 (high noise). This parameter should be adjusted to achieve good generalization error.", new DoubleValue(0.3)));
      Problem = new RegressionProblem();
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new RandomForestRegression(this, cloner);
    }

    #region random forest
    /// <summary>
    /// Builds the random forest on the problem's training partition and adds the
    /// solution plus training and out-of-bag error measures to the results collection.
    /// </summary>
    protected override void Run() {
      double rmsError, avgRelError, outOfBagRmsError, outOfBagAvgRelError;
      var solution = CreateRandomForestRegressionSolution(Problem.ProblemData, NumberOfTrees, R, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
      Results.Add(new Result(RandomForestRegressionModelResultName, "The random forest regression solution.", solution));
      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the random forest regression solution on the training set.", new DoubleValue(rmsError)));
      Results.Add(new Result("Average relative error", "The average of relative errors of the random forest regression solution on the training set.", new PercentValue(avgRelError)));
      Results.Add(new Result("Root mean square error (out-of-bag)", "The out-of-bag root of the mean of squared errors of the random forest regression solution.", new DoubleValue(outOfBagRmsError)));
      Results.Add(new Result("Average relative error (out-of-bag)", "The out-of-bag average of relative errors of the random forest regression solution.", new PercentValue(outOfBagAvgRelError)));
    }

    /// <summary>
    /// Creates a random forest regression solution for the given problem data using ALGLIB.
    /// </summary>
    /// <param name="problemData">The regression problem data (dataset, target variable, allowed inputs, training partition).</param>
    /// <param name="nTrees">Number of trees to build.</param>
    /// <param name="r">Ratio of the training set used for the construction of each individual tree (0 &lt; r &lt;= 1).</param>
    /// <param name="rmsError">Root mean squared error on the training set.</param>
    /// <param name="avgRelError">Average relative error on the training set.</param>
    /// <param name="outOfBagRmsError">Out-of-bag root mean squared error estimate.</param>
    /// <param name="outOfBagAvgRelError">Out-of-bag average relative error estimate.</param>
    /// <returns>The trained random forest regression solution.</returns>
    /// <exception cref="NotSupportedException">Thrown if the input data contains NaN or infinity values.</exception>
    /// <exception cref="ArgumentException">Thrown if ALGLIB reports an error while building the forest.</exception>
    public static IRegressionSolution CreateRandomForestRegressionSolution(IRegressionProblemData problemData, int nTrees, double r,
      out double rmsError, out double avgRelError, out double outOfBagRmsError, out double outOfBagAvgRelError) {
      Dataset dataset = problemData.Dataset;
      string targetVariable = problemData.TargetVariable;
      // Materialize the input variables exactly once: the same sequence is used to build the
      // input matrix, to determine the number of independent variables, and to create the model.
      // Re-enumerating an arbitrary IEnumerable could yield inconsistent results.
      string[] allowedInputVariables = problemData.AllowedInputVariables.ToArray();
      IEnumerable<int> rows = problemData.TrainingIndizes;
      double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Random forest regression does not support NaN or infinity values in the input dataset.");

      alglib.decisionforest dforest;
      alglib.dfreport rep;
      int nRows = inputMatrix.GetLength(0);

      int info;
      // nClasses = 1 tells ALGLIB to build a regression (not classification) forest.
      alglib.dfbuildrandomdecisionforest(inputMatrix, nRows, allowedInputVariables.Length, 1, nTrees, r, out info, out dforest, out rep);
      if (info != 1) throw new ArgumentException("Error in calculation of random forest regression solution");

      rmsError = rep.rmserror;
      avgRelError = rep.avgrelerror;
      outOfBagAvgRelError = rep.oobavgrelerror;
      outOfBagRmsError = rep.oobrmserror;

      return new RandomForestRegressionSolution(problemData, new RandomForestModel(dforest, targetVariable, allowedInputVariables));
    }
    #endregion
  }
}
Note: See TracBrowser for help on using the repository browser.