
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegression.cs @ 14235

Last change on this file since 14235 was 14185, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 11.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Neural network ensemble regression data analysis algorithm.
  /// </summary>
  [Item("Neural Network Ensemble Regression (NN)", "Neural network ensemble regression data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/mlpensembles.php")]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 140)]
  [StorableClass]
  public sealed class NeuralNetworkEnsembleRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
    private const string EnsembleSizeParameterName = "EnsembleSize";
    private const string DecayParameterName = "Decay";
    private const string HiddenLayersParameterName = "HiddenLayers";
    private const string NodesInFirstHiddenLayerParameterName = "NodesInFirstHiddenLayer";
    private const string NodesInSecondHiddenLayerParameterName = "NodesInSecondHiddenLayer";
    private const string RestartsParameterName = "Restarts";
    private const string NeuralNetworkEnsembleRegressionModelResultName = "Neural network ensemble regression solution";

    #region parameter properties
    public IFixedValueParameter<IntValue> EnsembleSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[EnsembleSizeParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> DecayParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[DecayParameterName]; }
    }
    public IConstrainedValueParameter<IntValue> HiddenLayersParameter {
      get { return (IConstrainedValueParameter<IntValue>)Parameters[HiddenLayersParameterName]; }
    }
    public IFixedValueParameter<IntValue> NodesInFirstHiddenLayerParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NodesInFirstHiddenLayerParameterName]; }
    }
    public IFixedValueParameter<IntValue> NodesInSecondHiddenLayerParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[NodesInSecondHiddenLayerParameterName]; }
    }
    public IFixedValueParameter<IntValue> RestartsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[RestartsParameterName]; }
    }
    #endregion

    #region properties
    public int EnsembleSize {
      get { return EnsembleSizeParameter.Value.Value; }
      set {
        if (value < 1) throw new ArgumentException("The number of models in the ensemble must be at least one.", "EnsembleSize");
        EnsembleSizeParameter.Value.Value = value;
      }
    }
    public double Decay {
      get { return DecayParameter.Value.Value; }
      set {
        if (value < 0.001 || value > 100) throw new ArgumentException("The decay parameter should be set to a value between 0.001 and 100.", "Decay");
        DecayParameter.Value.Value = value;
      }
    }
    public int HiddenLayers {
      get { return HiddenLayersParameter.Value.Value; }
      set {
        if (value < 0 || value > 2) throw new ArgumentException("The number of hidden layers should be set to 0, 1, or 2.", "HiddenLayers");
        HiddenLayersParameter.Value = (from v in HiddenLayersParameter.ValidValues
                                       where v.Value == value
                                       select v)
                                      .Single();
      }
    }
    public int NodesInFirstHiddenLayer {
      get { return NodesInFirstHiddenLayerParameter.Value.Value; }
      set {
        if (value < 1) throw new ArgumentException("The number of nodes in the first hidden layer must be at least one.", "NodesInFirstHiddenLayer");
        NodesInFirstHiddenLayerParameter.Value.Value = value;
      }
    }
    public int NodesInSecondHiddenLayer {
      get { return NodesInSecondHiddenLayerParameter.Value.Value; }
      set {
        if (value < 1) throw new ArgumentException("The number of nodes in the second hidden layer must be at least one.", "NodesInSecondHiddenLayer");
        NodesInSecondHiddenLayerParameter.Value.Value = value;
      }
    }
    public int Restarts {
      get { return RestartsParameter.Value.Value; }
      set {
        if (value < 0) throw new ArgumentException("The number of restarts must not be negative.", "Restarts");
        RestartsParameter.Value.Value = value;
      }
    }
    #endregion

    [StorableConstructor]
    private NeuralNetworkEnsembleRegression(bool deserializing) : base(deserializing) { }
    private NeuralNetworkEnsembleRegression(NeuralNetworkEnsembleRegression original, Cloner cloner)
      : base(original, cloner) {
    }
    public NeuralNetworkEnsembleRegression()
      : base() {
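      // The valid choices for the number of hidden layers (0, 1, or 2) are exposed as a
      // constrained parameter; one hidden layer is preselected as the default.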
      var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] {
        (IntValue)new IntValue(0).AsReadOnly(),
        (IntValue)new IntValue(1).AsReadOnly(),
        (IntValue)new IntValue(2).AsReadOnly() });
      var selectedHiddenLayerValue = (from v in validHiddenLayerValues
                                      where v.Value == 1
                                      select v)
                                     .Single();
      Parameters.Add(new FixedValueParameter<IntValue>(EnsembleSizeParameterName, "The number of simple neural network models in the ensemble. A good value is 10.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strength of regularization and should be set to a value between 0.001 (weak regularization) and 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(0.001)));
      Parameters.Add(new ConstrainedValueParameter<IntValue>(HiddenLayersParameterName, "The number of hidden layers for the neural network (0, 1, or 2).", validHiddenLayerValues, selectedHiddenLayerValue));
      Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. The value should be rather large (30-100 nodes) in order to make the network highly flexible and run into the early stopping criterion. This value is not used if the number of hidden layers is zero.", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(100)));
      Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of restarts for learning.", new IntValue(2)));

      HiddenLayersParameter.Hidden = true;
      NodesInFirstHiddenLayerParameter.Hidden = true;
      NodesInSecondHiddenLayerParameter.Hidden = true;
      RestartsParameter.Hidden = true;

      Problem = new RegressionProblem();
    }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkEnsembleRegression(this, cloner);
    }

    #region neural network ensemble
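    // Trains the ensemble on the problem's training partition and adds the resulting
    // regression solution together with its training-set error measures to the results.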
    protected override void Run() {
      double rmsError, avgRelError;
      var solution = CreateNeuralNetworkEnsembleRegressionSolution(Problem.ProblemData, EnsembleSize, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError);
      Results.Add(new Result(NeuralNetworkEnsembleRegressionModelResultName, "The neural network ensemble regression solution.", solution));
      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network ensemble regression solution on the training set.", new DoubleValue(rmsError)));
      Results.Add(new Result("Average relative error", "The average of relative errors of the neural network ensemble regression solution on the training set.", new PercentValue(avgRelError)));
    }

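    // Creates and trains an ALGLIB multilayer perceptron ensemble for the given regression
    // problem data and wraps it in a HeuristicLab regression solution. The training-set
    // root mean squared error and average relative error are returned via out parameters.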
    public static IRegressionSolution CreateNeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
      out double rmsError, out double avgRelError) {
      var dataset = problemData.Dataset;
      string targetVariable = problemData.TargetVariable;
      IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
      IEnumerable<int> rows = problemData.TrainingIndices;
      double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Neural network ensemble regression does not support NaN or infinity values in the input dataset.");

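      // Create an untrained ensemble whose member networks have the requested topology
      // (no, one, or two hidden layers) and a single output node.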
      alglib.mlpensemble mlpEnsemble = null;
      if (nLayers == 0) {
        alglib.mlpecreate0(allowedInputVariables.Count(), 1, ensembleSize, out mlpEnsemble);
      } else if (nLayers == 1) {
        alglib.mlpecreate1(allowedInputVariables.Count(), nHiddenNodes1, 1, ensembleSize, out mlpEnsemble);
      } else if (nLayers == 2) {
        alglib.mlpecreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, ensembleSize, out mlpEnsemble);
      } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
      alglib.mlpreport rep;
      int nRows = inputMatrix.GetLength(0);

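      // Train the ensemble with ALGLIB's early-stopping trainer; the completion code is
      // checked below and anything other than 6 is treated as a failure.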
      int info;
      alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep);
      if (info != 6) throw new ArgumentException("Error in calculation of neural network ensemble regression solution");

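      // Compute the error measures of the trained ensemble on the training data.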
      rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows);
      avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows);

      return new NeuralNetworkEnsembleRegressionSolution(new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables), (IRegressionProblemData)problemData.Clone());
    }
    #endregion
  }
}
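
The static factory method CreateNeuralNetworkEnsembleRegressionSolution can also be called directly, without configuring the algorithm's parameter objects. A minimal sketch, assuming an IRegressionProblemData instance named problemData with a defined training partition (the argument values below simply mirror the constructor's defaults):

double rmsError, avgRelError;
var solution = NeuralNetworkEnsembleRegression.CreateNeuralNetworkEnsembleRegressionSolution(
    problemData,   // regression problem data (assumed to exist in the calling context)
    10,            // ensembleSize
    1,             // nLayers: 0, 1, or 2 hidden layers
    100,           // nHiddenNodes1 (ignored if nLayers == 0)
    100,           // nHiddenNodes2 (ignored unless nLayers == 2)
    0.001,         // decay: regularization strength
    2,             // restarts
    out rmsError,
    out avgRelError);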