
source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs @ 18134

#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HEAL.Attic;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a neural network model for regression and classification
  /// </summary>
  [StorableType("AEB9B960-FCA6-4A6D-BD5F-27BCE9CC5BEA")]
  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
  public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {

    private object mlpLocker = new object();
    private alglib.multilayerperceptron multiLayerPerceptron;

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [StorableConstructor]
    private NeuralNetworkModel(StorableConstructorFlag _) : base(_) {
      multiLayerPerceptron = new alglib.multilayerperceptron();
    }
    private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
      : base(original, cloner) {
      multiLayerPerceptron = new alglib.multilayerperceptron();
      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base(targetVariable) {
      this.name = ItemName;
      this.description = ItemDescription;
      this.multiLayerPerceptron = multiLayerPerceptron;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }
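
    // Illustrative usage sketch (not part of the original file; variable names below are
    // hypothetical). The wrapped network is typically created and trained elsewhere with
    // ALGLIB before being handed to this constructor, e.g.:
    //
    //   alglib.multilayerperceptron mlp;
    //   alglib.mlpcreate1(numInputs, numHiddenNodes, 1, out mlp);  // single output node for regression
    //   alglib.mlptrainlm(mlp, trainingData, numRows, decay, restarts, out info, out report);
    //   var model = new NeuralNetworkModel(mlp, targetVariable, inputVariableNames);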

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkModel(this, cloner);
    }

    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
        lock (mlpLocker) {
          alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        }
        yield return y[0];
      }
    }
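
    // Example call (hypothetical; assumes problemData is a HeuristicLab IRegressionProblemData):
    //
    //   IEnumerable<double> estimates =
    //     model.GetEstimatedValues(problemData.Dataset, problemData.TrainingIndices);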

    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[classValues.Length];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
        lock (mlpLocker) {
          alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        }
        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }
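
    // Example call (hypothetical): for a model constructed with classValues, this yields the
    // original class labels rather than output-node indices, e.g.:
    //
    //   var estimatedClasses =
    //     model.GetEstimatedClassValues(problemData.Dataset, problemData.TestIndices);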

    public bool IsProblemDataCompatible(IRegressionProblemData problemData, out string errorMessage) {
      return RegressionModel.IsProblemDataCompatible(this, problemData, out errorMessage);
    }

    public override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");

      var regressionProblemData = problemData as IRegressionProblemData;
      if (regressionProblemData != null)
        return IsProblemDataCompatible(regressionProblemData, out errorMessage);

      var classificationProblemData = problemData as IClassificationProblemData;
      if (classificationProblemData != null)
        return IsProblemDataCompatible(classificationProblemData, out errorMessage);

      throw new ArgumentException("The problem data is not compatible with this neural network. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    }

    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NeuralNetworkRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
    }
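
    // Example (hypothetical): both factory methods wrap this model together with a copy of the
    // given problem data, e.g.:
    //
    //   IRegressionSolution regressionSolution = model.CreateRegressionSolution(regressionProblemData);
    //   IClassificationSolution classificationSolution = model.CreateClassificationSolution(classificationProblemData);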

    #region persistence
    // The ALGLIB multilayerperceptron is not storable itself; these wrapper properties expose
    // its internal arrays so the network state can be serialized and restored.
    [Storable]
    private double[,] MultiLayerPerceptronChunks {
      get {
        return multiLayerPerceptron.innerobj.chunks;
      }
      set {
        multiLayerPerceptron.innerobj.chunks = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnMeans {
      get {
        return multiLayerPerceptron.innerobj.columnmeans;
      }
      set {
        multiLayerPerceptron.innerobj.columnmeans = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnSigmas {
      get {
        return multiLayerPerceptron.innerobj.columnsigmas;
      }
      set {
        multiLayerPerceptron.innerobj.columnsigmas = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDError {
      get {
        return multiLayerPerceptron.innerobj.derror;
      }
      set {
        multiLayerPerceptron.innerobj.derror = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDfdnet {
      get {
        return multiLayerPerceptron.innerobj.dfdnet;
      }
      set {
        multiLayerPerceptron.innerobj.dfdnet = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNeurons {
      get {
        return multiLayerPerceptron.innerobj.neurons;
      }
      set {
        multiLayerPerceptron.innerobj.neurons = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNwbuf {
      get {
        return multiLayerPerceptron.innerobj.nwbuf;
      }
      set {
        multiLayerPerceptron.innerobj.nwbuf = value;
      }
    }
    [Storable]
    private int[] MultiLayerPerceptronStuctinfo {
      get {
        return multiLayerPerceptron.innerobj.structinfo;
      }
      set {
        multiLayerPerceptron.innerobj.structinfo = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronWeights {
      get {
        return multiLayerPerceptron.innerobj.weights;
      }
      set {
        multiLayerPerceptron.innerobj.weights = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronX {
      get {
        return multiLayerPerceptron.innerobj.x;
      }
      set {
        multiLayerPerceptron.innerobj.x = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronY {
      get {
        return multiLayerPerceptron.innerobj.y;
      }
      set {
        multiLayerPerceptron.innerobj.y = value;
      }
    }
    #endregion
  }
}