Free cookie consent management tool by TermsFeed Policy Generator

source: branches/2520_PersistenceReintegration/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs @ 16451

Last change on this file since 16451 was 16243, checked in by mkommend, 5 years ago

#2955: Added IsProblemDataCompatible and IsDatasetCompatible to all DataAnalysisModels.

File size: 9.9 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Linq;
25using HeuristicLab.Common;
26using HeuristicLab.Core;
27using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
28using HeuristicLab.Problems.DataAnalysis;
29
namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a neural network model for regression and classification.
  /// Wraps an alglib multilayer perceptron and exposes its internal state
  /// through [Storable] properties so the model can be persisted.
  /// </summary>
  [StorableClass]
  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
  public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {

    // Guards every call into alglib.mlpprocess: that routine mutates buffers
    // inside multiLayerPerceptron, so concurrent predictions must be
    // serialized. readonly so the lock target can never be swapped out.
    private readonly object mlpLocker = new object();
    private alglib.multilayerperceptron multiLayerPerceptron;

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    // Class values ordered to match the perceptron's output units;
    // null for regression models.
    [Storable]
    private double[] classValues;

    [StorableConstructor]
    private NeuralNetworkModel(bool deserializing)
      : base(deserializing) {
      // Create an empty perceptron up front so the storable property setters
      // in the persistence region have a target to write the restored state into.
      if (deserializing)
        multiLayerPerceptron = new alglib.multilayerperceptron();
    }

    private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
      : base(original, cloner) {
      // alglib.multilayerperceptron offers no clone operation, so deep-copy
      // its internal arrays field by field.
      multiLayerPerceptron = new alglib.multilayerperceptron();
      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }

    /// <summary>
    /// Creates a model around an already trained perceptron.
    /// </summary>
    /// <param name="multiLayerPerceptron">The trained alglib network; the model takes ownership (no copy is made).</param>
    /// <param name="targetVariable">Name of the predicted variable.</param>
    /// <param name="allowedInputVariables">Input variables in the column order the network was trained with.</param>
    /// <param name="classValues">Class values for classification models (one per output unit); null for regression.</param>
    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base(targetVariable) {
      this.name = ItemName;
      this.description = ItemDescription;
      this.multiLayerPerceptron = multiLayerPerceptron;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkModel(this, cloner);
    }

    // Copies one row of the input matrix into the reusable buffer x.
    private static void CopyRow(double[,] inputData, int row, double[] x) {
      for (int column = 0; column < x.Length; column++) {
        x[column] = inputData[row, column];
      }
    }

    /// <summary>
    /// Computes the network output (regression estimate) for each requested row.
    /// Evaluation is lazy; each row is processed when the sequence is enumerated.
    /// </summary>
    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];

      for (int row = 0; row < n; row++) {
        CopyRow(inputData, row, x);
        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
        lock (mlpLocker) {
          alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        }
        yield return y[0];
      }
    }

    /// <summary>
    /// Computes the predicted class value for each requested row by picking the
    /// class whose output unit has the largest probability.
    /// </summary>
    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[classValues.Length];

      for (int row = 0; row < n; row++) {
        CopyRow(inputData, row, x);
        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
        lock (mlpLocker) {
          alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        }
        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }

    public bool IsProblemDataCompatible(IRegressionProblemData problemData, out string errorMessage) {
      return RegressionModel.IsProblemDataCompatible(this, problemData, out errorMessage);
    }

    /// <summary>
    /// Dispatches the compatibility check to the regression or classification
    /// overload, depending on the concrete problem-data type.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="problemData"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when the problem data is neither regression nor classification.</exception>
    public override bool IsProblemDataCompatible(IDataAnalysisProblemData problemData, out string errorMessage) {
      if (problemData == null) throw new ArgumentNullException("problemData", "The provided problemData is null.");

      var regressionProblemData = problemData as IRegressionProblemData;
      if (regressionProblemData != null)
        return IsProblemDataCompatible(regressionProblemData, out errorMessage);

      var classificationProblemData = problemData as IClassificationProblemData;
      if (classificationProblemData != null)
        return IsProblemDataCompatible(classificationProblemData, out errorMessage);

      throw new ArgumentException("The problem data is not a regression nor a classification problem data. Instead a " + problemData.GetType().GetPrettyName() + " was provided.", "problemData");
    }

    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NeuralNetworkRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
    }

    #region persistence
    // These properties expose the alglib perceptron's internal arrays to the
    // HeuristicLab persistence framework. Their names are the storable keys:
    // renaming any of them (including the misspelled "Stuctinfo" below) would
    // break deserialization of previously stored models.
    [Storable]
    private double[,] MultiLayerPerceptronChunks {
      get {
        return multiLayerPerceptron.innerobj.chunks;
      }
      set {
        multiLayerPerceptron.innerobj.chunks = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnMeans {
      get {
        return multiLayerPerceptron.innerobj.columnmeans;
      }
      set {
        multiLayerPerceptron.innerobj.columnmeans = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnSigmas {
      get {
        return multiLayerPerceptron.innerobj.columnsigmas;
      }
      set {
        multiLayerPerceptron.innerobj.columnsigmas = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDError {
      get {
        return multiLayerPerceptron.innerobj.derror;
      }
      set {
        multiLayerPerceptron.innerobj.derror = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDfdnet {
      get {
        return multiLayerPerceptron.innerobj.dfdnet;
      }
      set {
        multiLayerPerceptron.innerobj.dfdnet = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNeurons {
      get {
        return multiLayerPerceptron.innerobj.neurons;
      }
      set {
        multiLayerPerceptron.innerobj.neurons = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNwbuf {
      get {
        return multiLayerPerceptron.innerobj.nwbuf;
      }
      set {
        multiLayerPerceptron.innerobj.nwbuf = value;
      }
    }
    // NOTE(review): "Stuctinfo" is misspelled but intentionally kept — it is
    // the persisted storable key (see region comment above).
    [Storable]
    private int[] MultiLayerPerceptronStuctinfo {
      get {
        return multiLayerPerceptron.innerobj.structinfo;
      }
      set {
        multiLayerPerceptron.innerobj.structinfo = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronWeights {
      get {
        return multiLayerPerceptron.innerobj.weights;
      }
      set {
        multiLayerPerceptron.innerobj.weights = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronX {
      get {
        return multiLayerPerceptron.innerobj.x;
      }
      set {
        multiLayerPerceptron.innerobj.x = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronY {
      get {
        return multiLayerPerceptron.innerobj.y;
      }
      set {
        multiLayerPerceptron.innerobj.y = value;
      }
    }
    #endregion
  }
}
Note: See TracBrowser for help on using the repository browser.