source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs @ 16387

Last change on this file since 16387 was 16387, checked in by gkronber, 11 months ago

#2891: merged r15739 and r16168 from trunk to stable

File size: 8.9 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Linq;
25using HeuristicLab.Common;
26using HeuristicLab.Core;
27using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
28using HeuristicLab.Problems.DataAnalysis;
29
namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a neural network model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
  public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {

    // Guards all calls to alglib.mlpprocess: the routine writes into the network's
    // internal buffers (x, y, neurons, ...), so concurrent evaluation must be serialized.
    // readonly: a lock object must never be reassigned after construction.
    private readonly object mlpLocker = new object();
    private alglib.multilayerperceptron multiLayerPerceptron;

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    // Class values in the order of the network's output units; null for regression models.
    [Storable]
    private double[] classValues;
    [StorableConstructor]
    private NeuralNetworkModel(bool deserializing)
      : base(deserializing) {
      // Create an empty network shell; its inner arrays are restored afterwards
      // through the storable properties in the persistence region below.
      if (deserializing)
        multiLayerPerceptron = new alglib.multilayerperceptron();
    }
    private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
      : base(original, cloner) {
      // Deep-copy every inner array of the alglib network so the clone is fully
      // independent of the original (mlpprocess mutates these buffers during evaluation).
      multiLayerPerceptron = new alglib.multilayerperceptron();
      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    /// <summary>
    /// Creates a new model wrapping a trained alglib network.
    /// </summary>
    /// <param name="multiLayerPerceptron">The trained network (not copied; ownership is transferred to the model).</param>
    /// <param name="targetVariable">Name of the target variable.</param>
    /// <param name="allowedInputVariables">Input variables in the column order the network was trained on.</param>
    /// <param name="classValues">Class values for classification models (one per output unit); null for regression.</param>
    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base(targetVariable) {
      this.name = ItemName;
      this.description = ItemDescription;
      this.multiLayerPerceptron = multiLayerPerceptron;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkModel(this, cloner);
    }

    /// <summary>
    /// Computes the network output (regression estimate) for each of the given rows.
    /// Evaluation is lazy: rows are processed one at a time as the sequence is enumerated.
    /// </summary>
    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
        lock (mlpLocker) {
          alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        }
        yield return y[0];
      }
    }

    /// <summary>
    /// Computes the predicted class value for each of the given rows by taking the
    /// class whose output unit has the highest activation. Evaluation is lazy.
    /// </summary>
    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[classValues.Length];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
        lock (mlpLocker) {
          alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        }
        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }

    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NeuralNetworkRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
    }

    #region persistence
    // The properties below expose the alglib network's inner arrays to the
    // HeuristicLab persistence framework. Their names are part of the stored
    // format: renaming any of them breaks deserialization of existing files.
    [Storable]
    private double[,] MultiLayerPerceptronChunks {
      get {
        return multiLayerPerceptron.innerobj.chunks;
      }
      set {
        multiLayerPerceptron.innerobj.chunks = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnMeans {
      get {
        return multiLayerPerceptron.innerobj.columnmeans;
      }
      set {
        multiLayerPerceptron.innerobj.columnmeans = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnSigmas {
      get {
        return multiLayerPerceptron.innerobj.columnsigmas;
      }
      set {
        multiLayerPerceptron.innerobj.columnsigmas = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDError {
      get {
        return multiLayerPerceptron.innerobj.derror;
      }
      set {
        multiLayerPerceptron.innerobj.derror = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDfdnet {
      get {
        return multiLayerPerceptron.innerobj.dfdnet;
      }
      set {
        multiLayerPerceptron.innerobj.dfdnet = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNeurons {
      get {
        return multiLayerPerceptron.innerobj.neurons;
      }
      set {
        multiLayerPerceptron.innerobj.neurons = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNwbuf {
      get {
        return multiLayerPerceptron.innerobj.nwbuf;
      }
      set {
        multiLayerPerceptron.innerobj.nwbuf = value;
      }
    }
    // NOTE: the misspelling "Stuctinfo" is intentional here — the storable name
    // is persisted in saved files and must not be corrected.
    [Storable]
    private int[] MultiLayerPerceptronStuctinfo {
      get {
        return multiLayerPerceptron.innerobj.structinfo;
      }
      set {
        multiLayerPerceptron.innerobj.structinfo = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronWeights {
      get {
        return multiLayerPerceptron.innerobj.weights;
      }
      set {
        multiLayerPerceptron.innerobj.weights = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronX {
      get {
        return multiLayerPerceptron.innerobj.x;
      }
      set {
        multiLayerPerceptron.innerobj.x = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronY {
      get {
        return multiLayerPerceptron.innerobj.y;
      }
      set {
        multiLayerPerceptron.innerobj.y = value;
      }
    }
    #endregion
  }
}
Note: See TracBrowser for help on using the repository browser.