
source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs @ 16308

Last change on this file since 16308 was 15584, checked in by swagner, 7 years ago

#2640: Updated year of copyrights in license headers on stable

File size: 9.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a neural network model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
  public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {

    private alglib.multilayerperceptron multiLayerPerceptron;
    public alglib.multilayerperceptron MultiLayerPerceptron {
      get { return multiLayerPerceptron; }
      set {
        if (value != multiLayerPerceptron) {
          if (value == null) throw new ArgumentNullException();
          multiLayerPerceptron = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
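    // deserializing constructor: creates an empty ALGLIB network; the storable
    // properties in the persistence region below restore its internal arrays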
    [StorableConstructor]
    private NeuralNetworkModel(bool deserializing)
      : base(deserializing) {
      if (deserializing)
        multiLayerPerceptron = new alglib.multilayerperceptron();
    }
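    // cloning constructor: duplicates the ALGLIB network by copying each of its internal state arrays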
    private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
      : base(original, cloner) {
      multiLayerPerceptron = new alglib.multilayerperceptron();
      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base(targetVariable) {
      this.name = ItemName;
      this.description = ItemDescription;
      this.multiLayerPerceptron = multiLayerPerceptron;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkModel(this, cloner);
    }

    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        yield return y[0];
      }
    }

    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[classValues.Length];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }

    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NeuralNetworkRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
    }

    #region events
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion

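    // expose the ALGLIB network's internal arrays as storable properties so that a trained
    // network can be serialized and restored by HeuristicLab's persistence framework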
    #region persistence
    [Storable]
    private double[,] MultiLayerPerceptronChunks {
      get {
        return multiLayerPerceptron.innerobj.chunks;
      }
      set {
        multiLayerPerceptron.innerobj.chunks = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnMeans {
      get {
        return multiLayerPerceptron.innerobj.columnmeans;
      }
      set {
        multiLayerPerceptron.innerobj.columnmeans = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnSigmas {
      get {
        return multiLayerPerceptron.innerobj.columnsigmas;
      }
      set {
        multiLayerPerceptron.innerobj.columnsigmas = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDError {
      get {
        return multiLayerPerceptron.innerobj.derror;
      }
      set {
        multiLayerPerceptron.innerobj.derror = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDfdnet {
      get {
        return multiLayerPerceptron.innerobj.dfdnet;
      }
      set {
        multiLayerPerceptron.innerobj.dfdnet = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNeurons {
      get {
        return multiLayerPerceptron.innerobj.neurons;
      }
      set {
        multiLayerPerceptron.innerobj.neurons = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNwbuf {
      get {
        return multiLayerPerceptron.innerobj.nwbuf;
      }
      set {
        multiLayerPerceptron.innerobj.nwbuf = value;
      }
    }
    [Storable]
    private int[] MultiLayerPerceptronStuctinfo {
      get {
        return multiLayerPerceptron.innerobj.structinfo;
      }
      set {
        multiLayerPerceptron.innerobj.structinfo = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronWeights {
      get {
        return multiLayerPerceptron.innerobj.weights;
      }
      set {
        multiLayerPerceptron.innerobj.weights = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronX {
      get {
        return multiLayerPerceptron.innerobj.x;
      }
      set {
        multiLayerPerceptron.innerobj.x = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronY {
      get {
        return multiLayerPerceptron.innerobj.y;
      }
      set {
        multiLayerPerceptron.innerobj.y = value;
      }
    }
    #endregion
  }
}
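
The snippet below is a minimal usage sketch, not part of the repository file. It assumes the standard ALGLIB C# calls alglib.mlpcreate1 and alglib.mlptrainlm and a HeuristicLab Dataset(string[], double[,]) constructor; the variable names "x1", "x2", "y" and the training values are purely illustrative.

// Train a small regression network with ALGLIB and wrap it in a NeuralNetworkModel (sketch).
var inputVariables = new[] { "x1", "x2" };
double[,] trainingData = {          // columns: x1, x2, y (illustrative values)
  { 0.0, 1.0, 1.0 },
  { 1.0, 0.0, 1.0 },
  { 1.0, 1.0, 0.0 }
};

alglib.multilayerperceptron mlp;
alglib.mlpcreate1(2, 5, 1, out mlp);   // 2 inputs, 5 hidden units, 1 output
int info;
alglib.mlpreport report;
alglib.mlptrainlm(mlp, trainingData, trainingData.GetLength(0), 0.001, 2, out info, out report);

var model = new NeuralNetworkModel(mlp, "y", inputVariables);   // regression: no classValues
var dataset = new Dataset(new[] { "x1", "x2", "y" }, trainingData);
foreach (double estimate in model.GetEstimatedValues(dataset, Enumerable.Range(0, trainingData.GetLength(0))))
  Console.WriteLine(estimate);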