source: branches/PersistenceReintegration/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs @ 15018

Last change on this file since 15018 was 15018, checked in by gkronber, 7 years ago

#2520 introduced StorableConstructorFlag type for StorableConstructors

File size: 9.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a neural network model for regression and classification.
  /// </summary>
  [StorableType("6aed3009-9bad-4459-ac5c-6b4ad4e682d6")]
  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
  public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {

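    // Wrapped ALGLIB multilayer perceptron; all predictions are forwarded to
    // alglib.mlpprocess on this network.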
    private alglib.multilayerperceptron multiLayerPerceptron = new alglib.multilayerperceptron();
    public alglib.multilayerperceptron MultiLayerPerceptron {
      get { return multiLayerPerceptron; }
      set {
        if (value != multiLayerPerceptron) {
          if (value == null) throw new ArgumentNullException();
          multiLayerPerceptron = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [StorableConstructor]
    private NeuralNetworkModel(StorableConstructorFlag deserializing)
      : base(deserializing) {
    }
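    // Deep-copies all internal ALGLIB arrays so the cloned model is fully
    // independent of the original network.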
    private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
      : base(original, cloner) {
      multiLayerPerceptron = new alglib.multilayerperceptron();
      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base(targetVariable) {
      this.name = ItemName;
      this.description = ItemDescription;
      this.multiLayerPerceptron = multiLayerPerceptron;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkModel(this, cloner);
    }

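    // Regression: evaluates the network row by row and returns its single
    // output as the estimated value.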
    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        yield return y[0];
      }
    }

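    // Classification: the network has one output per class; for each row the
    // class value with the largest output is returned.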
    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[classValues.Length];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }

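    // Wraps this model in a regression or classification solution; the given
    // problem data is copied into a new problem data instance.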
    public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NeuralNetworkRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
    }

    #region events
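    // Raised when the wrapped network is replaced via the MultiLayerPerceptron property.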
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion

    #region persistence
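    // The ALGLIB network itself is not storable; its internal arrays are exposed
    // as storable properties so the network can be reconstructed on deserialization.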
    [Storable]
    private double[,] MultiLayerPerceptronChunks {
      get {
        return multiLayerPerceptron.innerobj.chunks;
      }
      set {
        multiLayerPerceptron.innerobj.chunks = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnMeans {
      get {
        return multiLayerPerceptron.innerobj.columnmeans;
      }
      set {
        multiLayerPerceptron.innerobj.columnmeans = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnSigmas {
      get {
        return multiLayerPerceptron.innerobj.columnsigmas;
      }
      set {
        multiLayerPerceptron.innerobj.columnsigmas = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDError {
      get {
        return multiLayerPerceptron.innerobj.derror;
      }
      set {
        multiLayerPerceptron.innerobj.derror = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDfdnet {
      get {
        return multiLayerPerceptron.innerobj.dfdnet;
      }
      set {
        multiLayerPerceptron.innerobj.dfdnet = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNeurons {
      get {
        return multiLayerPerceptron.innerobj.neurons;
      }
      set {
        multiLayerPerceptron.innerobj.neurons = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNwbuf {
      get {
        return multiLayerPerceptron.innerobj.nwbuf;
      }
      set {
        multiLayerPerceptron.innerobj.nwbuf = value;
      }
    }
    [Storable]
    private int[] MultiLayerPerceptronStuctinfo {
      get {
        return multiLayerPerceptron.innerobj.structinfo;
      }
      set {
        multiLayerPerceptron.innerobj.structinfo = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronWeights {
      get {
        return multiLayerPerceptron.innerobj.weights;
      }
      set {
        multiLayerPerceptron.innerobj.weights = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronX {
      get {
        return multiLayerPerceptron.innerobj.x;
      }
      set {
        multiLayerPerceptron.innerobj.x = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronY {
      get {
        return multiLayerPerceptron.innerobj.y;
      }
      set {
        multiLayerPerceptron.innerobj.y = value;
      }
    }
    #endregion
  }
}