
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs @ 6577

Last change on this file since 6577 was 6577, checked in by gkronber, 13 years ago

#1474: added first implementation of neural networks for regression wrapper for alglib.

File size: 8.7 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2011 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a neural network model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
  public sealed class NeuralNetworkModel : NamedItem, INeuralNetworkModel {

    private alglib.multilayerperceptron multiLayerPerceptron;
    public alglib.multilayerperceptron MultiLayerPerceptron {
      get { return multiLayerPerceptron; }
      set {
        if (value != multiLayerPerceptron) {
          if (value == null) throw new ArgumentNullException();
          multiLayerPerceptron = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }

    [Storable]
    private string targetVariable;
    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [StorableConstructor]
    private NeuralNetworkModel(bool deserializing)
      : base(deserializing) {
      if (deserializing)
        multiLayerPerceptron = new alglib.multilayerperceptron();
    }
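    // Deep-copy constructor used by Clone: the wrapped alglib network is duplicated by
    // copying the internal arrays of innerobj field by field.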
    private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
      : base(original, cloner) {
      multiLayerPerceptron = new alglib.multilayerperceptron();
      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
      targetVariable = original.targetVariable;
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base() {
      this.name = ItemName;
      this.description = ItemDescription;
      this.multiLayerPerceptron = multiLayerPerceptron;
      this.targetVariable = targetVariable;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }
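    // Usage sketch (illustrative, not part of the original file): a network trained with
    // alglib can be wrapped in this model. Variable names, layer sizes, and training
    // parameters below are hypothetical; mlpcreate1/mlptrainlm are plain alglib calls and
    // trainingMatrix is assumed to hold the input columns followed by the target column.
    //
    //   alglib.multilayerperceptron net;
    //   alglib.mlpcreate1(nInputs, 10, 1, out net);
    //   int info;
    //   alglib.mlpreport rep;
    //   alglib.mlptrainlm(net, trainingMatrix, nRows, 0.001, 2, out info, out rep);
    //   var model = new NeuralNetworkModel(net, targetVariable, allowedInputVariables);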

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NeuralNetworkModel(this, cloner);
    }

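    // Regression predictions: each requested row is copied into x and pushed through the
    // network with alglib.mlpprocess, which writes the single network output into y[0].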
    public IEnumerable<double> GetEstimatedValues(Dataset dataset, IEnumerable<int> rows) {
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        yield return y[0];
      }
    }

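    // Classification predictions: the network yields one output per class (y has
    // classValues.Length entries); the class value with the largest output is returned.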
    public IEnumerable<double> GetEstimatedClassValues(Dataset dataset, IEnumerable<int> rows) {
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[classValues.Length];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }

    #region events
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion

    #region persistence
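    // The alglib network field itself is not [Storable]; its internal arrays are exposed
    // through the properties below so HeuristicLab persistence can save and restore the
    // network state (an empty network is recreated in the deserialization constructor).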
    [Storable]
    private double[,] MultiLayerPerceptronChunks {
      get {
        return multiLayerPerceptron.innerobj.chunks;
      }
      set {
        multiLayerPerceptron.innerobj.chunks = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnMeans {
      get {
        return multiLayerPerceptron.innerobj.columnmeans;
      }
      set {
        multiLayerPerceptron.innerobj.columnmeans = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronColumnSigmas {
      get {
        return multiLayerPerceptron.innerobj.columnsigmas;
      }
      set {
        multiLayerPerceptron.innerobj.columnsigmas = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDError {
      get {
        return multiLayerPerceptron.innerobj.derror;
      }
      set {
        multiLayerPerceptron.innerobj.derror = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronDfdnet {
      get {
        return multiLayerPerceptron.innerobj.dfdnet;
      }
      set {
        multiLayerPerceptron.innerobj.dfdnet = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNeurons {
      get {
        return multiLayerPerceptron.innerobj.neurons;
      }
      set {
        multiLayerPerceptron.innerobj.neurons = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronNwbuf {
      get {
        return multiLayerPerceptron.innerobj.nwbuf;
      }
      set {
        multiLayerPerceptron.innerobj.nwbuf = value;
      }
    }
    [Storable]
    private int[] MultiLayerPerceptronStructinfo {
      get {
        return multiLayerPerceptron.innerobj.structinfo;
      }
      set {
        multiLayerPerceptron.innerobj.structinfo = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronWeights {
      get {
        return multiLayerPerceptron.innerobj.weights;
      }
      set {
        multiLayerPerceptron.innerobj.weights = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronX {
      get {
        return multiLayerPerceptron.innerobj.x;
      }
      set {
        multiLayerPerceptron.innerobj.x = value;
      }
    }
    [Storable]
    private double[] MultiLayerPerceptronY {
      get {
        return multiLayerPerceptron.innerobj.y;
      }
      set {
        multiLayerPerceptron.innerobj.y = value;
      }
    }
    #endregion
  }
}