
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs @ 8465

Last change on this file since 8465 was 8465, checked in by abeham, 12 years ago

#1913: Changed k-NN to move model representation (kdTree) into the model object

File size: 12.0 KB
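For orientation, a minimal usage sketch of the class in this file (the dataset, row collections, and variable names below are placeholders, not part of the repository): the model is built from a Dataset, the training rows, k, the target variable, and the allowed input variables; regression estimates come from GetEstimatedValues, and classification additionally requires the optional classValues constructor argument before GetEstimatedClassValues can be used.

// hypothetical usage; "dataset", "trainingRows" and "testRows" are assumed to exist
var knn = new NearestNeighbourModel(dataset, trainingRows, 3, "y", new[] { "x1", "x2" });
IEnumerable<double> estimates = knn.GetEstimatedValues(dataset, testRows);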
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a nearest neighbour model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
  public sealed class NearestNeighbourModel : NamedItem, INearestNeighbourModel {

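    // The ALGLIB kd-tree over the training samples is the actual model representation.
    // The field is not marked [Storable]; its internal state is persisted through the
    // wrapper properties in the persistence region at the end of this class.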
    private alglib.nearestneighbor.kdtree kdTree;
    public alglib.nearestneighbor.kdtree KDTree {
      get { return kdTree; }
      set {
        if (value != kdTree) {
          if (value == null) throw new ArgumentNullException();
          kdTree = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }

    [Storable]
    private string targetVariable;
    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [Storable]
    private int k;

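    // On deserialization an empty kd-tree is created first; its fields are then filled in
    // by the storable KDTree* wrapper properties declared in the persistence region below.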
    [StorableConstructor]
    private NearestNeighbourModel(bool deserializing)
      : base(deserializing) {
      if (deserializing)
        kdTree = new alglib.nearestneighbor.kdtree();
    }
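    // Cloning constructor: the alglib kdtree object is duplicated by copying each of its
    // internal fields; arrays are cloned so the copy shares no state with the original.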
    private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
      : base(original, cloner) {
      kdTree = new alglib.nearestneighbor.kdtree();
      kdTree.approxf = original.kdTree.approxf;
      kdTree.boxmax = (double[])original.kdTree.boxmax.Clone();
      kdTree.boxmin = (double[])original.kdTree.boxmin.Clone();
      kdTree.buf = (double[])original.kdTree.buf.Clone();
      kdTree.curboxmax = (double[])original.kdTree.curboxmax.Clone();
      kdTree.curboxmin = (double[])original.kdTree.curboxmin.Clone();
      kdTree.curdist = original.kdTree.curdist;
      kdTree.debugcounter = original.kdTree.debugcounter;
      kdTree.idx = (int[])original.kdTree.idx.Clone();
      kdTree.kcur = original.kdTree.kcur;
      kdTree.kneeded = original.kdTree.kneeded;
      kdTree.n = original.kdTree.n;
      kdTree.nodes = (int[])original.kdTree.nodes.Clone();
      kdTree.normtype = original.kdTree.normtype;
      kdTree.nx = original.kdTree.nx;
      kdTree.ny = original.kdTree.ny;
      kdTree.r = (double[])original.kdTree.r.Clone();
      kdTree.rneeded = original.kdTree.rneeded;
      kdTree.selfmatch = original.kdTree.selfmatch;
      kdTree.splits = (double[])original.kdTree.splits.Clone();
      kdTree.tags = (int[])original.kdTree.tags.Clone();
      kdTree.x = (double[])original.kdTree.x.Clone();
      kdTree.xy = (double[,])original.kdTree.xy.Clone();

      k = original.k;
      targetVariable = original.targetVariable;
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
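    // Builds the model from the given data: the input matrix contains the allowed input
    // variables plus the target variable as the last column; for classification the original
    // class values are mapped to indices 0..nClasses-1 before the kd-tree is built.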
    public NearestNeighbourModel(Dataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) {
      this.k = k;
      this.targetVariable = targetVariable;
      this.allowedInputVariables = allowedInputVariables.ToArray();

      var inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
                                   allowedInputVariables.Concat(new string[] { targetVariable }),
                                   rows);

      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException(
          "Nearest neighbour classification does not support NaN or infinity values in the input dataset.");

      this.kdTree = new alglib.nearestneighbor.kdtree();

      var nRows = inputMatrix.GetLength(0);
      var nFeatures = inputMatrix.GetLength(1) - 1;

      if (classValues != null) {
        this.classValues = (double[])classValues.Clone();
        int nClasses = classValues.Length;
        // map original class values to values [0..nClasses-1]
        var classIndices = new Dictionary<double, double>();
        for (int i = 0; i < nClasses; i++)
          classIndices[classValues[i]] = i;

        for (int row = 0; row < nRows; row++) {
          inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
        }
      }
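      // build the kd-tree over all samples: the feature columns form the points,
      // one dependent value per sample (the target/class column), norm type 2 = Euclidean distance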
      alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NearestNeighbourModel(this, cloner);
    }

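    // Regression: the estimate for each row is the inverse-distance-weighted mean of the
    // target values of the (at most) k nearest training samples.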
    public IEnumerable<double> GetEstimatedValues(Dataset dataset, IEnumerable<int> rows) {
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int actNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
        alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
        alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);

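        // weighted average: sum(target_i / dist_i) / sum(1 / dist_i) over the returned neighbours;
        // the last column of the neighbours matrix (index == number of input columns) holds the target value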
        double distanceWeightedValue = 0.0;
        double distsSum = 0.0;
        for (int i = 0; i < actNeighbours; i++) {
          distanceWeightedValue += neighbours[i, columns] / dists[i];
          distsSum += 1.0 / dists[i];
        }
        yield return distanceWeightedValue / distsSum;
      }
    }

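    // Classification: the predicted class is the one with the most votes among the (at most)
    // k nearest training samples; ties are resolved in favour of the lower class index.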
    public IEnumerable<double> GetEstimatedClassValues(Dataset dataset, IEnumerable<int> rows) {
      if (classValues == null) throw new InvalidOperationException("No class values are defined.");
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      int[] y = new int[classValues.Length];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int actNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
        alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
        alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);

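        // y[i] counts how many of the returned neighbours belong to class index i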
        Array.Clear(y, 0, y.Length);
        for (int i = 0; i < actNeighbours; i++) {
          int classValue = (int)Math.Round(neighbours[i, columns]);
          y[classValue]++;
        }

        // find the class index with the largest number of votes
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }

    public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NearestNeighbourRegressionSolution(problemData, this);
    }
    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
      return CreateRegressionSolution(problemData);
    }
    public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NearestNeighbourClassificationSolution(problemData, this);
    }
    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
      return CreateClassificationSolution(problemData);
    }

    #region events
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion

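    // The kdTree field itself is not [Storable]; its internal state is exposed through the
    // following wrapper properties so that it can be persisted and restored on deserialization.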
    #region persistence
    [Storable]
    public double KDTreeApproxF {
      get { return kdTree.approxf; }
      set { kdTree.approxf = value; }
    }
    [Storable]
    public double[] KDTreeBoxMax {
      get { return kdTree.boxmax; }
      set { kdTree.boxmax = value; }
    }
    [Storable]
    public double[] KDTreeBoxMin {
      get { return kdTree.boxmin; }
      set { kdTree.boxmin = value; }
    }
    [Storable]
    public double[] KDTreeBuf {
      get { return kdTree.buf; }
      set { kdTree.buf = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMax {
      get { return kdTree.curboxmax; }
      set { kdTree.curboxmax = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMin {
      get { return kdTree.curboxmin; }
      set { kdTree.curboxmin = value; }
    }
    [Storable]
    public double KDTreeCurDist {
      get { return kdTree.curdist; }
      set { kdTree.curdist = value; }
    }
    [Storable]
    public int KDTreeDebugCounter {
      get { return kdTree.debugcounter; }
      set { kdTree.debugcounter = value; }
    }
    [Storable]
    public int[] KDTreeIdx {
      get { return kdTree.idx; }
      set { kdTree.idx = value; }
    }
    [Storable]
    public int KDTreeKCur {
      get { return kdTree.kcur; }
      set { kdTree.kcur = value; }
    }
    [Storable]
    public int KDTreeKNeeded {
      get { return kdTree.kneeded; }
      set { kdTree.kneeded = value; }
    }
    [Storable]
    public int KDTreeN {
      get { return kdTree.n; }
      set { kdTree.n = value; }
    }
    [Storable]
    public int[] KDTreeNodes {
      get { return kdTree.nodes; }
      set { kdTree.nodes = value; }
    }
    [Storable]
    public int KDTreeNormType {
      get { return kdTree.normtype; }
      set { kdTree.normtype = value; }
    }
    [Storable]
    public int KDTreeNX {
      get { return kdTree.nx; }
      set { kdTree.nx = value; }
    }
    [Storable]
    public int KDTreeNY {
      get { return kdTree.ny; }
      set { kdTree.ny = value; }
    }
    [Storable]
    public double[] KDTreeR {
      get { return kdTree.r; }
      set { kdTree.r = value; }
    }
    [Storable]
    public double KDTreeRNeeded {
      get { return kdTree.rneeded; }
      set { kdTree.rneeded = value; }
    }
    [Storable]
    public bool KDTreeSelfMatch {
      get { return kdTree.selfmatch; }
      set { kdTree.selfmatch = value; }
    }
    [Storable]
    public double[] KDTreeSplits {
      get { return kdTree.splits; }
      set { kdTree.splits = value; }
    }
    [Storable]
    public int[] KDTreeTags {
      get { return kdTree.tags; }
      set { kdTree.tags = value; }
    }
    [Storable]
    public double[] KDTreeX {
      get { return kdTree.x; }
      set { kdTree.x = value; }
    }
    [Storable]
    public double[,] KDTreeXY {
      get { return kdTree.xy; }
      set { kdTree.xy = value; }
    }
    #endregion
  }
}