
source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs @ 14186

Last change on this file since 14186 was 14186, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 11.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a nearest neighbour model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
  public sealed class NearestNeighbourModel : ClassificationModel, INearestNeighbourModel {
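    // Example usage (illustrative sketch only; 'dataset', 'trainingRows', 'testRows'
    // and the variable names below are assumed placeholders, not part of this file):
    //   var model = new NearestNeighbourModel(dataset, trainingRows, 3, "y",
    //                                         new[] { "x1", "x2" });
    //   IEnumerable<double> predictions = model.GetEstimatedValues(dataset, testRows);
    // For classification, pass the distinct class values as the optional last
    // constructor argument and use GetEstimatedClassValues instead.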

    private alglib.nearestneighbor.kdtree kdTree;
    public alglib.nearestneighbor.kdtree KDTree {
      get { return kdTree; }
      set {
        if (value != kdTree) {
          if (value == null) throw new ArgumentNullException();
          kdTree = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }

    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [Storable]
    private int k;

    [StorableConstructor]
    private NearestNeighbourModel(bool deserializing)
      : base(deserializing) {
      if (deserializing)
        kdTree = new alglib.nearestneighbor.kdtree();
    }
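    // Cloning constructor: the internal state of the alglib kd-tree is copied
    // field by field (arrays are cloned, scalars assigned) rather than shared
    // with the original instance.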
    private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
      : base(original, cloner) {
      kdTree = new alglib.nearestneighbor.kdtree();
      kdTree.approxf = original.kdTree.approxf;
      kdTree.boxmax = (double[])original.kdTree.boxmax.Clone();
      kdTree.boxmin = (double[])original.kdTree.boxmin.Clone();
      kdTree.buf = (double[])original.kdTree.buf.Clone();
      kdTree.curboxmax = (double[])original.kdTree.curboxmax.Clone();
      kdTree.curboxmin = (double[])original.kdTree.curboxmin.Clone();
      kdTree.curdist = original.kdTree.curdist;
      kdTree.debugcounter = original.kdTree.debugcounter;
      kdTree.idx = (int[])original.kdTree.idx.Clone();
      kdTree.kcur = original.kdTree.kcur;
      kdTree.kneeded = original.kdTree.kneeded;
      kdTree.n = original.kdTree.n;
      kdTree.nodes = (int[])original.kdTree.nodes.Clone();
      kdTree.normtype = original.kdTree.normtype;
      kdTree.nx = original.kdTree.nx;
      kdTree.ny = original.kdTree.ny;
      kdTree.r = (double[])original.kdTree.r.Clone();
      kdTree.rneeded = original.kdTree.rneeded;
      kdTree.selfmatch = original.kdTree.selfmatch;
      kdTree.splits = (double[])original.kdTree.splits.Clone();
      kdTree.tags = (int[])original.kdTree.tags.Clone();
      kdTree.x = (double[])original.kdTree.x.Clone();
      kdTree.xy = (double[,])original.kdTree.xy.Clone();

      k = original.k;
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
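    // classValues is null for regression models; for classification it holds the
    // distinct class values, which are mapped to indices 0..nClasses-1 in the
    // target column stored alongside the kd-tree points.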
    public NearestNeighbourModel(IDataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base(targetVariable) {
      Name = ItemName;
      Description = ItemDescription;
      this.k = k;
      this.allowedInputVariables = allowedInputVariables.ToArray();

      var inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
                                   allowedInputVariables.Concat(new string[] { targetVariable }),
                                   rows);

      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException(
          "Nearest neighbour classification does not support NaN or infinity values in the input dataset.");

      this.kdTree = new alglib.nearestneighbor.kdtree();

      var nRows = inputMatrix.GetLength(0);
      var nFeatures = inputMatrix.GetLength(1) - 1;

      if (classValues != null) {
        this.classValues = (double[])classValues.Clone();
        int nClasses = classValues.Length;
        // map original class values to values [0..nClasses-1]
        var classIndices = new Dictionary<double, double>();
        for (int i = 0; i < nClasses; i++)
          classIndices[classValues[i]] = i;

        for (int row = 0; row < nRows; row++) {
          inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
        }
      }
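      // Build the kd-tree over the feature columns; the target column is stored as
      // the single y-value attached to each point (ny = 1), and normtype 2 selects
      // the Euclidean norm for neighbour distances in ALGLIB.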
      alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NearestNeighbourModel(this, cloner);
    }

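    // Regression estimate: for every requested row the k nearest stored points are
    // queried and their target values are combined by inverse-distance weighting,
    // i.e. prediction = sum_i(y_i / d_i) / sum_i(1 / d_i).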
    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int actNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
        alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
        alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);

        double distanceWeightedValue = 0.0;
        double distsSum = 0.0;
        for (int i = 0; i < actNeighbours; i++) {
          distanceWeightedValue += neighbours[i, columns] / dists[i];
          distsSum += 1.0 / dists[i];
        }
        yield return distanceWeightedValue / distsSum;
      }
    }

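    // Classification estimate: the k nearest stored points each cast one vote for
    // their (index-encoded) class; the class value with the most votes is returned.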
    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      if (classValues == null) throw new InvalidOperationException("No class values are defined.");
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      int[] y = new int[classValues.Length];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int actNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
        alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
        alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);

        Array.Clear(y, 0, y.Length);
        for (int i = 0; i < actNeighbours; i++) {
          int classValue = (int)Math.Round(neighbours[i, columns]);
          y[classValue]++;
        }

        // find the class with the largest vote count
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }


    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NearestNeighbourRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NearestNeighbourClassificationSolution(this, new ClassificationProblemData(problemData));
    }

    #region events
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion

    #region persistence
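    // The alglib kd-tree object itself carries no [Storable] attribute; its internal
    // fields are therefore exposed below as storable properties so that the tree's
    // state can be persisted and restored on deserialization.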
    [Storable]
    public double KDTreeApproxF {
      get { return kdTree.approxf; }
      set { kdTree.approxf = value; }
    }
    [Storable]
    public double[] KDTreeBoxMax {
      get { return kdTree.boxmax; }
      set { kdTree.boxmax = value; }
    }
    [Storable]
    public double[] KDTreeBoxMin {
      get { return kdTree.boxmin; }
      set { kdTree.boxmin = value; }
    }
    [Storable]
    public double[] KDTreeBuf {
      get { return kdTree.buf; }
      set { kdTree.buf = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMax {
      get { return kdTree.curboxmax; }
      set { kdTree.curboxmax = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMin {
      get { return kdTree.curboxmin; }
      set { kdTree.curboxmin = value; }
    }
    [Storable]
    public double KDTreeCurDist {
      get { return kdTree.curdist; }
      set { kdTree.curdist = value; }
    }
    [Storable]
    public int KDTreeDebugCounter {
      get { return kdTree.debugcounter; }
      set { kdTree.debugcounter = value; }
    }
    [Storable]
    public int[] KDTreeIdx {
      get { return kdTree.idx; }
      set { kdTree.idx = value; }
    }
    [Storable]
    public int KDTreeKCur {
      get { return kdTree.kcur; }
      set { kdTree.kcur = value; }
    }
    [Storable]
    public int KDTreeKNeeded {
      get { return kdTree.kneeded; }
      set { kdTree.kneeded = value; }
    }
    [Storable]
    public int KDTreeN {
      get { return kdTree.n; }
      set { kdTree.n = value; }
    }
    [Storable]
    public int[] KDTreeNodes {
      get { return kdTree.nodes; }
      set { kdTree.nodes = value; }
    }
    [Storable]
    public int KDTreeNormType {
      get { return kdTree.normtype; }
      set { kdTree.normtype = value; }
    }
    [Storable]
    public int KDTreeNX {
      get { return kdTree.nx; }
      set { kdTree.nx = value; }
    }
    [Storable]
    public int KDTreeNY {
      get { return kdTree.ny; }
      set { kdTree.ny = value; }
    }
    [Storable]
    public double[] KDTreeR {
      get { return kdTree.r; }
      set { kdTree.r = value; }
    }
    [Storable]
    public double KDTreeRNeeded {
      get { return kdTree.rneeded; }
      set { kdTree.rneeded = value; }
    }
    [Storable]
    public bool KDTreeSelfMatch {
      get { return kdTree.selfmatch; }
      set { kdTree.selfmatch = value; }
    }
    [Storable]
    public double[] KDTreeSplits {
      get { return kdTree.splits; }
      set { kdTree.splits = value; }
    }
    [Storable]
    public int[] KDTreeTags {
      get { return kdTree.tags; }
      set { kdTree.tags = value; }
    }
    [Storable]
    public double[] KDTreeX {
      get { return kdTree.x; }
      set { kdTree.x = value; }
    }
    [Storable]
    public double[,] KDTreeXY {
      get { return kdTree.xy; }
      set { kdTree.xy = value; }
    }
    #endregion
  }
}