
source: branches/HiveHiveEngine/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs @ 7994

Last change on this file since 7994 was 7383, checked in by ascheibe, 13 years ago

#1745 merged trunk changes into branch

File size: 10.9 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a nearest neighbour model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
  public sealed class NearestNeighbourModel : NamedItem, INearestNeighbourModel {

    private alglib.nearestneighbor.kdtree kdTree;
    public alglib.nearestneighbor.kdtree KDTree {
      get { return kdTree; }
      set {
        if (value != kdTree) {
          if (value == null) throw new ArgumentNullException();
          kdTree = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }

    [Storable]
    private string targetVariable;
    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [Storable]
    private int k;
    [StorableConstructor]
    private NearestNeighbourModel(bool deserializing)
      : base(deserializing) {
      if (deserializing)
        kdTree = new alglib.nearestneighbor.kdtree();
    }
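    // cloning constructor: copies every field of the alglib kd-tree manually so that
    // the clone holds an independent copy of the search structure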
    private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
      : base(original, cloner) {
      kdTree = new alglib.nearestneighbor.kdtree();
      kdTree.approxf = original.kdTree.approxf;
      kdTree.boxmax = (double[])original.kdTree.boxmax.Clone();
      kdTree.boxmin = (double[])original.kdTree.boxmin.Clone();
      kdTree.buf = (double[])original.kdTree.buf.Clone();
      kdTree.curboxmax = (double[])original.kdTree.curboxmax.Clone();
      kdTree.curboxmin = (double[])original.kdTree.curboxmin.Clone();
      kdTree.curdist = original.kdTree.curdist;
      kdTree.debugcounter = original.kdTree.debugcounter;
      kdTree.idx = (int[])original.kdTree.idx.Clone();
      kdTree.kcur = original.kdTree.kcur;
      kdTree.kneeded = original.kdTree.kneeded;
      kdTree.n = original.kdTree.n;
      kdTree.nodes = (int[])original.kdTree.nodes.Clone();
      kdTree.normtype = original.kdTree.normtype;
      kdTree.nx = original.kdTree.nx;
      kdTree.ny = original.kdTree.ny;
      kdTree.r = (double[])original.kdTree.r.Clone();
      kdTree.rneeded = original.kdTree.rneeded;
      kdTree.selfmatch = original.kdTree.selfmatch;
      kdTree.splits = (double[])original.kdTree.splits.Clone();
      kdTree.tags = (int[])original.kdTree.tags.Clone();
      kdTree.x = (double[])original.kdTree.x.Clone();
      kdTree.xy = (double[,])original.kdTree.xy.Clone();

      k = original.k;
      targetVariable = original.targetVariable;
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    public NearestNeighbourModel(alglib.nearestneighbor.kdtree kdTree, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
      : base() {
      this.name = ItemName;
      this.description = ItemDescription;
      this.kdTree = kdTree;
      this.k = k;
      this.targetVariable = targetVariable;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      if (classValues != null)
        this.classValues = (double[])classValues.Clone();
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NearestNeighbourModel(this, cloner);
    }

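    // regression estimate: for each row the k nearest training points are queried from
    // the kd-tree and their target values are averaged, weighted by inverse distance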
    public IEnumerable<double> GetEstimatedValues(Dataset dataset, IEnumerable<int> rows) {
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] y = new double[1];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int actNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
        alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
        alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);

        double distanceWeightedValue = 0.0;
        double distsSum = 0.0;
        for (int i = 0; i < actNeighbours; i++) {
          distanceWeightedValue += neighbours[i, columns] / dists[i];
          distsSum += 1.0 / dists[i];
        }
        yield return distanceWeightedValue / distsSum;
      }
    }

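    // classification estimate: the class values of the k nearest training points are
    // counted and the class with the most votes is returned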
    public IEnumerable<double> GetEstimatedClassValues(Dataset dataset, IEnumerable<int> rows) {
      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      int[] y = new int[classValues.Length];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int actNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
        alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
        alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);

        Array.Clear(y, 0, y.Length);
        for (int i = 0; i < actNeighbours; i++) {
          int classValue = (int)Math.Round(neighbours[i, columns]);
          y[classValue]++;
        }

        // find the class with the largest probability value
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }


    public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NearestNeighbourRegressionSolution(problemData, this);
    }
    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
      return CreateRegressionSolution(problemData);
    }
    public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NearestNeighbourClassificationSolution(problemData, this);
    }
    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
      return CreateClassificationSolution(problemData);
    }

    #region events
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion

    #region persistence
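    // the kdTree field itself is not [Storable]; its internal fields are exposed through
    // the following storable properties so the tree can be rebuilt on deserialization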
    [Storable]
    public double KDTreeApproxF {
      get { return kdTree.approxf; }
      set { kdTree.approxf = value; }
    }
    [Storable]
    public double[] KDTreeBoxMax {
      get { return kdTree.boxmax; }
      set { kdTree.boxmax = value; }
    }
    [Storable]
    public double[] KDTreeBoxMin {
      get { return kdTree.boxmin; }
      set { kdTree.boxmin = value; }
    }
    [Storable]
    public double[] KDTreeBuf {
      get { return kdTree.buf; }
      set { kdTree.buf = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMax {
      get { return kdTree.curboxmax; }
      set { kdTree.curboxmax = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMin {
      get { return kdTree.curboxmin; }
      set { kdTree.curboxmin = value; }
    }
    [Storable]
    public double KDTreeCurDist {
      get { return kdTree.curdist; }
      set { kdTree.curdist = value; }
    }
    [Storable]
    public int KDTreeDebugCounter {
      get { return kdTree.debugcounter; }
      set { kdTree.debugcounter = value; }
    }
    [Storable]
    public int[] KDTreeIdx {
      get { return kdTree.idx; }
      set { kdTree.idx = value; }
    }
    [Storable]
    public int KDTreeKCur {
      get { return kdTree.kcur; }
      set { kdTree.kcur = value; }
    }
    [Storable]
    public int KDTreeKNeeded {
      get { return kdTree.kneeded; }
      set { kdTree.kneeded = value; }
    }
    [Storable]
    public int KDTreeN {
      get { return kdTree.n; }
      set { kdTree.n = value; }
    }
    [Storable]
    public int[] KDTreeNodes {
      get { return kdTree.nodes; }
      set { kdTree.nodes = value; }
    }
    [Storable]
    public int KDTreeNormType {
      get { return kdTree.normtype; }
      set { kdTree.normtype = value; }
    }
    [Storable]
    public int KDTreeNX {
      get { return kdTree.nx; }
      set { kdTree.nx = value; }
    }
    [Storable]
    public int KDTreeNY {
      get { return kdTree.ny; }
      set { kdTree.ny = value; }
    }
    [Storable]
    public double[] KDTreeR {
      get { return kdTree.r; }
      set { kdTree.r = value; }
    }
    [Storable]
    public double KDTreeRNeeded {
      get { return kdTree.rneeded; }
      set { kdTree.rneeded = value; }
    }
    [Storable]
    public bool KDTreeSelfMatch {
      get { return kdTree.selfmatch; }
      set { kdTree.selfmatch = value; }
    }
    [Storable]
    public double[] KDTreeSplits {
      get { return kdTree.splits; }
      set { kdTree.splits = value; }
    }
    [Storable]
    public int[] KDTreeTags {
      get { return kdTree.tags; }
      set { kdTree.tags = value; }
    }
    [Storable]
    public double[] KDTreeX {
      get { return kdTree.x; }
      set { kdTree.x = value; }
    }
    [Storable]
    public double[,] KDTreeXY {
      get { return kdTree.xy; }
      set { kdTree.xy = value; }
    }
    #endregion
  }
}