source: trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs @ 16086

Last change on this file since 16086 was 16086, checked in by bwerth, 20 months ago

#2941 added division-by-zero check to scaling

File size: 15.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Represents a nearest neighbour model for regression and classification
  /// </summary>
  [StorableClass]
  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
  public sealed class NearestNeighbourModel : ClassificationModel, INearestNeighbourModel {

    private readonly object kdTreeLockObject = new object();
    private alglib.nearestneighbor.kdtree kdTree;
    public alglib.nearestneighbor.kdtree KDTree {
      get { return kdTree; }
      set {
        if (value != kdTree) {
          if (value == null) throw new ArgumentNullException();
          kdTree = value;
          OnChanged(EventArgs.Empty);
        }
      }
    }


    public override IEnumerable<string> VariablesUsedForPrediction {
      get { return allowedInputVariables; }
    }

    [Storable]
    private string[] allowedInputVariables;
    [Storable]
    private double[] classValues;
    [Storable]
    private int k;
    [Storable(DefaultValue = null)]
    private double[] weights; // not set for old versions loaded from disk
    [Storable(DefaultValue = null)]
    private double[] offsets; // not set for old versions loaded from disk

    [StorableConstructor]
    private NearestNeighbourModel(bool deserializing)
      : base(deserializing) {
      if (deserializing)
        kdTree = new alglib.nearestneighbor.kdtree();
    }
    private NearestNeighbourModel(NearestNeighbourModel original, Cloner cloner)
      : base(original, cloner) {
      kdTree = new alglib.nearestneighbor.kdtree();
      kdTree.approxf = original.kdTree.approxf;
      kdTree.boxmax = (double[])original.kdTree.boxmax.Clone();
      kdTree.boxmin = (double[])original.kdTree.boxmin.Clone();
      kdTree.buf = (double[])original.kdTree.buf.Clone();
      kdTree.curboxmax = (double[])original.kdTree.curboxmax.Clone();
      kdTree.curboxmin = (double[])original.kdTree.curboxmin.Clone();
      kdTree.curdist = original.kdTree.curdist;
      kdTree.debugcounter = original.kdTree.debugcounter;
      kdTree.idx = (int[])original.kdTree.idx.Clone();
      kdTree.kcur = original.kdTree.kcur;
      kdTree.kneeded = original.kdTree.kneeded;
      kdTree.n = original.kdTree.n;
      kdTree.nodes = (int[])original.kdTree.nodes.Clone();
      kdTree.normtype = original.kdTree.normtype;
      kdTree.nx = original.kdTree.nx;
      kdTree.ny = original.kdTree.ny;
      kdTree.r = (double[])original.kdTree.r.Clone();
      kdTree.rneeded = original.kdTree.rneeded;
      kdTree.selfmatch = original.kdTree.selfmatch;
      kdTree.splits = (double[])original.kdTree.splits.Clone();
      kdTree.tags = (int[])original.kdTree.tags.Clone();
      kdTree.x = (double[])original.kdTree.x.Clone();
      kdTree.xy = (double[,])original.kdTree.xy.Clone();

      k = original.k;
      isCompatibilityLoaded = original.IsCompatibilityLoaded;
      if (!IsCompatibilityLoaded) {
        weights = new double[original.weights.Length];
        Array.Copy(original.weights, weights, weights.Length);
        offsets = new double[original.offsets.Length];
        Array.Copy(original.offsets, this.offsets, this.offsets.Length);
      }
      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
      if (original.classValues != null)
        this.classValues = (double[])original.classValues.Clone();
    }
    public NearestNeighbourModel(IDataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> weights = null, double[] classValues = null)
      : base(targetVariable) {
      Name = ItemName;
      Description = ItemDescription;
      this.k = k;
      this.allowedInputVariables = allowedInputVariables.ToArray();
      double[,] inputMatrix;
      if (IsCompatibilityLoaded) {
        // no scaling
        inputMatrix = dataset.ToArray(
          this.allowedInputVariables.Concat(new string[] { targetVariable }),
          rows);
      } else {
        this.offsets = this.allowedInputVariables
          .Select(name => dataset.GetDoubleValues(name, rows).Average() * -1)
          .Concat(new double[] { 0 }) // no offset for target variable
          .ToArray();
        if (weights == null) {
          // automatic determination of weights (all features should have variance = 1)
          this.weights = this.allowedInputVariables
            .Select(name => {
              var pop = dataset.GetDoubleValues(name, rows).StandardDeviationPop();
              return pop.IsAlmost(0) ? 1.0 : 1.0 / pop;
            })
            .Concat(new double[] { 1.0 }) // no scaling for target variable
            .ToArray();
        } else {
          // user specified weights (+ 1 for target)
          this.weights = weights.Concat(new double[] { 1.0 }).ToArray();
          if (this.weights.Length - 1 != this.allowedInputVariables.Length)
            throw new ArgumentException("The number of elements in the weight vector must match the number of input variables");
        }
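        // With these offsets and weights each input column is effectively standardized:
        //   scaledValue = (value + offset) * weight = (value - mean) / stdDev
        // The appended target column keeps offset 0 and weight 1, i.e. it is left unscaled.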
        inputMatrix = CreateScaledData(dataset, this.allowedInputVariables.Concat(new string[] { targetVariable }), rows, this.offsets, this.weights);
      }

      if (inputMatrix.ContainsNanOrInfinity())
        throw new NotSupportedException(
          "Nearest neighbour model does not support NaN or infinity values in the input dataset.");

      this.kdTree = new alglib.nearestneighbor.kdtree();

      var nRows = inputMatrix.GetLength(0);
      var nFeatures = inputMatrix.GetLength(1) - 1;

      if (classValues != null) {
        this.classValues = (double[])classValues.Clone();
        int nClasses = classValues.Length;
        // map original class values to values [0..nClasses-1]
        var classIndices = new Dictionary<double, double>();
        for (int i = 0; i < nClasses; i++)
          classIndices[classValues[i]] = i;

        for (int row = 0; row < nRows; row++) {
          inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
        }
      }
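      // Build the kd-tree over the feature columns; the last column (target value resp. class index)
      // is stored as the single y-value of each point. The norm type 2 selects the Euclidean norm.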
      alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
    }

    private static double[,] CreateScaledData(IDataset dataset, IEnumerable<string> variables, IEnumerable<int> rows, double[] offsets, double[] factors) {
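      // Each column is read through a LinearTransformation that computes value * Multiplier + Addend,
      // i.e. value * factor + offset * factor = (value + offset) * factor.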
      var transforms =
        variables.Select(
          (_, colIdx) =>
            new LinearTransformation(variables) { Addend = offsets[colIdx] * factors[colIdx], Multiplier = factors[colIdx] });
      return dataset.ToArray(variables, transforms, rows);
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new NearestNeighbourModel(this, cloner);
    }

    public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
      double[,] inputData;
      if (IsCompatibilityLoaded) {
        inputData = dataset.ToArray(allowedInputVariables, rows);
      } else {
        inputData = CreateScaledData(dataset, allowedInputVariables, rows, offsets, weights);
      }

      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int numNeighbours;
        lock (kdTreeLockObject) { // gkronber: the following calls change the kdTree data structure
          numNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
          alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
          alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);
        }

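        // Prediction: inverse-distance weighted average of the k neighbours' target values,
        //   yHat = sum_i(y_i / d_i) / sum_i(1 / d_i)
        // so closer neighbours contribute more to the estimate.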
        double distanceWeightedValue = 0.0;
        double distsSum = 0.0;
        for (int i = 0; i < numNeighbours; i++) {
          distanceWeightedValue += neighbours[i, columns] / dists[i];
          distsSum += 1.0 / dists[i];
        }
        yield return distanceWeightedValue / distsSum;
      }
    }

    public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
      if (classValues == null) throw new InvalidOperationException("No class values are defined.");
      double[,] inputData;
      if (IsCompatibilityLoaded) {
        inputData = dataset.ToArray(allowedInputVariables, rows);
      } else {
        inputData = CreateScaledData(dataset, allowedInputVariables, rows, offsets, weights);
      }
      int n = inputData.GetLength(0);
      int columns = inputData.GetLength(1);
      double[] x = new double[columns];
      int[] y = new int[classValues.Length];
      double[] dists = new double[k];
      double[,] neighbours = new double[k, columns + 1];

      for (int row = 0; row < n; row++) {
        for (int column = 0; column < columns; column++) {
          x[column] = inputData[row, column];
        }
        int numNeighbours;
        lock (kdTreeLockObject) {
          // gkronber: the following calls change the kdTree data structure
          numNeighbours = alglib.nearestneighbor.kdtreequeryknn(kdTree, x, k, false);
          alglib.nearestneighbor.kdtreequeryresultsdistances(kdTree, ref dists);
          alglib.nearestneighbor.kdtreequeryresultsxy(kdTree, ref neighbours);
        }
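        // Majority vote: count how often each class index occurs among the k nearest neighbours
        // (the last column of each neighbour holds the mapped class index in [0..nClasses-1]).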
        Array.Clear(y, 0, y.Length);
        for (int i = 0; i < numNeighbours; i++) {
          int classValue = (int)Math.Round(neighbours[i, columns]);
          y[classValue]++;
        }

        // find the class with the largest probability (vote count)
        int maxProbClassIndex = 0;
        double maxProb = y[0];
        for (int i = 1; i < y.Length; i++) {
          if (maxProb < y[i]) {
            maxProb = y[i];
            maxProbClassIndex = i;
          }
        }
        yield return classValues[maxProbClassIndex];
      }
    }


    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
      return new NearestNeighbourRegressionSolution(this, new RegressionProblemData(problemData));
    }
    public override IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
      return new NearestNeighbourClassificationSolution(this, new ClassificationProblemData(problemData));
    }
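    // Example usage (illustrative sketch only, assuming an IRegressionProblemData instance 'problemData'):
    //   var model = new NearestNeighbourModel(problemData.Dataset, problemData.TrainingIndices, 3,
    //                                         problemData.TargetVariable, problemData.AllowedInputVariables);
    //   IEnumerable<double> estimates = model.GetEstimatedValues(problemData.Dataset, problemData.TestIndices);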

    #region events
    public event EventHandler Changed;
    private void OnChanged(EventArgs e) {
      var handlers = Changed;
      if (handlers != null)
        handlers(this, e);
    }
    #endregion


    // BackwardsCompatibility3.3
    #region Backwards compatible code, remove with 3.4

    private bool isCompatibilityLoaded = false; // new kNN models have the value false, kNN models loaded from disk have the value true
    [Storable(DefaultValue = true)]
    public bool IsCompatibilityLoaded {
      get { return isCompatibilityLoaded; }
      set { isCompatibilityLoaded = value; }
    }
    #endregion
    #region persistence
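    // The alglib kd-tree class itself carries no storable attributes; its internal fields are
    // therefore exposed through the following storable wrapper properties so the tree can be
    // persisted and restored without being rebuilt.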
    [Storable]
    public double KDTreeApproxF {
      get { return kdTree.approxf; }
      set { kdTree.approxf = value; }
    }
    [Storable]
    public double[] KDTreeBoxMax {
      get { return kdTree.boxmax; }
      set { kdTree.boxmax = value; }
    }
    [Storable]
    public double[] KDTreeBoxMin {
      get { return kdTree.boxmin; }
      set { kdTree.boxmin = value; }
    }
    [Storable]
    public double[] KDTreeBuf {
      get { return kdTree.buf; }
      set { kdTree.buf = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMax {
      get { return kdTree.curboxmax; }
      set { kdTree.curboxmax = value; }
    }
    [Storable]
    public double[] KDTreeCurBoxMin {
      get { return kdTree.curboxmin; }
      set { kdTree.curboxmin = value; }
    }
    [Storable]
    public double KDTreeCurDist {
      get { return kdTree.curdist; }
      set { kdTree.curdist = value; }
    }
    [Storable]
    public int KDTreeDebugCounter {
      get { return kdTree.debugcounter; }
      set { kdTree.debugcounter = value; }
    }
    [Storable]
    public int[] KDTreeIdx {
      get { return kdTree.idx; }
      set { kdTree.idx = value; }
    }
    [Storable]
    public int KDTreeKCur {
      get { return kdTree.kcur; }
      set { kdTree.kcur = value; }
    }
    [Storable]
    public int KDTreeKNeeded {
      get { return kdTree.kneeded; }
      set { kdTree.kneeded = value; }
    }
    [Storable]
    public int KDTreeN {
      get { return kdTree.n; }
      set { kdTree.n = value; }
    }
    [Storable]
    public int[] KDTreeNodes {
      get { return kdTree.nodes; }
      set { kdTree.nodes = value; }
    }
    [Storable]
    public int KDTreeNormType {
      get { return kdTree.normtype; }
      set { kdTree.normtype = value; }
    }
    [Storable]
    public int KDTreeNX {
      get { return kdTree.nx; }
      set { kdTree.nx = value; }
    }
    [Storable]
    public int KDTreeNY {
      get { return kdTree.ny; }
      set { kdTree.ny = value; }
    }
    [Storable]
    public double[] KDTreeR {
      get { return kdTree.r; }
      set { kdTree.r = value; }
    }
    [Storable]
    public double KDTreeRNeeded {
      get { return kdTree.rneeded; }
      set { kdTree.rneeded = value; }
    }
    [Storable]
    public bool KDTreeSelfMatch {
      get { return kdTree.selfmatch; }
      set { kdTree.selfmatch = value; }
    }
    [Storable]
    public double[] KDTreeSplits {
      get { return kdTree.splits; }
      set { kdTree.splits = value; }
    }
    [Storable]
    public int[] KDTreeTags {
      get { return kdTree.tags; }
      set { kdTree.tags = value; }
    }
    [Storable]
    public double[] KDTreeX {
      get { return kdTree.x; }
      set { kdTree.x = value; }
    }
    [Storable]
    public double[,] KDTreeXY {
      get { return kdTree.xy; }
      set { kdTree.xy = value; }
    }
    #endregion
  }
}