
source: branches/1614_GeneralizedQAP/HeuristicLab.Analysis/3.3/SelfOrganizingMaps/SOM.cs @ 17505

Last change on this file since 17505 was 13750, checked in by abeham, 9 years ago

#2457: Added SOM projection for problem instances, fixed a bug (learningRadius was not used)

File size: 5.6 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Random;
using System;
using System.Linq;

namespace HeuristicLab.Analysis.SelfOrganizingMaps {
  public static class SOM {
    /// <summary>
    /// Applies a self-organizing map to a set of feature column-vectors organized in a matrix.
    /// </summary>
    /// <param name="features">The feature matrix containing the column-vectors.</param>
    /// <param name="random">The random number generator to use.</param>
    /// <param name="somSize">The length of one side of the SOM grid (there are somSize * somSize neurons).</param>
    /// <param name="iterations">The number of learning iterations to perform.</param>
    /// <param name="learningRate">The initial learning rate.</param>
    /// <param name="learningRadius">The initial learning radius.</param>
    /// <param name="jittering">Whether the final coordinates should be jittered slightly within the grid.</param>
    /// <returns>A matrix of coordinates having N rows and 2 columns.</returns>
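    /// <example>
    /// A minimal usage sketch (illustrative only; the feature matrix dimensions and content are assumed):
    /// <code>
    /// var features = new DoubleMatrix(4, 10); // 4 features (rows) x 10 instances (columns), filled elsewhere
    /// var coords = SOM.Map(features, new MersenneTwister(), somSize: 5, iterations: 100);
    /// // coords has 10 rows (one per instance) and 2 columns (x/y position on the 5x5 grid)
    /// </code>
    /// </example>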
    public static DoubleMatrix Map(DoubleMatrix features, IRandom random = null, int somSize = 5, int iterations = 100, double learningRate = double.NaN, double learningRadius = 5.0, bool jittering = true) {
      if (random == null) random = new MersenneTwister();
      var K = somSize * somSize; // number of neurons in the somSize x somSize grid
      var N = features.Columns;  // number of instances (column-vectors)
      var L = features.Rows;     // number of features per instance

      if (double.IsNaN(learningRate)) learningRate = 1.0 / Math.Sqrt(2.0 * L);
      // the learning rate decays linearly from its initial value down to a tenth of it
      var fixedLearningRate = learningRate / 10.0;
      var varLearningRate = 9.0 * fixedLearningRate;
      Func<int, int> getX = (neuron) => neuron % somSize;
      Func<int, int> getY = (neuron) => neuron / somSize;
      // Gaussian neighborhood: influence decays with the squared grid distance to the BMU,
      // and the radius (sigma) shrinks linearly over the iterations
      Func<int, int, int, int, double> neighborhood = (maxIter, iter, k, bmu) => {
        var sigma = learningRadius * ((maxIter - iter) / (double)maxIter) + 0.0001;
        var xK = getX(k);
        var yK = getY(k);
        var xW = getX(bmu);
        var yW = getY(bmu);
        var d = (xK - xW) * (xK - xW) + (yK - yW) * (yK - yW);
        return Math.Exp(-d / (2.0 * sigma * sigma));
      };
      var scaledFeatures = Scale(features);
      // initialize each neuron's weight vector randomly
      var weights = Enumerable.Range(0, K).Select(k => Enumerable.Range(0, L).Select(_ => random.NextDouble()).ToArray()).ToArray();
      // normalize s.t. sum(weights[k]) = 1
      for (var k = 0; k < K; k++) {
        var sum = weights[k].Sum();
        for (var i = 0; i < weights[k].Length; i++) weights[k][i] /= sum;
      }
      var oldWeights = weights.Select(x => (double[])x.Clone()).ToArray();

      for (var iter = 0; iter < iterations; iter++) {
        learningRate = varLearningRate * ((iterations - iter) / (double)iterations) + fixedLearningRate;
        var pointShuffle = Enumerable.Range(0, N).Shuffle(random).ToArray();
        for (var p = 0; p < N; p++) {
          var i = pointShuffle[p];
          var bmu = GetBestMatchingUnit(scaledFeatures, weights, i);

          // pull every neuron's weights toward the current sample, scaled by the learning rate
          // and by its grid distance to the best matching unit
          for (var k = 0; k < K; k++) {
            for (var j = 0; j < L; j++) {
              weights[k][j] = oldWeights[k][j] + learningRate * neighborhood(iterations, iter, k, bmu) * (scaledFeatures[j, i] - oldWeights[k][j]);
            }
          }
        }
        // carry the updated weights over to the next iteration
        for (var k = 0; k < K; k++) {
          for (var j = 0; j < L; j++) {
            oldWeights[k][j] = weights[k][j];
          }
        }
      }

      // project each instance to the grid coordinates of its best matching unit,
      // optionally adding a small random offset (jitter) within the grid cell
      var result = new DoubleMatrix(N, 2);
      for (var i = 0; i < N; i++) {
        var bmu = GetBestMatchingUnit(scaledFeatures, weights, i);
        if (!jittering) {
          result[i, 0] = getX(bmu);
          result[i, 1] = getY(bmu);
        } else {
          result[i, 0] = getX(bmu) + random.NextDouble() * 0.8;
          result[i, 1] = getY(bmu) + random.NextDouble() * 0.8;
        }
      }
      return result;
    }

    // Returns the index of the neuron whose weight vector is closest (in squared Euclidean
    // distance) to column i of the scaled feature matrix D.
    private static int GetBestMatchingUnit(DoubleMatrix D, double[][] weights, int i) {
      var bmu = -1;
      var minV = double.MaxValue;
      for (var k = 0; k < weights.Length; k++) {
        var d = 0.0;
        for (var r = 0; r < weights[k].Length; r++) {
          d += (D[r, i] - weights[k][r]) * (D[r, i] - weights[k][r]);
        }
        if (d < minV) {
          minV = d;
          bmu = k;
        }
      }
      return bmu;
    }

    // Scales each row of the feature matrix so that its values sum to 1; rows that sum to 0 are left unchanged.
    private static DoubleMatrix Scale(DoubleMatrix features) {
      var scaledFeatures = new DoubleMatrix(features.Rows, features.Columns);
      for (var i = 0; i < features.Rows; i++) {
        var rowSum = 0.0;
        for (var j = 0; j < features.Columns; j++) rowSum += features[i, j];
        if (rowSum.IsAlmost(0.0)) rowSum = 1.0;
        for (var j = 0; j < features.Columns; j++) scaledFeatures[i, j] = features[i, j] / rowSum;
      }
      return scaledFeatures;
    }
  }
}