#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Random;
using System;
using System.Linq;

namespace HeuristicLab.Analysis.SelfOrganizingMaps {
  public static class RelationalSOM {
    /// <summary>
    /// This is the online algorithm described in
    /// Olteanu, M. and Villa-Vialaneix, N. 2015.
    /// On-line relational and multiple relational SOM.
    /// Neurocomputing 147, pp. 15-30.
    /// </summary>
    /// <param name="dissimilarities">The full NxN matrix containing all dissimilarities between N points.</param>
    /// <param name="random">The random number generator to use.</param>
    /// <param name="somSize">The length of a side of the SOM grid (there are somSize * somSize neurons).</param>
39 | /// <param name="iterations">The amount of iterations to perform in learning.</param>
|
---|
40 | /// <param name="learningRate">The initial learning rate.</param>
|
---|
41 | /// <param name="learningRadius">The initial learning radius.</param>
|
---|
42 | /// <param name="jittering">If the final coordinates should be jittered slightly within the grid.</param>
|
---|
    /// <returns>A matrix of coordinates having N rows and 2 columns.</returns>
    public static DoubleMatrix Map(DoubleMatrix dissimilarities, IRandom random = null, int somSize = 5, int iterations = 100, double learningRate = double.NaN, double learningRadius = 5.0, bool jittering = true) {
      if (random == null) random = new MersenneTwister();
      var K = somSize * somSize;
      var N = dissimilarities.Rows;
      if (double.IsNaN(learningRate)) learningRate = 1.0 / Math.Sqrt(2.0 * N);
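      // the effective learning rate decays linearly from learningRate down to learningRate / 10 over the iterations (see the update in the main loop below)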
      var fixedLearningRate = learningRate / 10.0;
      var varLearningRate = 9.0 * fixedLearningRate;
      Func<int, int> getX = (neuron) => neuron % somSize;
      Func<int, int> getY = (neuron) => neuron / somSize;
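      // Gaussian neighborhood of neuron k around the best matching unit on the 2D grid; sigma shrinks linearly from learningRadius towards (almost) zero over the iterations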
      Func<int, int, int, int, double> neighborhood = (maxIter, iter, k, bmu) => {
        var sigma = learningRadius * ((maxIter - iter) / (double)maxIter) + 0.0001;
        var xK = getX(k);
        var yK = getY(k);
        var xW = getX(bmu);
        var yW = getY(bmu);
        var d = (xK - xW) * (xK - xW) + (yK - yW) * (yK - yW);
        return Math.Exp(-d / (2.0 * sigma * sigma));
      };
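      // each neuron k is represented by a prototype alphas[k], a vector of N coefficients over the data points, initialized randomly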
      var alphas = Enumerable.Range(0, K).Select(k => Enumerable.Range(0, N).Select(_ => random.NextDouble()).ToArray()).ToArray();
      // normalize s.t. sum(alphas[k]) = 1
      for (var k = 0; k < K; k++) {
        var sum = alphas[k].Sum();
        for (var i = 0; i < alphas[k].Length; i++) alphas[k][i] /= sum;
      }
      var oldAlphas = alphas.Select(x => (double[])x.Clone()).ToArray();

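      // online training: in every iteration the points are presented in random order and each presented point pulls the prototypes towards it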
      for (var iter = 0; iter < iterations; iter++) {
        learningRate = varLearningRate * ((iterations - iter) / (double)iterations) + fixedLearningRate;
        var pointShuffle = Enumerable.Range(0, N).Shuffle(random).ToArray();
        for (var p = 0; p < N; p++) {
          var i = pointShuffle[p];
          var bmu = GetBestMatchingUnit(dissimilarities, alphas, i);

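          // move every prototype towards the unit vector of point i, scaled by the learning rate and the grid neighborhood to the BMU; updates are computed relative to the previous iteration's prototypes (oldAlphas)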
          for (var k = 0; k < K; k++) {
            for (var j = 0; j < N; j++) {
              alphas[k][j] = oldAlphas[k][j] + learningRate * neighborhood(iterations, iter, k, bmu) * ((i == j ? 1.0 : 0.0) - oldAlphas[k][j]);
            }
          }
        }
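        // commit this iteration's prototypes so that the next iteration updates against them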
        for (var k = 0; k < K; k++) {
          for (var j = 0; j < N; j++) {
            oldAlphas[k][j] = alphas[k][j];
          }
        }
      }

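      // project each point to the grid position of its best matching unit; optional jittering spreads points mapped to the same neuron within its grid cell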
      var result = new DoubleMatrix(N, 2);
      for (var i = 0; i < N; i++) {
        var bmu = GetBestMatchingUnit(dissimilarities, alphas, i);
        if (!jittering) {
          result[i, 0] = getX(bmu);
          result[i, 1] = getY(bmu);
        } else {
          result[i, 0] = getX(bmu) + random.NextDouble() * 0.8;
          result[i, 1] = getY(bmu) + random.NextDouble() * 0.8;
        }
      }
      return result;
    }

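    /// <summary>
    /// Returns the index of the prototype closest to point i using the relational distance
    /// d(i, k) = (D * alpha_k)_i - 0.5 * alpha_k^T * D * alpha_k.
    /// </summary>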
    private static int GetBestMatchingUnit(DoubleMatrix D, double[][] alphas, int i) {
      var bmu = -1;
      var minV = double.MaxValue;
      for (var k = 0; k < alphas.Length; k++) {
        var Daki = 0.0;
        var akDak = 0.0;
        for (var r = 0; r < D.Rows; r++) {
          var Dakr = 0.0;
          for (var s = 0; s < D.Rows; s++) {
            Dakr += D[r, s] * alphas[k][s];
          }
          if (r == i) Daki = Dakr;
          akDak += alphas[k][r] * Dakr;
        }
        var v = Daki - 0.5 * akDak;
        if (v < minV) {
          bmu = k;
          minV = v;
        }
      }
      return bmu;
    }
  }
}