#region License Information
/* HeuristicLab
* Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Random;
using System;
using System.Linq;
namespace HeuristicLab.Analysis.SelfOrganizingMaps {
public static class SOM {
/// <summary>
/// Applies a self-organizing map to a set of feature column-vectors organized in a matrix.
/// </summary>
/// <param name="features">The feature matrix containing the column-vectors.</param>
/// <param name="random">The random number generator to use.</param>
/// <param name="somSize">The length of a side of the SOM grid (there are somSize * somSize neurons).</param>
/// <param name="iterations">The number of iterations to perform during learning.</param>
/// <param name="learningRate">The initial learning rate.</param>
/// <param name="learningRadius">The initial learning radius.</param>
/// <param name="jittering">Whether the final coordinates should be jittered slightly within the grid.</param>
/// <returns>A matrix of coordinates having N rows and 2 columns.</returns>
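/// <remarks>
/// Usage sketch (the dimensions and parameter values below are illustrative assumptions, not taken from this file):
/// <code>
/// var features = new DoubleMatrix(10, 50); // 10 features (rows) x 50 samples (columns), filled by the caller
/// var coords = SOM.Map(features, new MersenneTwister(), somSize: 10, iterations: 200);
/// // coords has 50 rows; coords[i, 0] and coords[i, 1] are the SOM grid coordinates of sample i
/// </code>
/// </remarks>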
public static DoubleMatrix Map(DoubleMatrix features, IRandom random = null, int somSize = 5, int iterations = 100, double learningRate = double.NaN, double learningRadius = 5.0, bool jittering = true) {
if (random == null) random = new MersenneTwister();
var K = somSize * somSize;
var N = features.Columns;
var L = features.Rows;
if (double.IsNaN(learningRate)) learningRate = 1.0 / Math.Sqrt(2.0 * L);
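// split the learning rate into a constant floor (10 %) and a part (90 %) that decays linearly over the iterations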
var fixedLearningRate = learningRate / 10.0;
var varLearningRate = 9.0 * fixedLearningRate;
Func<int, int> getX = (neuron) => neuron % somSize;
Func<int, int> getY = (neuron) => neuron / somSize;
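// Gaussian neighborhood kernel: the influence on neuron k decreases with its squared grid distance
// to the best matching unit (bmu); the radius sigma shrinks linearly as the iterations progress.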
Func<int, int, int, int, double> neighborhood = (maxIter, iter, k, bmu) => {
var sigma = learningRadius * ((maxIter - iter) / (double)maxIter) + 0.0001;
var xK = getX(k);
var yK = getY(k);
var xW = getX(bmu);
var yW = getY(bmu);
var d = (xK - xW) * (xK - xW) + (yK - yW) * (yK - yW);
return Math.Exp(-d / (2.0 * sigma * sigma));
};
var scaledFeatures = Scale(features);
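// initialize each of the K neurons with a random L-dimensional weight vector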
var weights = Enumerable.Range(0, K).Select(k => Enumerable.Range(0, L).Select(_ => random.NextDouble()).ToArray()).ToArray();
// normalize so that sum(weights[k]) = 1
for (var k = 0; k < K; k++) {
var sum = weights[k].Sum();
for (var i = 0; i < weights[k].Length; i++) weights[k][i] /= sum;
}
var oldWeights = weights.Select(x => (double[])x.Clone()).ToArray();
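// training: in every iteration all samples are presented in random order; each neuron's weights are
// pulled toward the current sample, scaled by the decaying learning rate and the neighborhood kernel.
// oldWeights keeps the state of the previous iteration so that all updates within one pass use the
// same reference weights.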
for (var iter = 0; iter < iterations; iter++) {
learningRate = varLearningRate * ((iterations - iter) / (double)iterations) + fixedLearningRate;
var pointShuffle = Enumerable.Range(0, N).Shuffle(random).ToArray();
for (var p = 0; p < N; p++) {
var i = pointShuffle[p];
var bmu = GetBestMatchingUnit(scaledFeatures, weights, i);
for (var k = 0; k < K; k++) {
for (var j = 0; j < L; j++) {
weights[k][j] = oldWeights[k][j] + learningRate * neighborhood(iterations, iter, k, bmu) * (scaledFeatures[j, i] - oldWeights[k][j]);
}
}
}
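// adopt the updated weights as the reference for the next iteration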
for (var k = 0; k < K; k++) {
for (var j = 0; j < L; j++) {
oldWeights[k][j] = weights[k][j];
}
}
}
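// map every sample to the grid coordinates of its best matching unit; jittering spreads samples
// that share the same neuron within the grid cell so they do not overlap exactly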
var result = new DoubleMatrix(N, 2);
for (var i = 0; i < N; i++) {
var bmu = GetBestMatchingUnit(scaledFeatures, weights, i);
if (!jittering) {
result[i, 0] = getX(bmu);
result[i, 1] = getY(bmu);
} else {
result[i, 0] = getX(bmu) + random.NextDouble() * 0.8;
result[i, 1] = getY(bmu) + random.NextDouble() * 0.8;
}
}
return result;
}
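/// <summary>
/// Returns the index of the neuron whose weight vector has the smallest squared Euclidean distance
/// to the i-th column of D.
/// </summary>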
private static int GetBestMatchingUnit(DoubleMatrix D, double[][] weights, int i) {
var bmu = -1;
var minV = double.MaxValue;
for (var k = 0; k < weights.Length; k++) {
var d = 0.0;
for (var r = 0; r < weights[k].Length; r++) {
d += (D[r, i] - weights[k][r]) * (D[r, i] - weights[k][r]);
}
if (d < minV) {
minV = d;
bmu = k;
}
}
return bmu;
}
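/// <summary>
/// Normalizes the feature matrix row-wise so that every row (feature) sums to 1;
/// rows summing to (almost) zero are copied unchanged.
/// </summary>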
private static DoubleMatrix Scale(DoubleMatrix features) {
var scaledFeatures = new DoubleMatrix(features.Rows, features.Columns);
for (var i = 0; i < features.Rows; i++) {
var rowSum = 0.0;
for (var j = 0; j < features.Columns; j++) rowSum += features[i, j];
if (rowSum.IsAlmost(0.0)) rowSum = 1.0;
for (var j = 0; j < features.Columns; j++) scaledFeatures[i, j] = features[i, j] / rowSum;
}
return scaledFeatures;
}
}
}