
Timestamp: 04/08/16 14:24:51 (9 years ago)
Author: abeham
Message: #2457: Added SOM projection for problem instances, fixed a bug (learningRadius was not used)

File: 1 edited

Legend: added lines are prefixed with "+", removed lines with "-", unmodified context lines carry no prefix.
  • branches/PerformanceComparison/HeuristicLab.OptimizationExpertSystem/3.3/Views/UnderstandingSolutionsView.cs

r13745 → r13750

using HeuristicLab.Analysis;
using HeuristicLab.Analysis.QualityAnalysis;
+using HeuristicLab.Analysis.SelfOrganizingMaps;
using HeuristicLab.Common;
using HeuristicLab.Core;
…
        DoubleMatrix coords = null;
        if (projection == "SOM")
-          coords = Som(dissimilarities, new MersenneTwister(42), jittering: true);
+          coords = RelationalSOM.Map(dissimilarities, new MersenneTwister(42), jittering: true);
        else coords = MultidimensionalScaling.KruskalShepard(dissimilarities);
        var dataPoints = new List<DataPoint>();
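The call site above now delegates to RelationalSOM.Map from the newly referenced HeuristicLab.Analysis.SelfOrganizingMaps namespace. That class itself is not part of this changeset; judging from the removed local Som helper in the hunk below, it presumably accepts a dissimilarity matrix and a random number generator and returns an Nx2 matrix of grid coordinates. A minimal usage sketch under that assumption (the class name and call are taken from the changed line, the matrix values are made up):

  using System;
  using HeuristicLab.Analysis.SelfOrganizingMaps;
  using HeuristicLab.Data;
  using HeuristicLab.Random;

  public static class RelationalSomUsageSketch {
    public static void Main() {
      // Tiny symmetric 4x4 dissimilarity matrix for four problem instances (made-up values).
      var values = new double[,] {
        { 0.0, 1.0, 4.0, 5.0 },
        { 1.0, 0.0, 3.0, 4.0 },
        { 4.0, 3.0, 0.0, 1.0 },
        { 5.0, 4.0, 1.0, 0.0 }
      };
      var d = new DoubleMatrix(4, 4);
      for (var r = 0; r < 4; r++)
        for (var c = 0; c < 4; c++)
          d[r, c] = values[r, c];

      // Assumed to mirror the removed Som helper: returns an Nx2 matrix of 2-D coordinates.
      var coords = RelationalSOM.Map(d, new MersenneTwister(42), jittering: true);
      for (var i = 0; i < coords.Rows; i++)
        Console.WriteLine("instance {0}: ({1:F2}, {2:F2})", i, coords[i, 0], coords[i, 1]);
    }
  }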
     
…
      return dval.Value;
    }
-
-    #region SOM projection
-    /// <summary>
-    /// This is the online algorithm described in
-    /// Olteanu, M. and Villa-Vialaneix, N. 2015.
-    /// On-line relational and multiple relational SOM.
-    /// Neurocomputing 147, pp. 15-30.
-    /// </summary>
-    /// <param name="dissimilarities">The full NxN matrix containing all dissimilarities between N points.</param>
-    /// <param name="random">The random number generator to use.</param>
-    /// <param name="somSize">The length of a side of the SOM grid (there are somSize * somSize neurons).</param>
-    /// <param name="iterations">The amount of iterations to perform in learning.</param>
-    /// <param name="learningRate">The initial learning rate.</param>
-    /// <param name="learningRadius">The initial learning radius.</param>
-    /// <param name="jittering">If the final coordinates should be jittered slightly within the grid.</param>
-    /// <returns>A matrix of coordinates having N rows and 2 columns.</returns>
-    private DoubleMatrix Som(DoubleMatrix dissimilarities, IRandom random, int somSize = 5, int iterations = 100, double learningRate = double.NaN, double learningRadius = 5.0, bool jittering = true) {
-      var K = somSize * somSize;
-      var N = dissimilarities.Rows;
-      if (double.IsNaN(learningRate)) learningRate = 1.0 / Math.Sqrt(2.0 * N);
-      var fixedLearningRate = learningRate / 10.0;
-      var varLearningRate = 9.0 * fixedLearningRate;
-      Func<int, int, double> learningRateT = (maxIter, iter) => {
-        return varLearningRate * ((maxIter - iter) / (double)maxIter) + fixedLearningRate;
-      };
-      Func<int, int> getX = (neuron) => neuron % somSize;
-      Func<int, int> getY = (neuron) => neuron / somSize;
-      Func<int, int, int, int, double> neighborhood = (maxIter, iter, k, bmu) => {
-        var sigma = 1.0 * ((maxIter - iter) / (double)maxIter) + 0.0001;
-        var xK = getX(k);
-        var yK = getY(k);
-        var xW = getX(bmu);
-        var yW = getY(bmu);
-        var d = (xK - xW) * (xK - xW) + (yK - yW) * (yK - yW);
-        return Math.Exp(-d / (2.0 * sigma * sigma));
-      };
-      var alphas = Enumerable.Range(0, K).Select(k => Enumerable.Range(0, N).Select(_ => random.NextDouble()).ToArray()).ToArray();
-      // normalize s.t. sum(alphas[k]) = 1
-      for (var k = 0; k < K; k++) {
-        var sum = alphas[k].Sum();
-        for (var i = 0; i < alphas[k].Length; i++) alphas[k][i] /= sum;
-      }
-      var oldAlphas = alphas.Select(x => (double[])x.Clone()).ToArray();
-
-      for (var iter = 0; iter < iterations; iter++) {
-        var pointShuffle = Enumerable.Range(0, N).Shuffle(random).ToArray();
-        for (var p = 0; p < N; p++) {
-          var i = pointShuffle[p];
-          var bmu = GetBestMatchingUnit(dissimilarities, alphas, i);
-
-          for (var k = 0; k < K; k++) {
-            for (var j = 0; j < N; j++) {
-              alphas[k][j] = oldAlphas[k][j] + learningRateT(iterations, iter) * neighborhood(iterations, iter, k, bmu) * ((i == j ? 1.0 : 0.0) - oldAlphas[k][j]);
-            }
-          }
-        }
-        for (var k = 0; k < K; k++) {
-          for (var j = 0; j < N; j++) {
-            oldAlphas[k][j] = alphas[k][j];
-          }
-        }
-      }
-
-      var result = new DoubleMatrix(N, 2);
-      for (var i = 0; i < N; i++) {
-        var bmu = GetBestMatchingUnit(dissimilarities, alphas, i);
-        if (!jittering) {
-          result[i, 0] = getX(bmu);
-          result[i, 1] = getY(bmu);
-        } else {
-          result[i, 0] = getX(bmu) + random.NextDouble() * 0.8;
-          result[i, 1] = getY(bmu) + random.NextDouble() * 0.8;
-        }
-      }
-      return result;
-    }
-
-    private int GetBestMatchingUnit(DoubleMatrix D, double[][] alphas, int i) {
-      var bmu = -1;
-      var minV = double.MaxValue;
-      for (var k = 0; k < alphas.Length; k++) {
-        var Daki = 0.0;
-        var akDak = 0.0;
-        for (var r = 0; r < D.Rows; r++) {
-          var Dakr = 0.0;
-          for (var s = 0; s < D.Rows; s++) {
-            Dakr += D[r, s] * alphas[k][s];
-          }
-          if (r == i) Daki = Dakr;
-          akDak += alphas[k][r] * Dakr;
-        }
-        var v = Daki - 0.5 * akDak;
-        if (v < minV) {
-          bmu = k;
-          minV = v;
-        }
-      }
-      return bmu;
-    }
-    #endregion
-
-    private DoubleMatrix Mds(DoubleMatrix dissimilarities) {
-      return MultidimensionalScaling.KruskalShepard(dissimilarities);
-    }
    #endregion
  }
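For orientation: the removed GetBestMatchingUnit helper computes the relational distance used in the cited Olteanu and Villa-Vialaneix paper. With D the NxN dissimilarity matrix and alpha_k the coefficient vector of prototype k, observation i is assigned to the prototype minimizing

    d(i, k) = (D * alpha_k)_i - 1/2 * alpha_k^T * D * alpha_k

which the nested loops accumulate as Daki (the i-th component of D * alpha_k) and akDak (the quadratic term alpha_k^T * D * alpha_k).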
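The bug referenced in the commit message is visible in the removed neighborhood closure: its width sigma always decays from 1.0, so the learningRadius parameter (default 5.0) had no effect. How RelationalSOM in r13750 actually uses the radius is not shown in this changeset; the sketch below only contrasts the removed schedule with one plausible fix in which the decay starts at learningRadius.

  using System;

  public static class NeighborhoodRadiusSketch {
    // Removed (buggy) schedule: learningRadius is accepted by Som but never read here.
    public static double SigmaOld(int maxIter, int iter, double learningRadius) {
      return 1.0 * ((maxIter - iter) / (double)maxIter) + 0.0001;
    }

    // Hypothetical fix: the decay starts at learningRadius instead of 1.0.
    // This is an assumption; the actual RelationalSOM implementation may differ.
    public static double SigmaAssumedFix(int maxIter, int iter, double learningRadius) {
      return learningRadius * ((maxIter - iter) / (double)maxIter) + 0.0001;
    }

    public static void Main() {
      const int maxIter = 100;
      const double learningRadius = 5.0;  // default of the removed Som helper
      foreach (var iter in new[] { 0, 50, 99 })
        Console.WriteLine("iter {0,3}: sigma old = {1:F4}, sigma assumed fix = {2:F4}",
          iter, SigmaOld(maxIter, iter, learningRadius), SigmaAssumedFix(maxIter, iter, learningRadius));
    }
  }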