Timestamp: 04/08/16 14:24:51
Location: branches/PerformanceComparison
Files: 3 added, 5 edited
branches/PerformanceComparison/HeuristicLab.Analysis/3.3/HeuristicLab.Analysis-3.3.csproj
r13744 → r13750

@@ -157,4 +157,9 @@
       <SpecificVersion>False</SpecificVersion>
       <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.PluginInfrastructure-3.3.dll</HintPath>
+      <Private>False</Private>
+    </Reference>
+    <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
+      <SpecificVersion>False</SpecificVersion>
+      <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Random-3.3.dll</HintPath>
       <Private>False</Private>
     </Reference>
@@ -217,4 +222,6 @@
     <Compile Include="QualityAnalysis\QualityPerEvaluationsAnalyzer.cs" />
     <Compile Include="QualityAnalysis\ScaledQualityDifferenceAnalyzer.cs" />
+    <Compile Include="SelfOrganizingMaps\RelationalSOM.cs" />
+    <Compile Include="SelfOrganizingMaps\SOM.cs" />
     <Compile Include="Statistics\BonferroniHolm.cs" />
     <Compile Include="Statistics\EnumerableStatisticsExtension.cs" />
branches/PerformanceComparison/HeuristicLab.OptimizationExpertSystem.Common/3.3/HeuristicLab.OptimizationExpertSystem.Common-3.3.csproj
r13722 → r13750

@@ -132,4 +132,9 @@
       <Private>False</Private>
     </Reference>
+    <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
+      <SpecificVersion>False</SpecificVersion>
+      <HintPath>..\..\..\..\trunk\sources\bin\HeuristicLab.Random-3.3.dll</HintPath>
+      <Private>False</Private>
+    </Reference>
     <Reference Include="System" />
     <Reference Include="System.Core" />
branches/PerformanceComparison/HeuristicLab.OptimizationExpertSystem.Common/3.3/KnowledgeCenter.cs
r13748 → r13750

@@ -21,4 +21,5 @@
 
 using HeuristicLab.Analysis;
+using HeuristicLab.Analysis.SelfOrganizingMaps;
 using HeuristicLab.Collections;
 using HeuristicLab.Common;
@@ -29,4 +30,5 @@
 using HeuristicLab.Optimization;
 using HeuristicLab.Persistence.Default.Xml;
+using HeuristicLab.Random;
 using System;
 using System.Collections.Generic;
@@ -232,4 +234,13 @@
       alglib.pcabuildbasis(ds, ds.GetLength(0), ds.GetLength(1), out info, out s2, out v);
       #endregion
+      #region SOM
+      var features = new DoubleMatrix(commonCharacteristics.Count, instances.Count);
+      foreach (var instance in instances) {
+        var arr = instance.Value;
+        for (var feature = 0; feature < arr.Length; feature++)
+          features[feature, key2Idx.GetByFirst(instance.Key)] = arr[feature];
+      }
+      var somCoords = SOM.Map(features, new MersenneTwister(42), somSize: 20, learningRadius: 20, jittering: true);
+      #endregion
 
       ProblemInstances.UpdateOfRunsInProgress = true;
@@ -256,4 +267,11 @@
             ((DoubleValue)item).Value = coords[key2Idx.GetByFirst(instance.Name), 1];
           } else instance.Results.Add("Projection.MDS.Y", new DoubleValue(coords[key2Idx.GetByFirst(instance.Name), 1]));
+
+          if (instance.Results.TryGetValue("Projection.SOM.X", out item)) {
+            ((DoubleValue)item).Value = somCoords[key2Idx.GetByFirst(instance.Name), 0];
+          } else instance.Results.Add("Projection.SOM.X", new DoubleValue(somCoords[key2Idx.GetByFirst(instance.Name), 0]));
+          if (instance.Results.TryGetValue("Projection.SOM.Y", out item)) {
+            ((DoubleValue)item).Value = somCoords[key2Idx.GetByFirst(instance.Name), 1];
+          } else instance.Results.Add("Projection.SOM.Y", new DoubleValue(somCoords[key2Idx.GetByFirst(instance.Name), 1]));
         }
       } finally { ProblemInstances.UpdateOfRunsInProgress = false; }
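Note: the update-or-add pattern shown above is repeated for Projection.MDS.X/Y and Projection.SOM.X/Y. A small helper could factor it out; the sketch below is hypothetical (SetProjectionResult is not part of this changeset) and assumes the problem instances are IRun items whose Results dictionary maps result names to IItem values, as the calls above suggest.

  // Hypothetical helper, not introduced by r13750: update the existing DoubleValue
  // if the result is already present, otherwise add a new entry.
  private static void SetProjectionResult(IRun instance, string name, double value) {
    IItem item;
    if (instance.Results.TryGetValue(name, out item))
      ((DoubleValue)item).Value = value;
    else
      instance.Results.Add(name, new DoubleValue(value));
  }

  // Usage, mirroring the loop above:
  //   SetProjectionResult(instance, "Projection.SOM.X", somCoords[key2Idx.GetByFirst(instance.Name), 0]);
  //   SetProjectionResult(instance, "Projection.SOM.Y", somCoords[key2Idx.GetByFirst(instance.Name), 1]);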
branches/PerformanceComparison/HeuristicLab.OptimizationExpertSystem/3.3/Menu/900_Tools/910_NewCSharpScriptMenuItem.cs
r13720 → r13750

@@ -55,5 +55,5 @@
 
 public class MyScript : HeuristicLab.Scripting.CSharpScriptBase {
-  public ExpertSystem Instance { get { return ((OptimizationExpertSystem)HeuristicLab.MainForm.MainFormManager.MainForm).ExpertSystem; } }
+  public KnowledgeCenter Instance { get { return ((OptimizationKnowledgeCenter)HeuristicLab.MainForm.MainFormManager.MainForm).ExpertSystem; } }
 
   public override void Main() {
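Note: the lines above are the C# script stub generated by the menu item; its Instance property now returns the KnowledgeCenter of the OptimizationKnowledgeCenter main form rather than the former ExpertSystem type. A minimal, hypothetical Main body (not part of this changeset) that only assumes KnowledgeCenter exposes the ProblemInstances run collection seen in KnowledgeCenter.cs above:

  public override void Main() {
    // List the problem instances currently held by the knowledge center
    // (assumes ProblemInstances is an enumerable collection of runs, each with a Name).
    foreach (var run in Instance.ProblemInstances)
      Console.WriteLine(run.Name);
  }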
branches/PerformanceComparison/HeuristicLab.OptimizationExpertSystem/3.3/Views/UnderstandingSolutionsView.cs
r13745 → r13750

@@ -22,4 +22,5 @@
 using HeuristicLab.Analysis;
 using HeuristicLab.Analysis.QualityAnalysis;
+using HeuristicLab.Analysis.SelfOrganizingMaps;
 using HeuristicLab.Common;
 using HeuristicLab.Core;
@@ -241,5 +242,5 @@
       DoubleMatrix coords = null;
       if (projection == "SOM")
-        coords = Som(dissimilarities, new MersenneTwister(42), jittering: true);
+        coords = RelationalSOM.Map(dissimilarities, new MersenneTwister(42), jittering: true);
       else coords = MultidimensionalScaling.KruskalShepard(dissimilarities);
       var dataPoints = new List<DataPoint>();
@@ -382,108 +383,4 @@
       return dval.Value;
     }
-
-    #region SOM projection
-    /// <summary>
-    /// This is the online algorithm described in
-    /// Olteanu, M. and Villa-Vialaneix, N. 2015.
-    /// On-line relational and multiple relational SOM.
-    /// Neurocomputing 147, pp. 15-30.
-    /// </summary>
-    /// <param name="dissimilarities">The full NxN matrix containing all dissimilarities between N points.</param>
-    /// <param name="random">The random number generator to use.</param>
-    /// <param name="somSize">The length of a side of the SOM grid (there are somSize * somSize neurons).</param>
-    /// <param name="iterations">The amount of iterations to perform in learning.</param>
-    /// <param name="learningRate">The initial learning rate.</param>
-    /// <param name="learningRadius">The initial learning radius.</param>
-    /// <param name="jittering">If the final coordinates should be jittered slightly within the grid.</param>
-    /// <returns>A matrix of coordinates having N rows and 2 columns.</returns>
-    private DoubleMatrix Som(DoubleMatrix dissimilarities, IRandom random, int somSize = 5, int iterations = 100, double learningRate = double.NaN, double learningRadius = 5.0, bool jittering = true) {
-      var K = somSize * somSize;
-      var N = dissimilarities.Rows;
-      if (double.IsNaN(learningRate)) learningRate = 1.0 / Math.Sqrt(2.0 * N);
-      var fixedLearningRate = learningRate / 10.0;
-      var varLearningRate = 9.0 * fixedLearningRate;
-      Func<int, int, double> learningRateT = (maxIter, iter) => {
-        return varLearningRate * ((maxIter - iter) / (double)maxIter) + fixedLearningRate;
-      };
-      Func<int, int> getX = (neuron) => neuron % somSize;
-      Func<int, int> getY = (neuron) => neuron / somSize;
-      Func<int, int, int, int, double> neighborhood = (maxIter, iter, k, bmu) => {
-        var sigma = 1.0 * ((maxIter - iter) / (double)maxIter) + 0.0001;
-        var xK = getX(k);
-        var yK = getY(k);
-        var xW = getX(bmu);
-        var yW = getY(bmu);
-        var d = (xK - xW) * (xK - xW) + (yK - yW) * (yK - yW);
-        return Math.Exp(-d / (2.0 * sigma * sigma));
-      };
-      var alphas = Enumerable.Range(0, K).Select(k => Enumerable.Range(0, N).Select(_ => random.NextDouble()).ToArray()).ToArray();
-      // normalize s.t. sum(alphas[k]) = 1
-      for (var k = 0; k < K; k++) {
-        var sum = alphas[k].Sum();
-        for (var i = 0; i < alphas[k].Length; i++) alphas[k][i] /= sum;
-      }
-      var oldAlphas = alphas.Select(x => (double[])x.Clone()).ToArray();
-
-      for (var iter = 0; iter < iterations; iter++) {
-        var pointShuffle = Enumerable.Range(0, N).Shuffle(random).ToArray();
-        for (var p = 0; p < N; p++) {
-          var i = pointShuffle[p];
-          var bmu = GetBestMatchingUnit(dissimilarities, alphas, i);
-
-          for (var k = 0; k < K; k++) {
-            for (var j = 0; j < N; j++) {
-              alphas[k][j] = oldAlphas[k][j] + learningRateT(iterations, iter) * neighborhood(iterations, iter, k, bmu) * ((i == j ? 1.0 : 0.0) - oldAlphas[k][j]);
-            }
-          }
-        }
-        for (var k = 0; k < K; k++) {
-          for (var j = 0; j < N; j++) {
-            oldAlphas[k][j] = alphas[k][j];
-          }
-        }
-      }
-
-      var result = new DoubleMatrix(N, 2);
-      for (var i = 0; i < N; i++) {
-        var bmu = GetBestMatchingUnit(dissimilarities, alphas, i);
-        if (!jittering) {
-          result[i, 0] = getX(bmu);
-          result[i, 1] = getY(bmu);
-        } else {
-          result[i, 0] = getX(bmu) + random.NextDouble() * 0.8;
-          result[i, 1] = getY(bmu) + random.NextDouble() * 0.8;
-        }
-      }
-      return result;
-    }
-
-    private int GetBestMatchingUnit(DoubleMatrix D, double[][] alphas, int i) {
-      var bmu = -1;
-      var minV = double.MaxValue;
-      for (var k = 0; k < alphas.Length; k++) {
-        var Daki = 0.0;
-        var akDak = 0.0;
-        for (var r = 0; r < D.Rows; r++) {
-          var Dakr = 0.0;
-          for (var s = 0; s < D.Rows; s++) {
-            Dakr += D[r, s] * alphas[k][s];
-          }
-          if (r == i) Daki = Dakr;
-          akDak += alphas[k][r] * Dakr;
-        }
-        var v = Daki - 0.5 * akDak;
-        if (v < minV) {
-          bmu = k;
-          minV = v;
-        }
-      }
-      return bmu;
-    }
-    #endregion
-
-    private DoubleMatrix Mds(DoubleMatrix dissimilarities) {
-      return MultidimensionalScaling.KruskalShepard(dissimilarities);
-    }
     #endregion
   }
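Note: the Som/GetBestMatchingUnit pair removed above was evidently relocated to the new HeuristicLab.Analysis.SelfOrganizingMaps files (RelationalSOM.cs, SOM.cs) added to HeuristicLab.Analysis in this changeset, so the view now only dispatches between the two projections. A minimal sketch of that call site, assuming RelationalSOM.Map kept the removed method's signature and defaults (somSize = 5, iterations = 100, learningRadius = 5.0); the Project helper name is hypothetical:

  private DoubleMatrix Project(DoubleMatrix dissimilarities, string projection) {
    if (projection == "SOM") {
      // Relational SOM (Olteanu & Villa-Vialaneix 2015): point i is assigned to the
      // unit k minimizing [D*a_k]_i - 0.5 * a_k^T * D * a_k (its best matching unit);
      // jittering then spreads points randomly within the winning grid cell.
      return RelationalSOM.Map(dissimilarities, new MersenneTwister(42), jittering: true);
    }
    // Otherwise fall back to Kruskal-Shepard multidimensional scaling.
    return MultidimensionalScaling.KruskalShepard(dissimilarities);
  }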