#region License Information
/* HeuristicLab
* Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using HeuristicLab.Random;
using ProtoBuf;
using static HeuristicLab.Analysis.FitnessLandscape.QAPDirectedWalk;
namespace WalkExporter {
class Program {
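// Names of the landscape feature groups used for instance matching:
// SBF (sharpness/bumpiness/flatness from directed walks), RUG (autocorrelation-based
// ruggedness measures), IAL (information analysis measures), plus the regular-only
// (IALREG) and symmetric (IALSYM) subsets of IAL.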
public static readonly string[] SBF = new[] { "Sharpness", "Bumpiness", "Flatness" };
public static readonly string[] RUG = new[] {
"AC1", "CorrelationLength" };
public static readonly string[] IAL = new[] {
"InformationContent", "DensityBasinInformation", "PartialInformationContent",
"InformationStability", "Diversity", "Regularity", "TotalEntropy", "SymmetricInformationContent",
"SymmetricDensityBasinInformation", "SymmetricTotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation",
"PeakTotalEntropy", "PeakSymmetricInformationContent", "PeakSymmetricDensityBasinInformation", "PeakSymmetricTotalEntropy" };
public static readonly string[] IALREG = new[] {
"InformationContent", "DensityBasinInformation", "PartialInformationContent",
"InformationStability", "Diversity", "Regularity", "TotalEntropy",
"PeakInformationContent", "PeakDensityBasinInformation", "PeakTotalEntropy"
};
public static readonly string[] IALSYM = new[] {
"PartialInformationContent", "InformationStability", "Diversity", "Regularity",
"SymmetricInformationContent", "SymmetricDensityBasinInformation", "SymmetricTotalEntropy",
"PeakSymmetricInformationContent", "PeakSymmetricDensityBasinInformation", "PeakSymmetricTotalEntropy"
};
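// Entry point: loads the QAPLIB instances tai30a and esc32f and runs a confined
// random walk analysis on each; the identification experiments below are currently disabled.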
static void Main(string[] args) {
//AnalyzeRandomWalkIdentification();
//AnalyzeDirectedWalkIdentification();
var provider = new HeuristicLab.Problems.Instances.QAPLIB.QAPLIBInstanceProvider();
var tai30a = provider.LoadData(provider.GetDataDescriptors().Single(x => x.Name == "tai30a"));
RandomWalk.ConfinedRandomWalkAnalysis(tai30a);
var esc32f = provider.LoadData(provider.GetDataDescriptors().Single(x => x.Name == "esc32f"));
RandomWalk.ConfinedRandomWalkAnalysis(esc32f);
}
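// Compares the random-walk knowledge bases: for every train/test effort combination and
// every feature set, evaluates how well test instances can be identified and writes the
// results to randwalk_results.csv.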
private static void AnalyzeRandomWalkIdentification() {
string[] RUG_IAL = RUG.Concat(IAL).ToArray();
var trainFiles = GetFiles(@"randwalk_kb_train_(?<eff>\d+)");
var testFiles = GetFiles(@"randwalk_kb_test_(?<eff>\d+)");
var filename = "randwalk_results.csv";
var features = new[] { (Name: "RUG", Set: RUG), (Name: "IAL", Set: IAL),
(Name: "IALREG", Set: IALREG), (Name: "IALSYM", Set: IALSYM), (Name: "RUG_IAL", Set: RUG_IAL) };
using (var writer = File.CreateText(filename)) {
CompareMatching(trainFiles, testFiles, features, "randwalk", writer);
}
}
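// Same identification analysis for the directed-walk knowledge bases; the (rr)-dw and
// (rl)-dw variants are currently commented out, (ll)-dw and (li)-dw are evaluated.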
private static void AnalyzeDirectedWalkIdentification() {
string[] RUG_IAL = RUG.Concat(IAL).ToArray();
string[] SBF_RUG = SBF.Concat(RUG).ToArray();
string[] SBF_IAL = SBF.Concat(IAL).ToArray();
string[] SBF_IALREG = SBF.Concat(IALREG).ToArray();
string[] SBF_IALSYM = SBF.Concat(IALSYM).ToArray();
var features = new[] { (Name: "RUG", Set: RUG), (Name: "IAL", Set: IAL),
(Name: "IALREG", Set: IALREG), (Name: "IALSYM", Set: IALSYM), (Name: "SBF", Set: SBF),
(Name: "RUG_IAL", Set: RUG_IAL), (Name: "SBF_RUG", Set: SBF_RUG), (Name: "SBF_IAL", Set: SBF_IAL),
(Name: "SBF_IALREG", Set: SBF_IALREG), (Name: "SBF_IALSYM", Set: SBF_IALSYM) };
var trainFiles = GetFiles(@"rrdw_best_kb_train_(?<eff>\d+)_qap");
var testFiles = GetFiles(@"rrdw_best_kb_test_(?<eff>\d+)_qap");
var filename = "rrdw_best_results.csv";
//using (var writer = File.CreateText(filename)) {
// CompareMatching(trainFiles, testFiles, features, "(rr)-dw", writer);
//}
//trainFiles = GetFiles(@"rldw_best_kb_train_(?<eff>\d+)_qap");
//testFiles = GetFiles(@"rldw_best_kb_test_(?<eff>\d+)_qap");
//filename = "rldw_best_results.csv";
//using (var writer = File.CreateText(filename)) {
// CompareMatching(trainFiles, testFiles, features, "(rl)-dw", writer);
//}
trainFiles = GetFiles(@"lldw_best_kb_train_(?<eff>\d+)_qap");
testFiles = GetFiles(@"lldw_best_kb_test_(?<eff>\d+)_qap");
filename = "lldw_best_results.csv";
using (var writer = File.CreateText(filename)) {
CompareMatching(trainFiles, testFiles, features, "(ll)-dw", writer);
}
trainFiles = GetFiles(@"lidw_best_kb_train_(?<eff>\d+)_qap");
testFiles = GetFiles(@"lidw_best_kb_test_(?<eff>\d+)_qap");
filename = "lidw_best_results.csv";
using (var writer = File.CreateText(filename)) {
CompareMatching(trainFiles, testFiles, features, "(li)-dw", writer);
}
}
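// Collects all *.buf files in the working directory whose name matches the given
// pattern and extracts the effort from the named capture group "eff".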
private static List<(string Filename, int Effort)> GetFiles(string pattern) {
// randwalk_kb_(train|test)_{n}.buf
// {type}dw_best_kb_(train|test)_{n}_qap.buf
return Directory.EnumerateFiles(".").Where(x => x.EndsWith(".buf"))
.Select(x => {
var match = Regex.Match(Path.GetFileName(x), pattern);
if (match.Success) {
return (Filename: x, Effort: int.Parse(match.Groups["eff"].Value));
}
return (Filename: "", Effort: -1);
}).Where(x => !string.IsNullOrEmpty(x.Filename)).ToList();
}
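// For each feature set and problem dimension (20, 30, 40), deserializes every pair of
// train/test knowledge bases, keeps only problems of that dimension, standardizes the
// features on the training problems, applies the same standardization to the test
// problems, and writes one tab-separated result line per combination.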
private static void CompareMatching(List<(string Filename, int Effort)> trainFiles,
List<(string Filename, int Effort)> testFiles, (string Name, string[] Set)[] featuresets,
string type,
StreamWriter writer) {
var random = new MersenneTwister(42);
var header = string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}",
"Dimension", "FSet", "Type", "TrainEff", "TestEff", "ExCnt", "ExRnk", "ClsCnt", "ClsRnk", "TotCnt", "TrainEffSolEquiv", "TestEffSolEquiv");
writer.WriteLine(header);
Console.WriteLine(header);
foreach (var features in featuresets) {
foreach (var dim in new[] { 20, 30, 40 }) {
foreach (var a in trainFiles) {
Knowledgebase train = null;
using (var stream = File.OpenRead(a.Filename))
train = Serializer.Deserialize<Knowledgebase>(stream);
train.Problems.RemoveAll(x => x.Dimension != dim);
if (train.Problems.Count == 0) throw new InvalidOperationException("Dimension does not exist: " + dim);
var standardizer = InstancesStandardizer.CreateAndApply(train.Problems, features.Set);
foreach (var b in testFiles) {
Knowledgebase test = null;
using (var stream = File.OpenRead(b.Filename))
test = Serializer.Deserialize<Knowledgebase>(stream);
test.Problems.RemoveAll(x => x.Dimension != dim);
standardizer.Apply(test.Problems);
// MATCH
var match = EvaluateMatch(random, train, test, new HashSet<string>(features.Set));
//correlation analysis
//var corr = AnalyzeFeatureCorrelation(features.Set, train, test);
string output = string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6:F2}\t{7}\t{8:F2}\t{9}\t{10:F2}\t{11:F2}",
dim, features.Name, type, a.Effort, b.Effort, match.ExactCount,
match.ExactAverageRank, match.ClsCount, match.ClsAverageRank, match.TotalCount,
match.TrainingDescriptionEffort, match.TestDescriptionEffort);
writer.WriteLine(output);
Console.WriteLine(output);
}
}
}
}
}
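// For every training problem, ranks the test problems by squared Euclidean distance in
// the selected feature space and records the rank of the exact same instance as well as
// the rank of the first instance of the same class; ranks and description efforts are
// averaged over all training problems.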
private static MatchResult EvaluateMatch(MersenneTwister random, Knowledgebase train, Knowledgebase test, ISet<string> features) {
var result = new MatchResult();
foreach (var x in train.Problems) {
var ranked = test.Problems.Shuffle(random).Select(y => new {
Instance = y,
Distance = (from xx in x.Features.Where(f => features.Contains(f.Key))
from yy in y.Features.Where(f => features.Contains(f.Key))
where xx.Key == yy.Key
let vxx = xx.GetNumericValue()
let vyy = yy.GetNumericValue()
select (vxx - vyy) * (vxx - vyy)).Sum(),
}).OrderBy(xx => xx.Distance).ToList();
var exactRank = -1;
var clsRank = -1;
var count = 1;
foreach (var r in ranked) {
result.TestDescriptionEffort += r.Instance.DescriptionEffort;
if (clsRank < 0 && r.Instance.Class == x.Class) {
clsRank = count;
}
if (r.Instance.Name == x.Name) {
exactRank = count;
break;
}
count++;
}
result.TestDescriptionEffort /= test.Problems.Count;
if (exactRank == 1) result.ExactCount++;
if (clsRank == 1) result.ClsCount++;
result.TotalCount++;
result.TrainingDescriptionEffort += x.DescriptionEffort;
result.ExactAverageRank += exactRank;
result.ClsAverageRank += clsRank;
}
result.TrainingDescriptionEffort /= train.Problems.Count;
result.ExactAverageRank /= train.Problems.Count;
result.ClsAverageRank /= train.Problems.Count;
return result;
}
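// Builds one feature matrix per knowledge base and returns the cross-correlation
// (Pearson) matrix between training and test features computed by ALGLIB.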
private static double[,] AnalyzeFeatureCorrelation(string[] features, Knowledgebase train, Knowledgebase test) {
var trainMat = new double[train.Problems.Count, features.Length];
var testMat = new double[test.Problems.Count, features.Length];
int trainCount = 0, testCount = 0;
foreach (var x in train.Problems) {
var xFeatures = x.GetNumericFeatures(features);
foreach (var f in xFeatures.Select((v, i) => new { Index = i, Value = v })) {
trainMat[trainCount, f.Index] = f.Value;
}
trainCount++;
}
foreach (var y in test.Problems) {
var yFeatures = y.GetNumericFeatures(features);
foreach (var f in yFeatures.Select((v, i) => new { Index = i, Value = v })) {
testMat[testCount, f.Index] = f.Value;
}
testCount++;
}
double[,] corr;
alglib.pearsoncorrm2(trainMat, testMat, out corr);
return corr;
}
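// Runs the random-walk experiment, serializes it, and exports train/test knowledge
// bases for walk lengths 2^7 to 2^18.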
private static void DoRandomWalk() {
var experiment = RandomWalk.PerformExperiment();
Serializer.Serialize(File.Create("randwalk_trials_qap.buf"), experiment);
foreach (var exp in Enumerable.Range(7, 18 - 6)) {
var len = (int)Math.Pow(2, exp);
var (training, test) = RandomWalk.GetKnowledgeBases(experiment, len);
Serializer.Serialize(File.Create($"randwalk_kb_train_{exp}.buf"), training);
Serializer.Serialize(File.Create($"randwalk_kb_test_{exp}.buf"), test);
}
}
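// Runs the directed-walk experiment for each walk type (random-random, random-local,
// local-local, local-inverse) and saves the results under a type-specific prefix.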
private static void DoDirectedWalk() {
var (exp, train, test) = DirectedWalk.PerformExperiment(WalkType.RandomRandom);
Save("rrdw_best", exp, train, test);
(exp, train, test) = DirectedWalk.PerformExperiment(WalkType.RandomLocal);
Save("rldw_best", exp, train, test);
(exp, train, test) = DirectedWalk.PerformExperiment(WalkType.LocalLocal);
Save("lldw_best", exp, train, test);
(exp, train, test) = DirectedWalk.PerformExperiment(WalkType.LocalInverse);
Save("lidw_best", exp, train, test);
}
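// Serializes the experiment and, for every effort key, the corresponding train and test
// knowledge base to protobuf files (train/test are assumed to map effort to knowledge base).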
private static void Save(string v, Experiment exp, Dictionary<int, Knowledgebase> train, Dictionary<int, Knowledgebase> test) {
Serializer.Serialize(File.Create(v + "_experiment_qap.buf"), exp);
foreach (var t in train.Keys) {
Serializer.Serialize(File.Create(v + $"_kb_train_{t}_qap.buf"), train[t]);
Serializer.Serialize(File.Create(v + $"_kb_test_{t}_qap.buf"), test[t]);
}
}
}
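// Aggregated matching statistics: counts of exact and class matches, average ranks,
// and mean description efforts of the training and test instances.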
public class MatchResult {
public int ExactCount { get; set; }
public int ClsCount { get; set; }
public int TotalCount { get; set; }
public double ExactAverageRank { get; set; }
public double ClsAverageRank { get; set; }
public double TrainingDescriptionEffort { get; set; }
public double TestDescriptionEffort { get; set; }
}
}