using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using HeuristicLab.Analysis.FitnessLandscape;
using HeuristicLab.Problems.Instances.QAPLIB;
using HeuristicLab.Problems.QuadraticAssignment;
using HeuristicLab.Random;

namespace ProblemInstanceIdentifier {
  class Program {
    static void Main(string[] args) {
      /*var classes = instances.Select(InstanceDescriptor.FromProblemOnly).GroupBy(x => x.Cls);
      foreach (var cls in classes.OrderBy(x => x.Key)) {
        Console.WriteLine("{0};{1}", cls.Key, cls.Count());
      }*/

      // Feature subsets used to describe an instance, keyed by the label written to the results file.
      var dwFeatureSets = new Dictionary<string, HashSet<string>>() {
        { "SBF", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness" } },
        { "FRQ", new HashSet<string>() { "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3" } },
        { "SBF+FRQ", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness",
          "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3" } },
        { "RUG", new HashSet<string>() { "AutoCorrelation1", "CorrelationLength" } },
        { "IAL", new HashSet<string>() { "InformationContent", "DensityBasinInformation", "PartialInformationContent",
          "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "RUG+IAL", new HashSet<string>() { "AutoCorrelation1", "CorrelationLength",
          "InformationContent", "DensityBasinInformation", "PartialInformationContent",
          "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "SBF+IAL", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness",
          "InformationContent", "DensityBasinInformation", "PartialInformationContent",
          "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "FRQ+IAL", new HashSet<string>() { "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3",
          "InformationContent", "DensityBasinInformation", "PartialInformationContent",
          "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "SBF+FRQ+IAL", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness",
          "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3",
          "InformationContent", "DensityBasinInformation", "PartialInformationContent",
          "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "SBF+FRQ+RUG+IAL", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness",
          "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3",
          "AutoCorrelation1", "CorrelationLength",
          "InformationContent", "DensityBasinInformation", "PartialInformationContent",
          "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
      };
      // Directed-walk variants used to sample the landscape.
      var dwTypes = new Dictionary<string, QAPDirectedWalk.WalkType> {
        { "(rr)-dw", QAPDirectedWalk.WalkType.RandomRandom },
        { "(rl)-dw", QAPDirectedWalk.WalkType.RandomLocal },
        { "(ll)-dw", QAPDirectedWalk.WalkType.LocalLocal },
        { "(li)-dw", QAPDirectedWalk.WalkType.LocalInverse }
      };
      var dwPaths = new List<int> { 1, 2, 5, 10, 20, 50, 100, 200, 500 };
      var random = new Random();

      using (var writer = File.CreateText("results.csv")) {
        writer.AutoFlush = true;
        // Dim is part of the row label so that results of different problem sizes remain distinguishable.
        var header = string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}\t{12}",
          "Dim", "Rep", "FSet", "Type", "TrainEff", "TestEff", "ExCnt", "ExRnk", "ClsCnt", "ClsRnk", "TotCnt", "TrainEff2", "TestEff2");
        writer.WriteLine(header);
        Console.WriteLine(header);
        foreach (var dim in new[] { 20, 30, 40, 50, 100 }) {
          foreach (var feat in dwFeatureSets) {
            foreach (var type in dwTypes) {
              for (var rep = 0; rep < 10; rep++) {
                var instances = GetSomeRandomInstances(dimension: dim, totalInstances: 20, seed: random.Next());
                var explorers = from p in dwPaths
                                select new PathRelinkingExplorer() { Features = feat.Value, Paths = p, Type = type.Value };
                // Enumerating the deferred query twice yields independent explorer instances
                // for the training and the test side.
                ExploreMatching(writer, instances, explorers.ToArray(), explorers.ToArray(),
                  string.Format("{0}\t{1}\t{2}\t{3}", dim, rep, feat.Key, type.Key));
              }
            }
          }
        }
      }

      //var training = GenerateData(instances, rrDwExplorer, parallel: true);
      //var standardizer = InstancesStandardizer.CreateAndApply(training);
      //var test = GenerateData(instances, rrDwExplorer, parallel: true);
      //standardizer.Apply(test);
      //PrintMatchResult(Compare(training, test));

      //Console.WriteLine("=== Path Relinking Walk ===");
      //ExploreMatching(instances,
      //  new[] { 1, 5, 10, 20, 50, 100, 200 }.Select(x => new PathRelinkingExplorer() { LocalOptima = false, Paths = x }).ToArray(),
      //  new[] { 1, 5, 10, 20, 50, 100, 200 }.Select(x => new PathRelinkingExplorer() { LocalOptima = false, Paths = x }).ToArray(),
      //  parallel: true);
      //Console.WriteLine("=== Random Walk ===");
      //ExploreMatching(instances,
      //  new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new RandomWalkExplorer() { Iterations = x }).ToArray(),
      //  new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new RandomWalkExplorer() { Iterations = x }).ToArray(),
      //  parallel: true);
      //Console.WriteLine("=== Adaptive Walk ===");
      //ExploreMatching(instances,
      //  new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new AdaptiveWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //  new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new AdaptiveWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //  parallel: true);
      //Console.WriteLine("=== Up/Down Walk ===");
      //ExploreMatching(instances,
      //  new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new UpDownWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //  new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new UpDownWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //  parallel: true);
    }
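    // GetSomeRandomInstances draws up to totalInstances QAPLIB problems of the given dimension.
    // The descriptor shuffle is reproducible for a fixed seed, but because loading runs in
    // parallel, which of the shuffled instances make the cut can still vary between runs.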
    private static List<QuadraticAssignmentProblem> GetSomeRandomInstances(int dimension, int totalInstances, int seed) {
      var sync = new object();
      var provider = new OneSizeInstanceProvider(dimension);
      var instances = new List<QuadraticAssignmentProblem>();
      var random = new FastRandom(seed);
      Parallel.ForEach(provider.GetDataDescriptors().Shuffle(random), desc => {
        var qapData = provider.LoadData(desc);
        // Unsynchronized pre-check; the authoritative check happens under the lock below.
        if (instances.Count >= totalInstances) return;
        var qap = new QuadraticAssignmentProblem();
        qap.Load(qapData);
        lock (sync) {
          if (instances.Count >= totalInstances) return;
          instances.Add(qap);
        }
      });
      return instances;
    }

    private static List<InstanceDescriptor> GenerateData(List<QuadraticAssignmentProblem> instances, InstanceExplorer explorer, bool parallel = false) {
      var sync = new object();
      var data = new List<InstanceDescriptor>();
      Action<QuadraticAssignmentProblem> body = (qap) => {
        var instance = explorer.Explore(qap);
        if (instance == null) return;
        lock (sync) { data.Add(instance); }
      };
      if (parallel) {
        // Clone so that parallel exploration does not mutate the shared problem instances.
        Parallel.ForEach(instances.Select(x => (QuadraticAssignmentProblem)x.Clone()), body);
      } else {
        foreach (var qap in instances) body(qap);
      }
      return data;
    }

    private static MatchResult Compare(List<InstanceDescriptor> training, List<InstanceDescriptor> test) {
      int exactCount = 0, clsCount = 0, totalCount = 0;
      int exactRank = 0, clsRank = 0;
      foreach (var e in test) {
        // Ascending order assumes CalculateSimilarity acts like a distance (smaller = more similar).
        var ordered = training.OrderBy(x => x.CalculateSimilarity(e)).ToList();
        var bestMatch = ordered.First();
        if (bestMatch.Cls == e.Cls) clsCount++;
        if (bestMatch.Name == e.Name) exactCount++;
        var r = ordered.FindIndex(id => id.Name == e.Name);
        if (r < 0) continue; // the test instance is not contained in the training set
        totalCount++;
        exactRank += r + 1;
        clsRank += ordered.FindIndex(id => id.Cls == e.Cls) + 1;
      }
      // Note: the average ranks are NaN when no test instance was found in the training set.
      return new MatchResult() {
        ExactCount = exactCount,
        ClsCount = clsCount,
        TotalCount = totalCount,
        ExactAverageRank = exactRank / (double)totalCount,
        ClsAverageRank = clsRank / (double)totalCount,
        TrainingDescriptionEffort = training.Average(x => x.DescriptionEffort),
        TestDescriptionEffort = test.Average(x => x.DescriptionEffort)
      };
    }
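    // PrintData and PrintInstanceLine are debugging helpers that dump descriptors as
    // semicolon-separated lines (Name;Cls;Dimension;Effort;feature values...) to the
    // console; they are not referenced from Main in this revision.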
    private static void PrintData(List<InstanceDescriptor> instances) {
      using (var iter = instances.GetEnumerator()) {
        if (!iter.MoveNext()) return;
        Console.WriteLine(string.Join(";", new[] { "Name", "Cls", "Dimension", "Effort" }
          .Concat(iter.Current != null ? iter.Current.FeatureNames : new[] { "(null)" })));
        do {
          PrintInstanceLine(iter.Current);
        } while (iter.MoveNext());
      }
    }

    private static void PrintInstanceLine(InstanceDescriptor instance) {
      Console.WriteLine(string.Join(";", new[] {
          instance.Name, instance.Cls,
          instance.Dimension.ToString(CultureInfo.CurrentCulture),
          instance.DescriptionEffort.ToString(CultureInfo.CurrentCulture)
        }.Concat(instance.FeatureValues.Select(x => x.ToString(CultureInfo.CurrentCulture)))));
    }

    private static void ExploreMatching(StreamWriter writer, List<QuadraticAssignmentProblem> instances,
                                        InstanceExplorer[] trainingExplorers, InstanceExplorer[] testExplorers,
                                        string info, bool parallel = false) {
      var sync = new object();
      // For every training explorer: generate descriptors, fit a standardizer on them, keep both.
      var knowledgeBase = new Dictionary<InstanceExplorer, Tuple<InstancesStandardizer, List<InstanceDescriptor>>>();
      Action<InstanceExplorer> trainingBody = (kbExplorer) => {
        var trainingData = GenerateData(instances, kbExplorer, parallel);
        var standardizer = InstancesStandardizer.Create(trainingData);
        standardizer.Apply(trainingData);
        lock (sync) { knowledgeBase.Add(kbExplorer, Tuple.Create(standardizer, trainingData)); }
      };
      if (parallel) {
        Parallel.ForEach(trainingExplorers, trainingBody);
      } else {
        foreach (var kbExplorer in trainingExplorers) trainingBody(kbExplorer);
      }

      var experimentBase = new Dictionary<InstanceExplorer, List<InstanceDescriptor>>();
      Action<InstanceExplorer> testBody = (expExplorer) => {
        var testData = GenerateData(instances, expExplorer, parallel);
        lock (sync) { experimentBase.Add(expExplorer, testData); }
      };
      if (parallel) {
        Parallel.ForEach(testExplorers, testBody);
      } else {
        foreach (var expExplorer in testExplorers) testBody(expExplorer);
      }

      // Cross every knowledge base with every test set.
      var data = from kb in knowledgeBase
                 from exp in experimentBase
                 select new { Training = kb, Test = exp };
      // Column order matches the header written in Main; average ranks get two decimals.
      Action<InstanceExplorer, InstanceExplorer, MatchResult> writeResult = (train, test, result) => {
        string output = string.Format("{0}\t{1}\t{2}\t{3}\t{4:F2}\t{5}\t{6:F2}\t{7}\t{8}\t{9}",
          info, train.Effort, test.Effort,
          result.ExactCount, result.ExactAverageRank, result.ClsCount, result.ClsAverageRank,
          result.TotalCount, result.TrainingDescriptionEffort, result.TestDescriptionEffort);
        writer.WriteLine(output);
        Console.WriteLine(output);
      };
      if (parallel) {
        Parallel.ForEach(data, (point) => {
          // Copy the test descriptors so each knowledge base can standardize them independently.
          var normalizedTest = point.Test.Value.Select(x => new InstanceDescriptor(x)).ToList();
          point.Training.Value.Item1.Apply(normalizedTest);
          var result = Compare(point.Training.Value.Item2, normalizedTest);
          lock (sync) { writeResult(point.Training.Key, point.Test.Key, result); }
        });
      } else {
        foreach (var point in data) {
          var normalizedTest = point.Test.Value.Select(x => new InstanceDescriptor(x)).ToList();
          point.Training.Value.Item1.Apply(normalizedTest);
          var result = Compare(point.Training.Value.Item2, normalizedTest);
          writeResult(point.Training.Key, point.Test.Key, result);
        }
      }
    }
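    // MatchResult aggregates one training/test comparison: ExactCount and ClsCount are the
    // numbers of test descriptors whose nearest training descriptor shares the same name or
    // class, the average ranks give the mean position of the correct name/class in the
    // similarity-ordered training list, and the effort values average the description cost
    // on both sides.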
    private class MatchResult {
      public int ExactCount { get; set; }
      public int ClsCount { get; set; }
      public int TotalCount { get; set; }
      public double ExactAverageRank { get; set; }
      public double ClsAverageRank { get; set; }
      public double TrainingDescriptionEffort { get; set; }
      public double TestDescriptionEffort { get; set; }
    }
  }
}