
source: branches/2457_ExpertSystem/ProblemInstanceIdentifier/Program.cs @ 17687

Last change on this file since 17687 was 16955, checked in by abeham, 5 years ago

#2457: worked on thesis

File size: 13.1 KB
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using HeuristicLab.Analysis.FitnessLandscape;
using HeuristicLab.Problems.Instances.QAPLIB;
using HeuristicLab.Problems.QuadraticAssignment;
using HeuristicLab.Random;

namespace ProblemInstanceIdentifier {
  class Program {
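    // Runs the instance-identification experiment: for QAP instances of dimension 20, 30 and 40,
    // every feature set and directed-walk type is evaluated in 10 repetitions on 20 randomly
    // chosen QAPLIB instances; the matching results go to results.csv and to the console.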
    static void Main(string[] args) {

      /*var classes = instances.Select(InstanceDescriptor.FromProblemOnly).GroupBy(x => x.Cls);
      foreach (var cls in classes.OrderBy(x => x.Key)) {
        Console.WriteLine("{0};{1}", cls.Key, cls.Count());
      }*/

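      // Named subsets of directed-walk features: SBF = sharpness/bumpiness/flatness,
      // FRQ = down/neutral/up quantile frequencies, RUG = autocorrelation-based features,
      // IAL = information-analysis features; combined keys take the union of their groups.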
      var dwFeatureSets = new Dictionary<string, HashSet<string>>() {
        { "SBF", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness" } },
        { "FRQ", new HashSet<string>() { "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3" } },
        { "SBF+FRQ", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness", "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3" } },
        { "RUG", new HashSet<string>() { "AutoCorrelation1", "CorrelationLength" } },
        { "IAL", new HashSet<string>() { "InformationContent", "DensityBasinInformation", "PartialInformationContent", "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "RUG+IAL", new HashSet<string>() { "AutoCorrelation1", "CorrelationLength", "InformationContent", "DensityBasinInformation", "PartialInformationContent", "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "SBF+IAL", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness", "InformationContent", "DensityBasinInformation", "PartialInformationContent", "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "FRQ+IAL", new HashSet<string>() { "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3", "InformationContent", "DensityBasinInformation", "PartialInformationContent", "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "SBF+FRQ+IAL", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness", "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3", "InformationContent", "DensityBasinInformation", "PartialInformationContent", "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
        { "SBF+FRQ+RUG+IAL", new HashSet<string>() { "Sharpness", "Bumpiness", "Flatness", "DownQ2", "NeutQ2", "UpQ2", "DownQ1", "NeutQ1", "UpQ1", "DownQ3", "NeutQ3", "UpQ3", "AutoCorrelation1", "CorrelationLength", "InformationContent", "DensityBasinInformation", "PartialInformationContent", "Regularity", "TotalEntropy", "PeakInformationContent", "PeakDensityBasinInformation" } },
      };

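      // Directed-walk variants; the label encodes how start and end solutions of each walk are
      // chosen (random-random, random-local, local-local, local-inverse), see QAPDirectedWalk.WalkType.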
      var dwTypes = new Dictionary<string, QAPDirectedWalk.WalkType> {
        { "(rr)-dw", QAPDirectedWalk.WalkType.RandomRandom },
        { "(rl)-dw", QAPDirectedWalk.WalkType.RandomLocal },
        { "(ll)-dw", QAPDirectedWalk.WalkType.LocalLocal },
        { "(li)-dw", QAPDirectedWalk.WalkType.LocalInverse }
      };

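      // Numbers of directed-walk paths sampled per instance descriptor.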
      var dwPaths = new List<int> { 1, 2, 5, 10, 20, 50, 100, 200, 500 };

      var random = new Random();
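      // For every combination of dimension, feature set, walk type and repetition: sample 20
      // random instances, build one explorer per path count, and compare every pairing of
      // training and test explorers; each result line is written to results.csv and echoed.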
      using (var writer = File.CreateText("results.csv")) {
        writer.AutoFlush = true;
        var header = string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}\t{12}",
          "Rep", "Dimension", "FSet", "Type", "TrainEff", "TestEff", "ExCnt", "ExRnk", "ClsCnt", "ClsRnk", "TotCnt", "TrainEff2", "TestEff2");
        writer.WriteLine(header);
        Console.WriteLine(header);
        foreach (var dim in new[] { 20, 30, 40 }) {
          foreach (var feat in dwFeatureSets) {
            foreach (var type in dwTypes) {
              for (var rep = 0; rep < 10; rep++) {
                var instances = GetSomeRandomInstances(dimension: dim, totalInstances: 20, seed: random.Next());

                var explorers = from p in dwPaths
                                select new PathRelinkingExplorer() { Features = feat.Value, Paths = p, Type = type.Value, BestImprovement = false };
                ExploreMatching(writer, instances, explorers.ToArray(), explorers.ToArray(), string.Format("{0}\t{1}\t{2}\t{3}", rep, dim, feat.Key, type.Key));
              }
            }
          }
        }
      }


      //var training = GenerateData(instances, rrDwExplorer, parallel: true);
      //var standardizer = InstancesStandardizer.CreateAndApply(training);
      //var test = GenerateData(instances, rrDwExplorer, parallel: true);
      //standardizer.Apply(test);
      //PrintMatchResult(Compare(training, test));

      //Console.WriteLine("=== Path Relinking Walk ===");
      //ExploreMatching(instances,
      //new[] { 1, 5, 10, 20, 50, 100, 200 }.Select(x => new PathRelinkingExplorer() { LocalOptima = false, Paths = x }).ToArray(),
      //new[] { 1, 5, 10, 20, 50, 100, 200 }.Select(x => new PathRelinkingExplorer() { LocalOptima = false, Paths = x }).ToArray(),
      //parallel: true);
      //Console.WriteLine("=== Random Walk ===");
      //ExploreMatching(instances,
      //new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new RandomWalkExplorer() { Iterations = x }).ToArray(),
      //new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new RandomWalkExplorer() { Iterations = x }).ToArray(),
      //parallel: true);
      //Console.WriteLine("=== Adaptive Walk ===");
      //ExploreMatching(instances,
      //new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new AdaptiveWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new AdaptiveWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //parallel: true);
      //Console.WriteLine("=== Up/Down Walk ===");
      //ExploreMatching(instances,
      //new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new UpDownWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //new[] { 300, 1500, 3000, 6000, 15000, 30000, 60000 }.Select(x => new UpDownWalkExplorer() { Iterations = x / 10, SampleSize = 10 }).ToArray(),
      //parallel: true);
    }

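    // Loads QAPLIB instances of the given dimension in parallel, in a seeded random order,
    // and stops adding new instances once totalInstances have been collected.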
    private static List<QuadraticAssignmentProblem> GetSomeRandomInstances(int dimension, int totalInstances, int seed) {
      var sync = new object();
      var provider = new OneSizeInstanceProvider(dimension);
      var instances = new List<QuadraticAssignmentProblem>();
      var random = new FastRandom(seed);
      Parallel.ForEach(provider.GetDataDescriptors().Shuffle(random), desc => {
        var qapData = provider.LoadData(desc);
        if (instances.Count >= totalInstances) return;
        var qap = new QuadraticAssignmentProblem();
        qap.Load(qapData);
        lock (sync) {
          if (instances.Count >= totalInstances) return;
          instances.Add(qap);
        }
      });
      return instances;
    }

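    // Applies the given explorer to every instance (optionally in parallel) and collects the
    // resulting instance descriptors; instances the explorer returns null for are skipped.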
    private static List<InstanceDescriptor> GenerateData(List<QuadraticAssignmentProblem> instances, InstanceExplorer explorer, bool parallel = false) {
      var sync = new object();
      var data = new List<InstanceDescriptor>();
      Action<QuadraticAssignmentProblem> body = (qap) => {
        var instance = explorer.Explore(qap);
        if (instance == null) return;
        lock (sync) {
          data.Add(instance);
        }
      };
      if (parallel) {
        Parallel.ForEach(instances.Select(x => (QuadraticAssignmentProblem)x.Clone()), body);
      } else {
        foreach (var qap in instances) body(qap);
      }
      return data;
    }

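    // Matches every test descriptor against the training set: orders the training descriptors by
    // CalculateSimilarity, counts exact (same name) and class matches of the first entry, and
    // accumulates the 1-based rank at which the correct instance and class appear.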
    private static MatchResult Compare(List<InstanceDescriptor> training, List<InstanceDescriptor> test) {
      int exactCount = 0, clsCount = 0, totalCount = 0;
      int exactRank = 0, clsRank = 0;
      foreach (var e in test) {
        var ordered = training.OrderBy(x => x.CalculateSimilarity(e)).ToList();
        var bestMatch = ordered.First();
        if (bestMatch.Cls == e.Cls) clsCount++;
        if (bestMatch.Name == e.Name) exactCount++;
        var r = ordered.FindIndex((id) => id.Name == e.Name);
        if (r < 0) continue;
        totalCount++;
        exactRank += r + 1;
        clsRank += ordered.FindIndex((id) => id.Cls == e.Cls) + 1;
      }

      return new MatchResult() {
        ExactCount = exactCount,
        ClsCount = clsCount,
        TotalCount = totalCount,
        ExactAverageRank = exactRank / (double)totalCount,
        ClsAverageRank = clsRank / (double)totalCount,
        TrainingDescriptionEffort = training.Average(x => x.DescriptionEffort),
        TestDescriptionEffort = test.Average(x => x.DescriptionEffort)
      };
    }

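    // Prints all descriptors as semicolon-separated lines, preceded by a header row built from
    // the feature names of the first descriptor.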
    private static void PrintData(List<InstanceDescriptor> instances) {
      using (var iter = instances.GetEnumerator()) {
        if (!iter.MoveNext()) return;
        Console.WriteLine(string.Join(";", new[] {"Name", "Cls", "Dimension", "Effort"}
          .Concat(iter.Current != null ? iter.Current.FeatureNames : new [] { "(null)" })));
        do {
          PrintInstanceLine(iter.Current);
        } while (iter.MoveNext());
      }
    }

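    // Prints name, class, dimension, description effort and all feature values of one descriptor.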
    private static void PrintInstanceLine(InstanceDescriptor instance) {
      Console.WriteLine(string.Join(";",
        new[] {instance.Name, instance.Cls, instance.Dimension.ToString(CultureInfo.CurrentCulture), instance.DescriptionEffort.ToString(CultureInfo.CurrentCulture)}
          .Concat(instance.FeatureValues.Select(x => x.ToString(CultureInfo.CurrentCulture)))));
    }

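    // Builds a standardized knowledge base per training explorer, generates raw test data per
    // test explorer, and compares every training/test pairing after applying the training
    // standardizer to a copy of the test data; one result line per pairing is written.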
    private static void ExploreMatching(StreamWriter writer, List<QuadraticAssignmentProblem> instances, InstanceExplorer[] trainingExplorers, InstanceExplorer[] testExplorers, string info, bool parallel = false) {
      var sync = new object();

      var knowledgeBase = new Dictionary<InstanceExplorer, Tuple<InstancesStandardizer, List<InstanceDescriptor>>>();
      Action<InstanceExplorer> trainingBody = (kbExplorer) => {
        var trainingData = GenerateData(instances, kbExplorer, parallel);
        var standardizer = InstancesStandardizer.Create(trainingData);
        standardizer.Apply(trainingData);
        lock (sync) {
          knowledgeBase.Add(kbExplorer, Tuple.Create(standardizer, trainingData));
        }
      };
      if (parallel) {
        Parallel.ForEach(trainingExplorers, trainingBody);
      } else {
        foreach (var kbExplorer in trainingExplorers) trainingBody(kbExplorer);
      }

      var experimentBase = new Dictionary<InstanceExplorer, List<InstanceDescriptor>>();
      Action<InstanceExplorer> testBody = (expExplorer) => {
        var testData = GenerateData(instances, expExplorer, parallel);
        lock (sync) {
          experimentBase.Add(expExplorer, testData);
        }
      };
      if (parallel) {
        Parallel.ForEach(testExplorers, testBody);
      } else {
        foreach (var expExplorer in testExplorers) testBody(expExplorer);
      }

      var data = from kb in knowledgeBase
        from exp in experimentBase
        select new { Training = kb, Test = exp };

      if (parallel) {
        Parallel.ForEach(data, (point) => {
          var normalizedTest = point.Test.Value.Select(x => new InstanceDescriptor(x)).ToList();
          point.Training.Value.Item1.Apply(normalizedTest);
          var result = Compare(point.Training.Value.Item2, normalizedTest);
          lock (sync) {
            string output = string.Format("{0}\t{1}\t{2}\t{3:F2}\t{4}\t{5:F2}\t{6}\t{7}\t{8}\t{9}",
              info, point.Training.Key.Effort, point.Test.Key.Effort, result.ExactCount,
              result.ExactAverageRank, result.ClsCount, result.ClsAverageRank, result.TotalCount,
              result.TrainingDescriptionEffort, result.TestDescriptionEffort);
            writer.WriteLine(output);
            Console.WriteLine(output);
          }
        });
      } else {
        foreach (var point in data) {
          var normalizedTest = point.Test.Value.Select(x => new InstanceDescriptor(x)).ToList();
          point.Training.Value.Item1.Apply(normalizedTest);
          var result = Compare(point.Training.Value.Item2, normalizedTest);
          string output = string.Format("{0}\t{1}\t{2}\t{3:F2}\t{4}\t{5:F2}\t{6}\t{7}\t{8}\t{9}",
            info, point.Training.Key.Effort, point.Test.Key.Effort, result.ExactCount,
            result.ExactAverageRank, result.ClsCount, result.ClsAverageRank, result.TotalCount,
            result.TrainingDescriptionEffort, result.TestDescriptionEffort);
          writer.WriteLine(output);
          Console.WriteLine(output);
        }
      }
    }

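    // Aggregated matching statistics (match counts, average ranks, description efforts)
    // for one training/test comparison.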
    private class MatchResult {
      public int ExactCount { get; set; }
      public int ClsCount { get; set; }
      public int TotalCount { get; set; }
      public double ExactAverageRank { get; set; }
      public double ClsAverageRank { get; set; }

      public double TrainingDescriptionEffort { get; set; }
      public double TestDescriptionEffort { get; set; }
    }
  }
}