Changeset 14563 for branches/MemPRAlgorithm

- Timestamp: 01/13/17 18:18:37
- Location: branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3
- Files: 11 edited
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Binary/BinaryMemPR.cs
  (r14557 → r14563) The constructor now defaults to an unbiased solution model trainer, and the crossover loop tracks cache hits per operator instead of globally.

    foreach (var trainer in ApplicationManager.Manager.GetInstances<ISolutionModelTrainer<BinaryMemPRPopulationContext>>())
      SolutionModelTrainerParameter.ValidValues.Add(trainer);
  +
  + if (SolutionModelTrainerParameter.ValidValues.Count > 0) {
  +   var unbiased = SolutionModelTrainerParameter.ValidValues.FirstOrDefault(x => !x.Bias);
  +   if (unbiased != null) SolutionModelTrainerParameter.Value = unbiased;
  + }
  +
    foreach (var localSearch in ApplicationManager.Manager.GetInstances<ILocalSearch<BinaryMemPRSolutionContext>>()) {
      LocalSearchParameter.ValidValues.Add(localSearch);
    ...
    cache.Add(p2.Solution);

  - var cacheHits = 0;
  + var cacheHits = new Dictionary<int, int>() { { 0, 0 }, { 1, 0 }, { 2, 0 } };
    ISingleObjectiveSolutionScope<BinaryVector> offspring = null;
  +
    while (evaluations < N) {
      BinaryVector c = null;
  -   var xochoice = Context.Random.Next(3);
  +   var xochoice = cacheHits.SampleRandom(Context.Random).Key;
      switch (xochoice) {
        case 0: c = NPointCrossover.Apply(Context.Random, p1.Solution, p2.Solution, new IntValue(1)); break;
        ...
      }
      if (cache.Contains(c)) {
  -     cacheHits++;
  -     if (cacheHits > 50) break;
  +     cacheHits[xochoice]++;
  +     if (cacheHits[xochoice] > 10) {
  +       cacheHits.Remove(xochoice);
  +       if (cacheHits.Count == 0) break;
  +     }
        continue;
      }
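  For reference, a minimal self-contained sketch of the per-operator cache-hit budget used above (this is not HeuristicLab code; SampleRandomKey and the integer "offspring" are illustrative stand-ins for the SampleRandom extension and a real crossover). Each crossover index keeps its own duplicate counter, the next operator is drawn only from the operators still in the dictionary, and an operator is retired once it exceeds its budget, so the loop only gives up when every operator keeps reproducing known solutions.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class PerOperatorCacheHitSketch {
      // Stand-in for HeuristicLab's SampleRandom extension: uniform choice among remaining keys.
      static int SampleRandomKey(IReadOnlyDictionary<int, int> d, Random rnd) {
        return d.Keys.ElementAt(rnd.Next(d.Count));
      }

      static void Run() {
        var rnd = new Random(0);
        var cache = new HashSet<int>();                // already-seen offspring (toy representation)
        var cacheHits = new Dictionary<int, int> { { 0, 0 }, { 1, 0 }, { 2, 0 } };

        while (cacheHits.Count > 0) {
          var xochoice = SampleRandomKey(cacheHits, rnd);
          var child = rnd.Next(20);                    // stand-in for applying crossover #xochoice
          if (cache.Contains(child)) {
            cacheHits[xochoice]++;                     // charge the duplicate to this operator only
            if (cacheHits[xochoice] > 10)
              cacheHits.Remove(xochoice);              // retire operators that keep rediscovering known solutions
            continue;                                  // loop ends once every operator is retired
          }
          cache.Add(child);
          // ... evaluate child, keep the best one, count evaluations, etc.
        }
      }
    }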
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Binary/SolutionModel/Univariate/BiasedModelTrainer.cs
  (r14552 → r14563) Implements the new Bias property from ISolutionModelTrainer:

      where TContext : IPopulationBasedHeuristicAlgorithmContext<ISingleObjectiveHeuristicOptimizationProblem, BinaryVector>, ISolutionModelContext<BinaryVector> {

  + public bool Bias { get { return true; } }
  +
    [Storable]
    private IValueParameter<EnumValue<ModelBiasOptions>> modelBiasParameter;
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Binary/SolutionModel/Univariate/UnbiasedModelTrainer.cs
  (r14552 → r14563) Implements the new Bias property:

    public class UniasedModelTrainer<TContext> : NamedItem, ISolutionModelTrainer<TContext>
      where TContext : IPopulationBasedHeuristicAlgorithmContext<ISingleObjectiveHeuristicOptimizationProblem, BinaryVector>, ISolutionModelContext<BinaryVector> {

  + public bool Bias { get { return false; } }
  +
    [StorableConstructor]
    protected UniasedModelTrainer(bool deserializing) : base(deserializing) { }
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Interfaces/Interfaces.cs
  (r14552 → r14563) ISolutionModelTrainer gains a Bias property so algorithms can distinguish biased from unbiased trainers:

    public interface ISolutionModelTrainer<TContext> : IItem {
  +   bool Bias { get; }
      void TrainModel(TContext context);
    }
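  A toy illustration of how a consumer can use this flag to prefer an unbiased trainer as the default, mirroring the constructor changes above (ITrainer and PickDefault are hypothetical names, not part of the changeset):

    using System.Collections.Generic;
    using System.Linq;

    // Hypothetical, simplified stand-in for ISolutionModelTrainer<TContext>.
    interface ITrainer { bool Bias { get; } }

    static class DefaultTrainerSelection {
      // Prefer the first unbiased trainer; otherwise keep the first valid value as default.
      static ITrainer PickDefault(IReadOnlyList<ITrainer> validValues) {
        if (validValues.Count == 0) return null;
        return validValues.FirstOrDefault(x => !x.Bias) ?? validValues[0];
      }
    }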
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/LinearLinkage/LinearLinkageMemPR.cs
  (r14556 → r14563) Same changes as BinaryMemPR: an unbiased trainer becomes the default, and the crossover loop replaces the fixed 80/20 operator split with per-operator cache-hit budgets.

    using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    using HeuristicLab.PluginInfrastructure;
  + using HeuristicLab.Random;

    namespace HeuristicLab.Algorithms.MemPR.Grouping {
    ...
    foreach (var trainer in ApplicationManager.Manager.GetInstances<ISolutionModelTrainer<LinearLinkageMemPRPopulationContext>>())
      SolutionModelTrainerParameter.ValidValues.Add(trainer);
  +
  + if (SolutionModelTrainerParameter.ValidValues.Count > 0) {
  +   var unbiased = SolutionModelTrainerParameter.ValidValues.FirstOrDefault(x => !x.Bias);
  +   if (unbiased != null) SolutionModelTrainerParameter.Value = unbiased;
  + }
  +
    foreach (var localSearch in ApplicationManager.Manager.GetInstances<ILocalSearch<LinearLinkageMemPRSolutionContext>>()) {
      LocalSearchParameter.ValidValues.Add(localSearch);
    ...
    cache.Add(p2.Solution);

  - var cachehits = 0;
  + var cacheHits = new Dictionary<int, int>() { { 0, 0 }, { 1, 0 } };
    var evaluations = 0;
    var probe = Context.ToScope((LinearLinkage)p1.Solution.Clone());
    while (evaluations < p1.Solution.Length) {
      LinearLinkage c = null;
  -   if (Context.Random.NextDouble() < 0.8)
  -     c = GroupCrossover.Apply(Context.Random, p1.Solution, p2.Solution);
  -   else c = SinglePointCrossover.Apply(Context.Random, p1.Solution, p2.Solution);
  +   var xochoice = cacheHits.SampleRandom(Context.Random).Key;
  +   switch (xochoice) {
  +     case 0: c = GroupCrossover.Apply(Context.Random, p1.Solution, p2.Solution); break;
  +     case 1: c = SinglePointCrossover.Apply(Context.Random, p1.Solution, p2.Solution); break;
  +   }
      if (cache.Contains(c)) {
  -     cachehits++;
  -     if (cachehits > 10) break;
  +     cacheHits[xochoice]++;
  +     if (cacheHits[xochoice] > 10) {
  +       cacheHits.Remove(xochoice);
  +       if (cacheHits.Count == 0) break;
  +     }
        continue;
      }
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/LinearLinkage/SolutionModel/Univariate/UnbiasedModelTrainer.cs
  (r14552 → r14563) Implements the new Bias property:

    public class UniasedModelTrainer<TContext> : NamedItem, ISolutionModelTrainer<TContext>
      where TContext : IPopulationBasedHeuristicAlgorithmContext<ISingleObjectiveHeuristicOptimizationProblem, LinearLinkage>, ISolutionModelContext<LinearLinkage> {

  + public bool Bias { get { return false; } }
  +
    [StorableConstructor]
    protected UniasedModelTrainer(bool deserializing) : base(deserializing) { }
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRAlgorithm.cs
  (r14562 → r14563) Adds quality-per-clock and quality-per-evaluations analyzers, raises the default runtime limit from 1 to 10 minutes, and routes the operator statistics through the context's Add*Result methods, which now also receive the parent distance.

    [Storable]
    private BestAverageWorstQualityAnalyzer qualityAnalyzer;
  + [Storable]
  + private QualityPerClockAnalyzer qualityPerClockAnalyzer;
  + [Storable]
  + private QualityPerEvaluationsAnalyzer qualityPerEvaluationsAnalyzer;
    ...
    qualityAnalyzer = cloner.Clone(original.qualityAnalyzer);
  + qualityPerClockAnalyzer = cloner.Clone(original.qualityPerClockAnalyzer);
  + qualityPerEvaluationsAnalyzer = cloner.Clone(original.qualityPerEvaluationsAnalyzer);
    RegisterEventHandlers();
    ...
  - Parameters.Add(new OptionalValueParameter<TimeSpanValue>("MaximumExecutionTime", "The maximum runtime.", new TimeSpanValue(TimeSpan.FromMinutes(1))));
  + Parameters.Add(new OptionalValueParameter<TimeSpanValue>("MaximumExecutionTime", "The maximum runtime.", new TimeSpanValue(TimeSpan.FromMinutes(10))));
    ...
    qualityAnalyzer = new BestAverageWorstQualityAnalyzer();
  + qualityPerClockAnalyzer = new QualityPerClockAnalyzer();
  + qualityPerEvaluationsAnalyzer = new QualityPerEvaluationsAnalyzer();
    ...
  - multiAnalyzer.Operators.Add(analyzer, analyzer.EnabledByDefault);
  + multiAnalyzer.Operators.Add(analyzer, analyzer.EnabledByDefault || analyzer is ISimilarityBasedOperator);
    ...
    multiAnalyzer.Operators.Add(qualityAnalyzer, qualityAnalyzer.EnabledByDefault);
  + multiAnalyzer.Operators.Add(qualityPerClockAnalyzer, true);
  + multiAnalyzer.Operators.Add(qualityPerEvaluationsAnalyzer, true);

  When no offspring replaced a population member, the adaptive walk now starts from a clone of the sampled parent and reports through the context:

    if (!replaced) {
  -   offspring = (ISingleObjectiveSolutionScope<TSolution>)Context.Population.SampleRandom(Context.Random).Clone();
  -   var before = offspring.Fitness;
  +   var before = Context.Population.SampleRandom(Context.Random);
  +   offspring = (ISingleObjectiveSolutionScope<TSolution>)before.Clone();
      AdaptiveWalk(offspring, Context.LocalSearchEvaluations * 2, token);
  -   Context.AdaptivewalkingStat.Add(Tuple.Create(before, offspring.Fitness));
  -   if (Context.AdaptivewalkingStat.Count % 10 == 0) Context.RelearnAdaptiveWalkPerformanceModel();
  +   if (!Eq(before, offspring))
  +     Context.AddAdaptivewalkingResult(before, offspring);
      if (Replace(offspring, token)) {
        Context.ByAdaptivewalking++;

  In the result analysis, the breeding, relinking, and delinking scatter plots now read the result value from Item4 (Item3 holds the parent distance) and gain a distance row; the hill climbing row is renamed:

    var sp = new ScatterPlot("Breeding Correlation", "");
  - sp.Rows.Add(new ScatterPlotDataRow("Parent1 vs Offspring", "", Context.BreedingStat.Select(x => new Point2D<double>(x.Item1, x.Item3))) { VisualProperties = { PointSize = 6 } });
  - sp.Rows.Add(new ScatterPlotDataRow("Parent2 vs Offspring", "", Context.BreedingStat.Select(x => new Point2D<double>(x.Item2, x.Item3))) { VisualProperties = { PointSize = 6 } });
  + sp.Rows.Add(new ScatterPlotDataRow("Parent1 vs Offspring", "", Context.BreedingStat.Select(x => new Point2D<double>(x.Item1, x.Item4))) { VisualProperties = { PointSize = 6 } });
  + sp.Rows.Add(new ScatterPlotDataRow("Parent2 vs Offspring", "", Context.BreedingStat.Select(x => new Point2D<double>(x.Item2, x.Item4))) { VisualProperties = { PointSize = 6 } });
  + sp.Rows.Add(new ScatterPlotDataRow("Parent Distance vs Offspring", "", Context.BreedingStat.Select(x => new Point2D<double>(x.Item3, x.Item4))) { VisualProperties = { PointSize = 6 } });

    The "Relinking Correlation" and "Delinking Correlation" plots change in the same way (the "A vs Relink"/"B vs Relink" and "A vs Delink"/"B vs Delink" rows now plot Item4), and each gains a "d(A,B) vs Offspring" row over (Item3, Item4).

    sp = new ScatterPlot("Hillclimbing Correlation", "");
  - sp.Rows.Add(new ScatterPlotDataRow("Start vs End", "", Context.HillclimbingStat.Select(x => new Point2D<double>(x.Item1, x.Item2))) { VisualProperties = { PointSize = 6 } });
  + sp.Rows.Add(new ScatterPlotDataRow("Start vs Improvement", "", Context.HillclimbingStat.Select(x => new Point2D<double>(x.Item1, x.Item2))) { VisualProperties = { PointSize = 6 } });

  HillClimb and the AdaptiveWalk wrapper report to the context instead of writing the stat lists directly:

  - var before = scope.Fitness;
  + var before = (ISingleObjectiveSolutionScope<TSolution>)scope.Clone();
    var lscontext = Context.CreateSingleSolutionContext(scope);
    LocalSearchParameter.Value.Optimize(lscontext);
  - var after = scope.Fitness;
  - Context.HillclimbingStat.Add(Tuple.Create(before, after));
  - if (Context.HillclimbingStat.Count % 10 == 0) Context.RelearnHillclimbingPerformanceModel();
  + Context.AddHillclimbingResult(before, scope);
    Context.IncrementEvaluatedSolutions(lscontext.EvaluatedSolutions);
    return lscontext.EvaluatedSolutions;
    ...
  - var before = scope.Fitness;
    var newScope = (ISingleObjectiveSolutionScope<TSolution>)scope.Clone();
    AdaptiveWalk(newScope, maxEvals, token, subspace);
  - Context.AdaptivewalkingStat.Add(Tuple.Create(before, newScope.Fitness));
  - if (Context.AdaptivewalkingStat.Count % 10 == 0) Context.RelearnAdaptiveWalkPerformanceModel();
  - if (Context.IsBetter(newScope, scope))
  + if (Context.IsBetter(newScope, scope)) {
  +   Context.AddAdaptivewalkingResult(scope, newScope);
      scope.Adopt(newScope);
  + } else if (!Eq(newScope, scope))
  +   Context.AddAdaptivewalkingResult(scope, newScope);
    }
    protected abstract void AdaptiveWalk(ISingleObjectiveSolutionScope<TSolution> scope, int maxEvals, CancellationToken token, ISolutionSubspace<TSolution> subspace = null);

  Breeding checks suitability (now including the parent distance) up front and records the result before hill climbing:

  - if (Context.BreedingSuited(p1, p2)) {
  -   var offspring = Breed(p1, p2, token);
  -   if (double.IsNaN(offspring.Fitness)) {
  -     Context.Evaluate(offspring, token);
  -     Context.IncrementEvaluatedSolutions(1);
  -   }
  -   // new best solutions are improved using hill climbing in full solution space
  -   if (Context.Population.All(p => Context.IsBetter(offspring, p)))
  -     HillClimb(offspring, token);
  -   else if (!Eq(offspring, p1) && !Eq(offspring, p2) && Context.HillclimbingSuited(offspring.Fitness))
  -     HillClimb(offspring, token, CalculateSubspace(new[] { p1.Solution, p2.Solution }, inverse: false));
  -   Context.AddBreedingResult(p1, p2, offspring);
  -   if (Context.BreedingStat.Count % 10 == 0) Context.RelearnBreedingPerformanceModel();
  -   return offspring;
  - }
  - return null;
  + if (!Context.BreedingSuited(p1, p2, Dist(p1, p2))) return null;
  +
  + var offspring = Breed(p1, p2, token);
  +
  + if (double.IsNaN(offspring.Fitness)) {
  +   Context.Evaluate(offspring, token);
  +   Context.IncrementEvaluatedSolutions(1);
  + }
  +
  + Context.AddBreedingResult(p1, p2, Dist(p1, p2), offspring);
  +
  + // new best solutions are improved using hill climbing in full solution space
  + if (Context.Population.All(p => Context.IsBetter(offspring, p)))
  +   HillClimb(offspring, token);
  + else if (!Eq(offspring, p1) && !Eq(offspring, p2) && Context.HillclimbingSuited(offspring.Fitness))
  +   HillClimb(offspring, token, CalculateSubspace(new[] { p1.Solution, p2.Solution }, inverse: false));
  +
  + return offspring;

  Relinking and delinking pass the distance to the suitability checks and no longer re-evaluate the result themselves; PerformRelinking records the result with the distance and drops its own hill climbing:

  - if (!Context.RelinkSuited(p1, p2)) return null;
  + if (!Context.RelinkSuited(p1, p2, Dist(p1, p2))) return null;
    var link = PerformRelinking(p1, p2, token, delink: false);
  - if (double.IsNaN(link.Fitness)) {
  -   Context.Evaluate(link, token);
  -   Context.IncrementEvaluatedSolutions(1);
  - }
    // new best solutions are improved using hill climbing in full solution space
    if (Context.Population.All(p => Context.IsBetter(link, p)))
    ...
  - if (!Context.DelinkSuited(p1, p2)) return null;
  + if (!Context.DelinkSuited(p1, p2, Dist(p1, p2))) return null;
    var link = PerformRelinking(p1, p2, token, delink: true);
  - if (double.IsNaN(link.Fitness)) {
  -   Context.Evaluate(link, token);
  -   Context.IncrementEvaluatedSolutions(1);
  - }
    // new best solutions are improved using hill climbing in full solution space
    if (Context.Population.All(p => Context.IsBetter(link, p)))
      HillClimb(link, token);
  - // intentionally not making hill climbing after delinking in sub-space
  + // intentionally not making hill climbing otherwise after delinking in sub-space
    return link;
    ...
  - // new best solutions are improved using hill climbing
  - if (Context.Population.All(p => Context.IsBetter(relink, p)))
  -   HillClimb(relink, token);
  -
    if (delink) {
  -   Context.AddDelinkingResult(a, b, relink);
  -   if (Context.DelinkingStat.Count % 10 == 0) Context.RelearnDelinkingPerformanceModel();
  +   Context.AddDelinkingResult(a, b, Dist(a, b), relink);
    } else {
  -   Context.AddRelinkingResult(a, b, relink);
  -   if (context.RelinkingStat.Count % 10 == 0) Context.RelearnRelinkingPerformanceModel();
  +   Context.AddRelinkingResult(a, b, Dist(a, b), relink);
    }
    return relink;

  Sampling computes the population's average pairwise distance and uses it both for the suitability check and for the recorded statistic:

    ISingleObjectiveSolutionScope<TSolution> bestSample = null;
    var tries = 1;
  + var avgDist = (from a in Context.Population.Shuffle(Context.Random)
  +                from b in Context.Population.Shuffle(Context.Random)
  +                select Dist(a, b)).Average();
    for (; tries < 100; tries++) {
      var sample = Context.ToScope(Context.Model.Sample());
      ...
      if (Context.Population.Any(x => !Context.IsBetter(x, bestSample))) break;
    }
  - if (!Context.SamplingSuited()) break;
  + if (!Context.SamplingSuited(avgDist)) break;
    }
    Context.IncrementEvaluatedSolutions(tries);
  - Context.AddSamplingResult(bestSample);
  - if (Context.SamplingStat.Count % 10 == 0) Context.RelearnSamplingPerformanceModel();
  + Context.AddSamplingResult(bestSample, avgDist);
    return bestSample;
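  The sampling step above gates model sampling on the population's average pairwise distance. A small self-contained sketch of that computation, assuming binary solutions and a normalized Hamming distance (Dist and the bool[] representation are stand-ins for the encoding-specific distance used by MemPR); the changeset shuffles the population before the cross join, which does not change the average over all pairs, so the shuffle is omitted here:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class AverageDistanceSketch {
      // Toy distance: normalized Hamming distance between two equal-length bool vectors.
      static double Dist(bool[] a, bool[] b) {
        return a.Zip(b, (x, y) => x == y ? 0.0 : 1.0).Sum() / a.Length;
      }

      // Average distance over all ordered pairs of the population (including identical pairs),
      // mirroring the LINQ cross join used in the changeset.
      static double AverageDist(IList<bool[]> population) {
        return (from a in population
                from b in population
                select Dist(a, b)).Average();
      }
    }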
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs
  (r14552 → r14563) The breeding, relinking, and delinking statistics gain a parent-distance column (4-tuples); relinking, delinking, and hill climbing now record the achieved improvement rather than the raw result fitness; the stat properties are exposed as IEnumerable; relearning of the performance models moves into the Add*Result methods; regression data preparation is generalized to any number of inputs; and acceptance probabilities distinguish absolute from relative performance models.

    [Storable]
  - private List<Tuple<double, double, double>> breedingStat;
  - public List<Tuple<double, double, double>> BreedingStat {
  + private List<Tuple<double, double, double, double>> breedingStat;
  + public IEnumerable<Tuple<double, double, double, double>> BreedingStat {
      get { return breedingStat; }
    }

    relinkingStat/RelinkingStat and delinkingStat/DelinkingStat change identically; SamplingStat, HillclimbingStat, and AdaptivewalkingStat keep their 2-tuple element type but are likewise exposed as IEnumerable. The cloning constructor copies the additional Item4, and the constructor initializes the three lists with the 4-tuple element type.

  The suitability checks take the parent distance and use the new acceptance helpers; the Relearn* methods run over the private lists via ToListRow:

    public void RelearnBreedingPerformanceModel() {
  -   breedingPerformanceModel = RunRegression(PrepareRegression(BreedingStat), breedingPerformanceModel).Model;
  +   breedingPerformanceModel = RunRegression(PrepareRegression(ToListRow(breedingStat)), breedingPerformanceModel).Model;
    }
  - public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2) {
  + public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      if (breedingPerformanceModel == null) return true;
      ...
  -   if (IsBetter(p1, p2)) {
  -     if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
  -       return true;
  -     return Random.NextDouble() < ProbabilityAccept3dModel(p1.Fitness, p2.Fitness, breedingPerformanceModel);
  -   }
  -   if (p1.Fitness < minI2 || p1.Fitness > maxI2 || p2.Fitness < minI1 || p2.Fitness > maxI1)
  +   if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;
  -   return Random.NextDouble() < ProbabilityAccept3dModel(p2.Fitness, p1.Fitness, breedingPerformanceModel);
  +
  +   return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel);
    }

    The other Relearn* methods (relinking, delinking, sampling, hill climbing, adaptive walk) are updated the same way.

  - public bool RelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2) {
  + public bool RelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      if (relinkingPerformanceModel == null) return true;
      ...
  +   if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
  +     return true;
  +
      if (IsBetter(p1, p2)) {
  -     if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
  -       return true;
  -     return Random.NextDouble() < ProbabilityAccept3dModel(p1.Fitness, p2.Fitness, relinkingPerformanceModel);
  -   }
  -   if (p1.Fitness < minI2 || p1.Fitness > maxI2 || p2.Fitness < minI1 || p2.Fitness > maxI1)
  -     return true;
  -   return Random.NextDouble() < ProbabilityAccept3dModel(p2.Fitness, p1.Fitness, relinkingPerformanceModel);
  +     return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
  +   }
  +   return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
    }

    DelinkSuited(p1, p2, dist) changes analogously against delinkingPerformanceModel.

  - public bool SamplingSuited() {
  + public bool SamplingSuited(double avgDist) {
      if (samplingPerformanceModel == null) return true;
  -   return Random.NextDouble() < ProbabilityAccept2dModel(Population.Average(x => x.Fitness), samplingPerformanceModel);
  +   if (avgDist < samplingStat.Min(x => x.Item1) || avgDist > samplingStat.Max(x => x.Item1)) return true;
  +   return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { avgDist }, samplingPerformanceModel);
    }

    public bool HillclimbingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
  -   if (hillclimbingPerformanceModel == null) return true;
  -   if (scope.Fitness < HillclimbingStat.Min(x => x.Item1) || scope.Fitness > HillclimbingStat.Max(x => x.Item1))
  -     return true;
  -   return Random.NextDouble() < ProbabilityAccept2dModel(scope.Fitness, hillclimbingPerformanceModel);
  +   return HillclimbingSuited(scope.Fitness);
    }
    public bool HillclimbingSuited(double startingFitness) {
      ...
  -   return Random.NextDouble() < ProbabilityAccept2dModel(startingFitness, hillclimbingPerformanceModel);
  +   return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, hillclimbingPerformanceModel);
    }

    AdaptivewalkingSuited(scope) likewise delegates to AdaptivewalkingSuited(scope.Fitness), which now ends with:

  -   return Random.NextDouble() < ProbabilityAccept2dModel(startingFitness, adaptiveWalkPerformanceModel);
  +   return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { startingFitness }, adaptiveWalkPerformanceModel);

  The GetSolution overloads accept IEnumerable (plus a new 4-tuple overload) and go through ToListRow, and the two fixed-arity PrepareRegression overloads collapse into one method over row lists:

  + public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double, double>> data) {
  +   return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
  + }

  + protected RegressionProblemData PrepareRegression(List<List<double>> sample) {
  +   var columns = sample.First().Select(y => new List<double>()).ToList();
  +   foreach (var next in sample.Shuffle(Random)) {
  +     for (var i = 0; i < next.Count; i++) {
  +       columns[i].Add(next[i]);
  +     }
  +   }
  +   var ds = new Dataset(columns.Select((v, i) => i < columns.Count - 1 ? "in" + i : "out").ToList(), columns);
  +   var regPrb = new RegressionProblemData(ds, Enumerable.Range(0, columns.Count - 1).Select(x => "in" + x), "out") {
        TrainingPartition = { Start = 0, End = Math.Min(50, sample.Count) },
        TestPartition = { Start = Math.Min(50, sample.Count), End = sample.Count }
      };
      return regPrb;
    }

  + private static List<List<double>> ToListRow(List<Tuple<double, double>> rows) {
  +   return rows.Select(x => new List<double> { x.Item1, x.Item2 }).ToList();
  + }
  + private static List<List<double>> ToListRow(List<Tuple<double, double, double>> rows) {
  +   return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3 }).ToList();
  + }
  + private static List<List<double>> ToListRow(List<Tuple<double, double, double, double>> rows) {
  +   return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3, x.Item4 }).ToList();
  + }

  ProbabilityAccept2dModel and ProbabilityAccept3dModel are replaced by variable-arity absolute and relative variants that use alglib instead of the local Phi approximation:

  + private double ProbabilityAcceptAbsolutePerformanceModel(List<double> inputs, IConfidenceRegressionModel model) {
  +   var inputVariables = inputs.Select((v, i) => "in" + i);
  +   var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
  +   var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
  +   var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());
  +
  +   // calculate the fitness goal
  +   var goal = Maximization ? Population.Min(x => x.Fitness) : Population.Max(x => x.Fitness);
  +   var z = (goal - mean) / sdev;
  +   // return the probability of achieving or surpassing that goal
  +   var y = alglib.invnormaldistribution(z);
  +   return Maximization ? 1.0 - y /* P(X >= z) */ : y; // P(X <= z)
  + }
  +
  + private double ProbabilityAcceptRelativePerformanceModel(double basePerformance, List<double> inputs, IConfidenceRegressionModel model) {
  +   var inputVariables = inputs.Select((v, i) => "in" + i);
  +   var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
  +   var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
  +   var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());
  +
  +   // calculate the improvement goal
  +   var goal = Maximization ? Population.Min(x => x.Fitness) - basePerformance : basePerformance - Population.Max(x => x.Fitness);
  +   var z = (goal - mean) / sdev;
  +   // return the probability of achieving or surpassing that goal
  +   return 1.0 - alglib.invnormaldistribution(z); /* P(X >= z) */
  + }

  The Add*Result methods take the parent distance where applicable, record improvements for relinking, delinking, and hill climbing, retrain their model every 10 samples, and AddTabuwalkingResult is renamed to AddAdaptivewalkingResult:

  - public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, ISingleObjectiveSolutionScope<TSolution> child) {
  + public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
  -     BreedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, child.Fitness));
  -   else BreedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, child.Fitness));
  +     breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));
  +   else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));
  +   if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();
    }

  - public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, ISingleObjectiveSolutionScope<TSolution> child) {
  + public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
  -     RelinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, child.Fitness));
  -   else RelinkingStat.Add(Tuple.Create(b.Fitness, a.Fitness, child.Fitness));
  +     relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
  +   else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
  +   if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();
    }

    AddDelinkingResult changes in the same way (writing to delinkingStat and retraining the delinking model).

  - public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample) {
  -   SamplingStat.Add(Tuple.Create(Population.Average(x => x.Fitness), sample.Fitness));
  + public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {
  +   samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));
  +   if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();
    }

    public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
  -   HillclimbingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
  +   hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
  +   if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();
    }

  - public void AddTabuwalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
  -   AdaptivewalkingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
  + public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
  +   adaptivewalkingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
  +   if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();
    }

  Finally, the protected static Phi(x) helper (the public-domain normal CDF approximation added 2016-11-19) is commented out, since the acceptance helpers now call into alglib.
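  The acceptance decisions above reduce to one idea: the confidence regression model predicts a mean and variance for the operator's outcome, a goal is derived from the current population (or from the base performance, for the relative variant), and the operator is applied with probability equal to the estimated chance of reaching that goal. Below is a minimal standalone sketch of that computation, assuming maximization and using the Abramowitz–Stegun approximation of the standard normal CDF (the changeset itself delegates to ALGLIB); the numbers in the demo are made up for illustration.

    using System;

    static class AcceptanceProbabilitySketch {
      // Standard normal CDF via the Abramowitz–Stegun erf approximation (max error ~1.5e-7).
      static double Phi(double x) {
        double a1 = 0.254829592, a2 = -0.284496736, a3 = 1.421413741,
               a4 = -1.453152027, a5 = 1.061405429, p = 0.3275911;
        int sign = x < 0 ? -1 : 1;
        x = Math.Abs(x) / Math.Sqrt(2.0);
        double t = 1.0 / (1.0 + p * x);
        double y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.Exp(-x * x);
        return 0.5 * (1.0 + sign * y);
      }

      // Probability that an outcome ~ N(mean, sdev^2) reaches or surpasses the goal (maximization).
      static double ProbabilityOfReachingGoal(double mean, double sdev, double goal) {
        var z = (goal - mean) / sdev;
        return 1.0 - Phi(z); // P(X >= goal)
      }

      // Example: the model predicts mean 0.82 with sdev 0.05; the fitness goal is 0.90.
      static void Demo() {
        var prob = ProbabilityOfReachingGoal(0.82, 0.05, 0.90);
        Console.WriteLine(prob.ToString("F4")); // ~0.0548: the operator would rarely be applied here
      }
    }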
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Permutation/PermutationMemPR.cs
  (r14556 → r14563) Same pattern as the other encodings: an unbiased trainer becomes the default, and the three crossover loops replace the global cache-hit counter with per-operator budgets.

    foreach (var trainer in ApplicationManager.Manager.GetInstances<ISolutionModelTrainer<PermutationMemPRPopulationContext>>())
      SolutionModelTrainerParameter.ValidValues.Add(trainer);
  +
  + if (SolutionModelTrainerParameter.ValidValues.Count > 0) {
  +   var unbiased = SolutionModelTrainerParameter.ValidValues.FirstOrDefault(x => !x.Bias);
  +   if (unbiased != null) SolutionModelTrainerParameter.Value = unbiased;
  + }
  +
    foreach (var localSearch in ApplicationManager.Manager.GetInstances<ILocalSearch<PermutationMemPRSolutionContext>>()) {
      LocalSearchParameter.ValidValues.Add(localSearch);

  Each of the three crossover loops (the first uses CyclicCrossover2 as case 0, the other two OrderCrossover2) changes in the same way:

  - var cacheHits = 0;
  + var cacheHits = new Dictionary<int, int>() { { 0, 0 }, { 1, 0 }, { 2, 0 } };
    var evaluations = 0;
    ISingleObjectiveSolutionScope<Encodings.PermutationEncoding.Permutation> offspring = null;
    while (evaluations < p1.Solution.Length) {
      Encodings.PermutationEncoding.Permutation c = null;
  -   var xochoice = Context.Random.Next(3);
  +   var xochoice = cacheHits.SampleRandom(Context.Random).Key;
      switch (xochoice) {
        case 0: c = OrderCrossover2.Apply(Context.Random, p1.Solution, p2.Solution); break;
        ...
      }
      if (cache.Contains(c)) {
  -     cacheHits++;
  -     if (cacheHits > 10) break;
  +     cacheHits[xochoice]++;
  +     if (cacheHits[xochoice] > 10) {
  +       cacheHits.Remove(xochoice);
  +       if (cacheHits.Count == 0) break;
  +     }
        continue;
      }
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Permutation/SolutionModel/Univariate/BiasedModelTrainer.cs
  (r14552 → r14563) Drops an unused using and implements the new Bias property:

    using HeuristicLab.Core;
    using HeuristicLab.Data;
  - using HeuristicLab.Encodings.PermutationEncoding;
    using HeuristicLab.Optimization;
    using HeuristicLab.Parameters;
    ...
      where TContext : IPopulationBasedHeuristicAlgorithmContext<ISingleObjectiveHeuristicOptimizationProblem, Encodings.PermutationEncoding.Permutation>,
        ISolutionModelContext<Encodings.PermutationEncoding.Permutation>, IEvaluationServiceContext<Encodings.PermutationEncoding.Permutation> {

  + public bool Bias { get { return true; } }
  +
    [Storable]
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/Permutation/SolutionModel/Univariate/UnbiasedModelTrainer.cs
  (r14552 → r14563) Drops unused usings and implements the new Bias property:

    using System.Linq;
  - using HeuristicLab.Algorithms.MemPR.Binary.SolutionModel.Univariate;
    using HeuristicLab.Algorithms.MemPR.Interfaces;
    using HeuristicLab.Common;
    using HeuristicLab.Core;
  - using HeuristicLab.Encodings.PermutationEncoding;
    using HeuristicLab.Optimization;
    using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    ...
      where TContext : IPopulationBasedHeuristicAlgorithmContext<ISingleObjectiveHeuristicOptimizationProblem, Encodings.PermutationEncoding.Permutation>,
        ISolutionModelContext<Encodings.PermutationEncoding.Permutation> {

  + public bool Bias { get { return false; } }
  +
    [StorableConstructor]
    protected UnbiasedModelTrainer(bool deserializing) : base(deserializing) { }