- Timestamp: 08/29/19 13:53:26 (5 years ago)
- Location: branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3
- Files: 5 edited
Legend:
- unprefixed lines are unmodified context
- lines prefixed with "+" were added in r17225
- lines prefixed with "-" were removed
- "…" marks skipped unchanged lines
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/IIndicator.cs
r16807 → r17225

      /// <param name="problem">The problem on which the front is evaluated (!! The function itself will NOT be evluated only bounds referencePoints & other metadata will be used</param>
      /// <returns>the index of the least contributing point according to any type of quality criteria</returns>
  -   int LeastContributer(IReadOnlyList<Individual> front, MultiObjectiveProblem<RealVectorEncoding, RealVector> problem);
  +   int LeastContributer(IReadOnlyList<Individual> front, IMultiObjectiveProblemDefinition problem);
    }
  }
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/Indicators/CrowdingIndicator.cs
r16807 → r17225

  #endregion

  - using System;
  using System.Collections.Generic;
  using System.Linq;
…
      protected CrowdingIndicator(StorableConstructorFlag _) : base(_) { }
      protected CrowdingIndicator(CrowdingIndicator original, Cloner cloner) : base(original, cloner) { }
  -   public override IDeepCloneable Clone(Cloner cloner) { return new CrowdingIndicator(this, cloner); }
  +   public override IDeepCloneable Clone(Cloner cloner) {
  +     return new CrowdingIndicator(this, cloner);
  +   }
      public CrowdingIndicator() { }
      #endregion

  -   public int LeastContributer(IReadOnlyList<Individual> front, MultiObjectiveProblem<RealVectorEncoding, RealVector> problem) {
  -     var bounds = problem.Encoding.Bounds;
  +   public int LeastContributer(IReadOnlyList<Individual> front, IMultiObjectiveProblemDefinition problem) {
        var extracted = front.Select(x => x.PenalizedFitness).ToArray();
        if (extracted.Length <= 2) return 0;
  -     var pointsums = new double[extracted.Length];
  -
  -     for (var dim = 0; dim < problem.Maximization.Length; dim++) {
  -       var arr = extracted.Select(x => x[dim]).ToArray();
  -       Array.Sort(arr);
  -       var fmax = problem.Encoding.Bounds[dim % bounds.Rows, 1];
  -       var fmin = bounds[dim % bounds.Rows, 0];
  -       var pointIdx = 0;
  -       foreach (var point in extracted) {
  -         var pos = Array.BinarySearch(arr, point[dim]);
  -         var d = pos != 0 && pos != arr.Length - 1 ? (arr[pos + 1] - arr[pos - 1]) / (fmax - fmin) : double.PositiveInfinity;
  -         pointsums[pointIdx] += d;
  -         pointIdx++;
  -       }
  -     }
  -     return pointsums.Select((value, index) => new { value, index }).OrderBy(x => x.value).First().index;
  +     var pointsums = CrowdingCalculator.CalculateCrowdingDistances(extracted);
  +     return pointsums.Select((value, index) => new {value, index}).OrderBy(x => x.value).First().index;
      }
    }
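The change above replaces the inline crowding computation with a call to CrowdingCalculator.CalculateCrowdingDistances. As a rough illustration of what a crowding-distance computation of this kind does, here is a small standalone C# sketch (not the HeuristicLab implementation): boundary points in each objective get an infinite distance, interior points accumulate the normalized gap between their two neighbours. Note that the removed code normalized by the encoding bounds, while this sketch normalizes by the observed objective range, so the resulting numbers need not match.

    using System;
    using System.Linq;

    // Illustrative sketch of an NSGA-II style crowding-distance computation.
    // Boundary points per objective get infinite distance; interior points add
    // the normalized gap between their two neighbours. This is a stand-in, not
    // the HeuristicLab CrowdingCalculator.
    public static class CrowdingSketch {
      public static double[] CalculateCrowdingDistances(double[][] front) {
        var n = front.Length;
        var m = front[0].Length;          // number of objectives
        var distance = new double[n];

        for (var dim = 0; dim < m; dim++) {
          // indices of the points sorted by the current objective
          var order = Enumerable.Range(0, n).OrderBy(i => front[i][dim]).ToArray();
          var range = front[order[n - 1]][dim] - front[order[0]][dim];

          // boundary points are always kept
          distance[order[0]] = double.PositiveInfinity;
          distance[order[n - 1]] = double.PositiveInfinity;

          if (range <= 0) continue;       // all values equal in this objective
          for (var k = 1; k < n - 1; k++)
            distance[order[k]] += (front[order[k + 1]][dim] - front[order[k - 1]][dim]) / range;
        }
        return distance;
      }

      public static void Main() {
        var front = new[] {
          new[] { 0.0, 1.0 },
          new[] { 0.5, 0.5 },
          new[] { 1.0, 0.0 }
        };
        var d = CalculateCrowdingDistances(front);
        // the least contributing point is the one with the smallest crowding distance
        Console.WriteLine($"least contributing index: {Array.IndexOf(d, d.Min())}");
      }
    }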
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/Indicators/HypervolumeIndicator.cs
r16807 → r17225

  #endregion

  - using System;
  using System.Collections.Generic;
  using System.Linq;
…
  using HeuristicLab.Common;
  using HeuristicLab.Core;
  - using HeuristicLab.Encodings.RealVectorEncoding;
  using HeuristicLab.Optimization;
  using HeuristicLab.Problems.TestFunctions.MultiObjective;
  +
  namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  -   [Item("HypervolumeIndicator", "Selection of Offspring based on contributing Hypervolume")]
  +   [Item("HypervolumeIndicator", "Selection of offspring based on contributing Hypervolume")]
      [StorableType("ADF439D6-64E4-4C92-A4D3-E8C05B050406")]
      internal class HypervolumeIndicator : Item, IIndicator {
…
      protected HypervolumeIndicator(StorableConstructorFlag _) : base(_) { }
      protected HypervolumeIndicator(HypervolumeIndicator original, Cloner cloner) : base(original, cloner) { }
  -   public override IDeepCloneable Clone(Cloner cloner) { return new HypervolumeIndicator(this, cloner); }
  +   public override IDeepCloneable Clone(Cloner cloner) {
  +     return new HypervolumeIndicator(this, cloner);
  +   }
      public HypervolumeIndicator() { }
      #endregion

  -   public int LeastContributer(IReadOnlyList<Individual> front, MultiObjectiveProblem<RealVectorEncoding, RealVector> problem) {
  +   public int LeastContributer(IReadOnlyList<Individual> front, IMultiObjectiveProblemDefinition problem) {
        var frontCopy = front.Select(x => x.PenalizedFitness).ToList();
        if (frontCopy.Count <= 1) return 0;
        //TODO discuss with bwerth
  -     var p = problem as MultiObjectiveTestFunctionProblem;
  -     var refPoint = BuildReferencePoint(p != null ? frontCopy.Concat(new[] { p.ReferencePoint.CloneAsArray() }) : frontCopy, problem.Maximization);
  +     var tep = problem != null ? frontCopy.Concat(new[] {problem.ReferencePoint}) : frontCopy;
  +     var refPoint = HypervolumeCalculator.CalculateNadirPoint(tep, problem.Maximization);
        var contributions = Enumerable.Range(0, frontCopy.Count).Select(i => Contribution(frontCopy, i, problem.Maximization, refPoint));
  -     return contributions.Select((value, index) => new { value, index}).OrderBy(x => x.value).First().index;
  +     return contributions.Select((value, index) => new {value, index}).OrderBy(x => x.value).First().index;
      }
…
        var point = front[idx];
        front.RemoveAt(idx);
  -     var contribution = -Hypervolume.Calculate(front.ToArray(), refPoint, maximization);
  +     var contribution = -HypervolumeCalculator.CalculateHypervolume(front.ToArray(), refPoint, maximization);
        front.Insert(idx, point);
        return contribution;
  -   }
  -   private static double[] BuildReferencePoint(IEnumerable<double[]> front, IReadOnlyList<bool> maximization) {
  -     var refPoint = new double[maximization.Count];
  -     foreach (var point in front)
  -       for (var i = 0; i < maximization.Count; i++)
  -         refPoint[i] = maximization[i] ? Math.Min(refPoint[i], point[i]) : Math.Max(refPoint[i], point[i]);
  -     return refPoint;
      }
      #endregion
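The refactored indicator builds its reference point with HypervolumeCalculator.CalculateNadirPoint and measures each point's contribution by removing it and recomputing the hypervolume. The following standalone sketch illustrates that scheme for the special case of two minimized objectives; the class name, method names and the simple 2D sweep are illustrative assumptions, not the HeuristicLab API.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Sketch of "least hypervolume contributor" selection for two minimized
    // objectives. The real code delegates to HypervolumeCalculator; the 2D sweep
    // below is only meant to show the idea.
    public static class HypervolumeSketch {
      // Nadir point of the given points (worst value per objective, minimization).
      public static double[] NadirPoint(IEnumerable<double[]> points) {
        double[] nadir = null;
        foreach (var p in points) {
          if (nadir == null) nadir = (double[])p.Clone();
          else for (var i = 0; i < nadir.Length; i++) nadir[i] = Math.Max(nadir[i], p[i]);
        }
        return nadir;
      }

      // 2D hypervolume dominated by a front w.r.t. a reference point (minimization).
      public static double Hypervolume2D(IEnumerable<double[]> front, double[] refPoint) {
        var sorted = front.Where(p => p[0] < refPoint[0] && p[1] < refPoint[1])
                          .OrderBy(p => p[0]).ToList();
        double volume = 0, lastY = refPoint[1];
        foreach (var p in sorted) {
          if (p[1] < lastY) {
            volume += (refPoint[0] - p[0]) * (lastY - p[1]); // strip added by this point
            lastY = p[1];
          }
        }
        return volume;
      }

      // Index whose removal loses the least hypervolume.
      public static int LeastContributor(IReadOnlyList<double[]> front, double[] referencePoint) {
        var refPoint = NadirPoint(front.Concat(new[] { referencePoint }));
        var total = Hypervolume2D(front, refPoint);
        return Enumerable.Range(0, front.Count)
          .Select(i => new { i, loss = total - Hypervolume2D(front.Where((_, j) => j != i), refPoint) })
          .OrderBy(x => x.loss).First().i;
      }

      public static void Main() {
        var front = new List<double[]> { new[] { 1.0, 4.0 }, new[] { 2.0, 2.2 }, new[] { 2.1, 2.0 }, new[] { 4.0, 1.0 } };
        Console.WriteLine(LeastContributor(front, new[] { 5.0, 5.0 }));
      }
    }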
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/Indicators/MinimalDistanceIndicator.cs
r16807 → r17225

    [StorableType("FBBD4517-164C-4DEE-B87D-49B99172EDF4")]
    internal class MinimalDistanceIndicator : Item, IIndicator {
  -
      #region Constructor and Cloning
      [StorableConstructor]
      protected MinimalDistanceIndicator(StorableConstructorFlag _) : base(_) { }
      protected MinimalDistanceIndicator(MinimalDistanceIndicator original, Cloner cloner) : base(original, cloner) { }
  -   public override IDeepCloneable Clone(Cloner cloner) { return new MinimalDistanceIndicator(this, cloner); }
  +   public override IDeepCloneable Clone(Cloner cloner) {
  +     return new MinimalDistanceIndicator(this, cloner);
  +   }
      public MinimalDistanceIndicator() { }
      #endregion

  -   public int LeastContributer(IReadOnlyList<Individual> front, MultiObjectiveProblem<RealVectorEncoding, RealVector> problem) {
  +   public int LeastContributer(IReadOnlyList<Individual> front, IMultiObjectiveProblemDefinition problem) {
        var extracted = front.Select(x => x.PenalizedFitness).ToArray();
        if (extracted.Length <= 2) return 0;
…
        var res = new double[extracted.Count, extracted.Count];
        for (var i = 0; i < extracted.Count; i++)
  -       for (var j = 0; j < i; j++)
  -         res[i, j] = res[j, i] = Dist(extracted[i], extracted[j]);
  +       for (var j = 0; j < i; j++)
  +         res[i, j] = res[j, i] = Dist(extracted[i], extracted[j]);
        return res;
      }
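The last hunk shows the symmetric distance matrix that MinimalDistanceIndicator operates on (the change there is whitespace-only). A minimal standalone sketch of that helper is given below; the Dist function is not part of the hunk, so a plain Euclidean distance is assumed here.

    using System;

    // Sketch of the symmetric distance matrix built in MinimalDistanceIndicator:
    // only the lower triangle is computed, the upper triangle is mirrored.
    // Dist is assumed to be the Euclidean distance between two objective vectors.
    public static class DistanceMatrixSketch {
      private static double Dist(double[] a, double[] b) {
        var sum = 0.0;
        for (var i = 0; i < a.Length; i++) sum += (a[i] - b[i]) * (a[i] - b[i]);
        return Math.Sqrt(sum);
      }

      public static double[,] CalcDistances(double[][] extracted) {
        var res = new double[extracted.Length, extracted.Length];
        for (var i = 0; i < extracted.Length; i++)
          for (var j = 0; j < i; j++)
            res[i, j] = res[j, i] = Dist(extracted[i], extracted[j]);
        return res;
      }

      public static void Main() {
        var points = new[] { new[] { 0.0, 0.0 }, new[] { 3.0, 4.0 }, new[] { 6.0, 8.0 } };
        var d = CalcDistances(points);
        Console.WriteLine(d[0, 1]); // 5
        Console.WriteLine(d[1, 2]); // 5
      }
    }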
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/MOCMAEvolutionStrategy.cs
r16807 → r17225

    }

  +   public RealVectorEncoding Encoding {
  +     get { return Problem.Encoding; }
  +   }
  +
      #region Storable fields
      [Storable]
…
      private double stepSizeDampeningFactor; //d
      [Storable]
  -   private double targetSuccessProbability; // p^target_succ
  -   [Storable]
  -   private double evolutionPathLearningRate; //cc
  -   [Storable]
  -   private double covarianceMatrixLearningRate; //ccov
  +   private double targetSuccessProbability; // p^target_succ
  +   [Storable]
  +   private double evolutionPathLearningRate; //cc
  +   [Storable]
  +   private double covarianceMatrixLearningRate; //ccov
      [Storable]
      private double covarianceMatrixUnlearningRate;
      [Storable]
      private double successThreshold; //ptresh
  -
      #endregion
…
      }

  -   public double StepSizeLearningRate { get { return stepSizeLearningRate; } }
  -   public double StepSizeDampeningFactor { get { return stepSizeDampeningFactor; } }
  -   public double TargetSuccessProbability { get { return targetSuccessProbability; } }
  -   public double EvolutionPathLearningRate { get { return evolutionPathLearningRate; } }
  -   public double CovarianceMatrixLearningRate { get { return covarianceMatrixLearningRate; } }
  -   public double CovarianceMatrixUnlearningRate { get { return covarianceMatrixUnlearningRate; } }
  -   public double SuccessThreshold { get { return successThreshold; } }
  +   public double StepSizeLearningRate {
  +     get { return stepSizeLearningRate; }
  +   }
  +   public double StepSizeDampeningFactor {
  +     get { return stepSizeDampeningFactor; }
  +   }
  +   public double TargetSuccessProbability {
  +     get { return targetSuccessProbability; }
  +   }
  +   public double EvolutionPathLearningRate {
  +     get { return evolutionPathLearningRate; }
  +   }
  +   public double CovarianceMatrixLearningRate {
  +     get { return covarianceMatrixLearningRate; }
  +   }
  +   public double CovarianceMatrixUnlearningRate {
  +     get { return covarianceMatrixUnlearningRate; }
  +   }
  +   public double SuccessThreshold {
  +     get { return successThreshold; }
  +   }
      #endregion
…
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
  -
      }
      //Solutions
…
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
  -   Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] { 0.5})));
  +   Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] {0.5})));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
  -   var set = new ItemSet<IIndicator> { new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator()};
  +   var set = new ItemSet<IIndicator> {new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator()};
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism on non-dominated solutions", set, set.First()));
    }
…
    }

  -   public override IDeepCloneable Clone(Cloner cloner) { return new MOCMAEvolutionStrategy(this, cloner); }
  +   public override IDeepCloneable Clone(Cloner cloner) {
  +     return new MOCMAEvolutionStrategy(this, cloner);
  +   }
      #endregion

…
      solutions = new Individual[PopulationSize];
      for (var i = 0; i < PopulationSize; i++) {
  -     var x = new RealVector(Problem.Encoding.Length); // Uniform distibution in all dimensions assumed.
  -     var bounds = Problem.Encoding.Bounds;
  -     for (var j = 0; j < Problem.Encoding.Length; j++) {
  +     var x = new RealVector(Encoding.Length); // Uniform distibution in all dimensions assumed.
  +     var bounds = Encoding.Bounds;
  +     for (var j = 0; j < Encoding.Length; j++) {
          var dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
…
      private void InitStrategy() {
        const int lambda = 1;
  -     double n = Problem.Encoding.Length;
  +     double n = Encoding.Length;
        targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
        stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
…
      Results.Add(new Result(ScatterPlotResultName, "A scatterplot displaying the evaluated solutions and (if available) the analytically optimal front", new ParetoFrontScatterPlot()));

  -   var problem = Problem as MultiObjectiveTestFunctionProblem;
  +   var problem = Problem;
      if (problem == null) return;
  -   if (problem.BestKnownFront != null) {
  -     ResultsBestKnownHypervolume = Hypervolume.Calculate(problem.BestKnownFront.ToJaggedArray(), problem.TestFunction.ReferencePoint(problem.Objectives), Problem.Maximization);
  +   var bkf = problem.BestKnownFront == null ? null : problem.BestKnownFront.ToArray();
  +   if (bkf != null && problem.ReferencePoint != null) {
  +     ResultsBestKnownHypervolume = HypervolumeCalculator.CalculateHypervolume(bkf, problem.ReferencePoint, Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
  -   ResultsScatterPlot = new ParetoFrontScatterPlot(new double[0][], new double[0][], problem.BestKnownFront.ToJaggedArray(), Problem.Objectives, Problem.Encoding.Length);
  +   ResultsScatterPlot = new ParetoFrontScatterPlot(new double[0][], new double[0][], bkf, Problem.Objectives, Problem.Encoding.Length);
    }
    #endregion
…
    }
    private RealVector ClosestFeasible(RealVector x) {
  -   var bounds = Problem.Encoding.Bounds;
  +   var bounds = Encoding.Bounds;
      var r = new RealVector(x.Length);
      for (var i = 0; i < x.Length; i++) {
…
    }
    private bool IsFeasable(RealVector offspring) {
  -   var bounds = Problem.Encoding.Bounds;
  +   var bounds = Encoding.Bounds;
      for (var i = 0; i < offspring.Length; i++) {
        var dim = i % bounds.Rows;
…
      //perform a nondominated sort to assign the rank to every element
      int[] ranks;
  -   var fronts = DominationCalculator<Individual>.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);
  +   var fronts = DominationCalculator.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);

      //deselect the highest rank fronts until we would end up with less or equal mu elements
…

    private void Analyze() {
  -   ResultsScatterPlot = new ParetoFrontScatterPlot(solutions.Select(x => x.Fitness).ToArray(), solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives, ResultsScatterPlot.ProblemSize);
  +   var qualities = solutions.Select(x => x.Fitness).ToArray();
  +
  +   //to do check for side effects
  +   ResultsScatterPlot = new ParetoFrontScatterPlot(qualities, solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives, ResultsScatterPlot.ProblemSize);
      ResultsSolutions = solutions.Select(x => x.Mean.ToArray()).ToMatrix();

  -   var problem = Problem as MultiObjectiveTestFunctionProblem;
  +   var problem = Problem as MultiObjectiveProblem<RealVectorEncoding, RealVector>;
      if (problem == null) return;

  -   var front = NonDominatedSelect.GetDominatingVectors(solutions.Select(x => x.Fitness), problem.ReferencePoint.CloneAsArray(), Problem.Maximization, true).ToArray();
  -   if (front.Length == 0) return;
  -   var bounds = problem.Bounds.CloneAsMatrix();
  -   ResultsCrowding = Crowding.Calculate(front, bounds);
  -   ResultsSpacing = Spacing.Calculate(front);
  -   ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
  -   ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
  -   ResultsHypervolume = Hypervolume.Calculate(front, problem.ReferencePoint.CloneAsArray(), Problem.Maximization);
  +
  +   if (qualities.Length == 0) return;
  +   ResultsCrowding = CrowdingCalculator.CalculateCrowding(qualities);
  +   ResultsSpacing = Spacing.Calculate(qualities);
  +
  +
  +   ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistance.Calculate(qualities, problem.BestKnownFront, 1) : double.NaN;
  +   ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(qualities, problem.BestKnownFront, 1) : double.NaN;
  +   ResultsHypervolume = problem.ReferencePoint != null ? HypervolumeCalculator.CalculateHypervolume(qualities, problem.ReferencePoint, Problem.Maximization) : double.NaN;
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume;
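For orientation, the touched hunks include the uniform initialization inside the encoding bounds (with bound rows reused cyclically via j % bounds.Rows) and the strategy constants set in InitStrategy (targetSuccessProbability = 1/(5 + sqrt(lambda)/2), stepSizeDampeningFactor = 1 + n/(2*lambda)). The following standalone sketch reproduces just these two pieces outside of HeuristicLab; all names are local to the sketch, and the remaining MOCMA-ES constants (cc, ccov, ...) are omitted.

    using System;

    // Standalone sketch of the initialization shown in the diff: individuals are
    // drawn uniformly within the per-dimension bounds, and the self-adaptation
    // constants use lambda = 1 as in InitStrategy. Not the HeuristicLab code.
    public static class MocmaInitSketch {
      // bounds[row, 0] = lower, bounds[row, 1] = upper; rows are reused cyclically
      // (j % rows) when the vector is longer than the bounds matrix, as in the diff.
      public static double[] SampleUniform(Random random, int length, double[,] bounds) {
        var rows = bounds.GetLength(0);
        var x = new double[length];
        for (var j = 0; j < length; j++) {
          var dim = j % rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        return x;
      }

      public static void Main() {
        const int lambda = 1;                    // offspring per parent, as in InitStrategy
        double n = 10;                           // problem dimension (Encoding.Length)
        var targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
        var stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
        Console.WriteLine($"p_target_succ = {targetSuccessProbability}, d = {stepSizeDampeningFactor}");

        var bounds = new[,] { { -5.0, 5.0 } };   // one bound row reused for all dimensions
        var x = SampleUniform(new Random(0), (int)n, bounds);
        Console.WriteLine(string.Join(", ", x));
      }
    }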