using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using HEAL.Attic;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.NSGA3 {
    /// <summary>
    /// The Reference Point Based Non-dominated Sorting Genetic Algorithm III was introduced in Deb
    /// et al. 2013. An Evolutionary Many-Objective Optimization Algorithm Using Reference Point
    /// Based Non-dominated Sorting Approach. IEEE Transactions on Evolutionary Computation, 18(4),
    /// pp. 577-601.
    /// </summary>
    [Item("NSGA-III", "The Reference Point Based Non-dominated Sorting Genetic Algorithm III was introduced in Deb et al. 2013. An Evolutionary Many-Objective Optimization Algorithm Using Reference Point Based Non-dominated Sorting Approach. IEEE Transactions on Evolutionary Computation, 18(4), pp. 577-601.")]
    [Creatable(Category = CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 136)]
    [StorableType("07C745F7-A8A3-4F99-8B2C-F97E639F9AC3")]
    public class NSGA3 : BasicAlgorithm {
        private const double EPSILON = 10e-6; // a tiny number that is greater than 0, used to guard against division by zero

        public override bool SupportsPause => false;

        #region ProblemProperties

        public override Type ProblemType {
            get { return typeof(MultiObjectiveBasicProblem<RealVectorEncoding>); }
        }

        public new MultiObjectiveBasicProblem<RealVectorEncoding> Problem {
            get { return (MultiObjectiveBasicProblem<RealVectorEncoding>)base.Problem; }
            set { base.Problem = value; }
        }

        public int NumberOfObjectives => Problem.Maximization.Length;

        #endregion ProblemProperties

        #region Storable fields

        [Storable]
        private IRandom random;

        [Storable]
        private List<Solution> solutions;

        #endregion Storable fields

        #region ParameterAndResultsNames

        // Parameter Names
        private const string SeedName = "Seed";
        private const string SetSeedRandomlyName = "SetSeedRandomly";
        private const string PopulationSizeName = "PopulationSize";
        private const string CrossoverProbabilityName = "CrossoverProbability";
        private const string CrossoverContiguityName = "CrossoverContiguity";
        private const string MutationProbabilityName = "MutationProbability";
        private const string MaximumGenerationsName = "MaximumGenerations";
        private const string DominateOnEqualQualitiesName = "DominateOnEqualQualities";

        // Results Names
        private const string GeneratedReferencePointsResultName = "Generated Reference Points";
        private const string CurrentGenerationResultName = "Generations";
        private const string CurrentFrontResultName = "Pareto Front"; // Do not touch this

        #endregion ParameterAndResultsNames

        #region ParameterProperties

        private IFixedValueParameter<IntValue> SeedParameter {
            get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
        }

        private IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
            get { return (IFixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
        }

        private IFixedValueParameter<IntValue> PopulationSizeParameter {
            get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
        }

        private IFixedValueParameter<PercentValue> CrossoverProbabilityParameter {
            get { return (IFixedValueParameter<PercentValue>)Parameters[CrossoverProbabilityName]; }
        }

        private IFixedValueParameter<DoubleValue> CrossoverContiguityParameter {
            get { return (IFixedValueParameter<DoubleValue>)Parameters[CrossoverContiguityName]; }
        }

        private IFixedValueParameter<PercentValue> MutationProbabilityParameter {
            get { return (IFixedValueParameter<PercentValue>)Parameters[MutationProbabilityName]; }
        }

        private IFixedValueParameter<IntValue> MaximumGenerationsParameter {
            get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
        }
        private IFixedValueParameter<BoolValue> DominateOnEqualQualitiesParameter {
            get { return (IFixedValueParameter<BoolValue>)Parameters[DominateOnEqualQualitiesName]; }
        }

        #endregion ParameterProperties

        #region Properties

        public IntValue Seed => SeedParameter.Value;
        public BoolValue SetSeedRandomly => SetSeedRandomlyParameter.Value;
        public IntValue PopulationSize => PopulationSizeParameter.Value;
        public PercentValue CrossoverProbability => CrossoverProbabilityParameter.Value;
        public DoubleValue CrossoverContiguity => CrossoverContiguityParameter.Value;
        public PercentValue MutationProbability => MutationProbabilityParameter.Value;
        public IntValue MaximumGenerations => MaximumGenerationsParameter.Value;
        public BoolValue DominateOnEqualQualities => DominateOnEqualQualitiesParameter.Value;

        public List<List<Solution>> Fronts { get; private set; }

        public List<ReferencePoint> ReferencePoints { get; private set; }

        // todo: create one property for the Generated Reference Points and one for the current
        // generation's reference points

        #endregion Properties

        #region ResultsProperties

        public DoubleMatrix ResultsGeneratedReferencePoints {
            get { return (DoubleMatrix)Results[GeneratedReferencePointsResultName].Value; }
            set { Results[GeneratedReferencePointsResultName].Value = value; }
        }

        public DoubleMatrix ResultsSolutions {
            get { return (DoubleMatrix)Results[CurrentFrontResultName].Value; }
            set { Results[CurrentFrontResultName].Value = value; }
        }

        public IntValue ResultsCurrentGeneration {
            get { return (IntValue)Results[CurrentGenerationResultName].Value; }
            set { Results[CurrentGenerationResultName].Value = value; }
        }

        #endregion ResultsProperties

        #region Constructors

        public NSGA3() : base() {
            Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
            Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
            Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "The size of the population of individuals.", new IntValue(200)));
            Parameters.Add(new FixedValueParameter<PercentValue>(CrossoverProbabilityName, "The probability that the crossover operator is applied to two parents.", new PercentValue(0.9)));
            Parameters.Add(new FixedValueParameter<DoubleValue>(CrossoverContiguityName, "The contiguity value for the Simulated Binary Crossover that specifies how close a child should be to its parents (larger value means closer). The value must be greater than or equal to 0. Typical values are in the range [2;5]."));
            Parameters.Add(new FixedValueParameter<PercentValue>(MutationProbabilityName, "The probability that the mutation operator is applied to an individual.", new PercentValue(0.05)));
            Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
            Parameters.Add(new FixedValueParameter<BoolValue>(DominateOnEqualQualitiesName, "Flag which determines whether individuals with equal quality values should be treated as dominated.", new BoolValue(false)));
        }

        // Persistence uses this ctor to improve deserialization efficiency. If we would use the
        // default ctor instead this would completely initialize the object (e.g. creating
        // parameters) even though the data is later overwritten by the stored data.
        [StorableConstructor]
        public NSGA3(StorableConstructorFlag _) : base(_) { }

        // Each clonable item must have a cloning ctor (deep cloning, the cloner is used to handle
        // cyclic object references). Don't forget to call the cloning ctor of the base class.
        public NSGA3(NSGA3 original, Cloner cloner) : base(original, cloner) {
            // clone the storable fields
            random = cloner.Clone(original.random);
            solutions = original.solutions != null
                ? new List<Solution>(original.solutions.Select(cloner.Clone))
                : null;
        }

        public override IDeepCloneable Clone(Cloner cloner) {
            return new NSGA3(this, cloner);
        }

        #endregion Constructors

        #region Initialization

        protected override void Initialize(CancellationToken cancellationToken) {
            base.Initialize(cancellationToken);
            // NSGA-III couples the population size to the number of generated reference points
            PopulationSize.Value = ReferencePoint.GetNumberOfGeneratedReferencePoints(Problem.Maximization.Length);
            InitResults();
            // initialize the random number generator and the initial population before the
            // reference points are generated, since the generator is needed for both
            InitFields();
            InitReferencePoints();
            Analyze();
        }

        private void InitReferencePoints() {
            // Generate reference points and add them to results
            ReferencePoints = ReferencePoint.GenerateReferencePoints(random, NumberOfObjectives);
            ResultsGeneratedReferencePoints = Utility.ConvertToDoubleMatrix(ReferencePoints);
        }

        private void InitFields() {
            // honor the Seed/SetSeedRandomly parameters when creating the generator
            if (SetSeedRandomly.Value) Seed.Value = new System.Random().Next();
            random = new MersenneTwister((uint)Seed.Value);
            InitSolutions();
        }

        private void InitSolutions() {
            // solutions are initialized uniformly in [0, 1] in every dimension
            int minBound = 0;
            int maxBound = 1;

            // Initialise solutions
            solutions = new List<Solution>(PopulationSize.Value);
            for (int i = 0; i < PopulationSize.Value; i++) {
                RealVector randomRealVector = new RealVector(Problem.Encoding.Length, random, minBound, maxBound);
                solutions.Add(new Solution(randomRealVector));
                solutions[i].Fitness = Evaluate(solutions[i].Chromosome);
            }
        }

        private void InitResults() {
            Results.Add(new Result(GeneratedReferencePointsResultName, "The initially generated reference points", new DoubleMatrix()));
            Results.Add(new Result(CurrentFrontResultName, "The Pareto Front", new DoubleMatrix()));
            Results.Add(new Result(CurrentGenerationResultName, "The current generation", new IntValue(0)));
        }

        #endregion Initialization

        #region Overriden Methods

        protected override void Run(CancellationToken cancellationToken) {
            while (ResultsCurrentGeneration.Value < MaximumGenerations.Value) {
                cancellationToken.ThrowIfCancellationRequested();
                // create copies of generated reference points (to preserve the original ones for
                // the next generation) maybe todo: use cloner?
                ToNextGeneration(CreateCopyOfReferencePoints());
                ResultsCurrentGeneration.Value++;
                // update the results (Pareto front) after every generation
                Analyze();
            }
        }

        #endregion Overriden Methods

        #region Private Methods

        private List<ReferencePoint> CreateCopyOfReferencePoints() {
            if (ReferencePoints == null) return null;

            List<ReferencePoint> referencePoints = new List<ReferencePoint>();
            foreach (var referencePoint in ReferencePoints)
                referencePoints.Add(new ReferencePoint(referencePoint));
            return referencePoints;
        }

        private void Analyze() {
            ResultsSolutions = solutions.Select(s => s.Chromosome.ToArray()).ToMatrix();

            Problem.Analyze(
                solutions.Select(s => (Individual)new SingleEncodingIndividual(Problem.Encoding, new Scope {
                    Variables = { new Variable(Problem.Encoding.Name, s.Chromosome) }
                })).ToArray(),
                solutions.Select(s => s.Fitness).ToArray(),
                Results,
                random
            );
        }

        /// <summary>
        /// Returns the fitness of the given <paramref name="chromosome"/> by applying the Evaluate
        /// method of the Problem.
        /// </summary>
        /// <param name="chromosome">The real vector to evaluate.</param>
        /// <returns>The objective values of the chromosome.</returns>
        private double[] Evaluate(RealVector chromosome) {
            return Problem.Evaluate(new SingleEncodingIndividual(Problem.Encoding, new Scope {
                Variables = { new Variable(Problem.Encoding.Name, chromosome) }
            }), random);
        }
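        /// <summary>
        /// Performs one NSGA-III generation (cf. Deb et al. 2013): create offspring by
        /// recombination and mutation, merge them with the current population, sort the merged
        /// population into non-dominated fronts and fill the next generation front by front. If
        /// the last included front does not fit completely, the remaining k slots are filled by
        /// normalizing the objectives, associating the solutions with the reference points and
        /// niching.
        /// </summary>
        /// <param name="referencePoints">A working copy of the generated reference points.</param>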
        private void ToNextGeneration(List<ReferencePoint> referencePoints) {
            List<Solution> st = new List<Solution>();
            List<Solution> qt = Mutate(Recombine(solutions));
            List<Solution> rt = Utility.Concat(solutions, qt);
            List<Solution> nextGeneration;

            // Do non-dominated sort
            var qualities = Utility.ToFitnessMatrix(rt);
            // compute the pareto fronts using the DominationCalculator and discard the qualities
            // part in the inner tuples (note: the DominateOnEqualQualities parameter is not applied here)
            Fronts = DominationCalculator<Solution>.CalculateAllParetoFronts(rt.ToArray(), qualities, Problem.Maximization, out int[] rank, true)
                .Select(list => new List<Solution>(list.Select(pair => pair.Item1))).ToList();

            int i = 0;
            List<Solution> lf = null; // last front to be included
            while (i < Fronts.Count && st.Count < PopulationSize.Value) {
                lf = Fronts[i];
                st = Utility.Concat(st, lf);
                i++;
            }

            if (st.Count == PopulationSize.Value) {
                // no selection needs to be done
                nextGeneration = st;
            } else {
                int l = i - 1;
                nextGeneration = new List<Solution>();
                for (int f = 0; f < l; f++)
                    nextGeneration = Utility.Concat(nextGeneration, Fronts[f]);
                int k = PopulationSize.Value - nextGeneration.Count;
                Normalize(st);
                Associate(referencePoints);
                List<Solution> solutionsToAdd = Niching(k, referencePoints);
                nextGeneration = Utility.Concat(nextGeneration, solutionsToAdd);
            }

            // the selected individuals become the population of the next generation
            solutions = nextGeneration;
        }

        private void Normalize(List<Solution> population) {
            // Find the ideal point
            double[] idealPoint = new double[NumberOfObjectives];
            for (int j = 0; j < NumberOfObjectives; j++) {
                // Compute ideal point
                idealPoint[j] = Utility.Min(s => s.Fitness[j], population);

                // Translate objectives
                foreach (var solution in population)
                    solution.Fitness[j] -= idealPoint[j];
            }

            // Find the extreme points
            Solution[] extremePoints = new Solution[NumberOfObjectives];
            for (int j = 0; j < NumberOfObjectives; j++) {
                // Compute extreme points: minimize the achievement scalarizing function (ASF) with
                // a weight vector that emphasizes objective j
                double[] weights = new double[NumberOfObjectives];
                for (int i = 0; i < NumberOfObjectives; i++) weights[i] = EPSILON;
                weights[j] = 1;
                double func(Solution s) => ASF(s.Fitness, weights);
                extremePoints[j] = Utility.ArgMin(func, population);
            }

            // Compute intercepts
            List<double> intercepts = GetIntercepts(extremePoints.ToList());

            // Normalize objectives
            NormalizeObjectives(intercepts, idealPoint);
        }

        private void NormalizeObjectives(List<double> intercepts, double[] idealPoint) {
            for (int f = 0; f < Fronts.Count; f++) {
                foreach (var solution in Fronts[f]) {
                    for (int i = 0; i < NumberOfObjectives; i++) {
                        if (Math.Abs(intercepts[i] - idealPoint[i]) > EPSILON) {
                            solution.Fitness[i] = solution.Fitness[i] / (intercepts[i] - idealPoint[i]);
                        } else {
                            solution.Fitness[i] = solution.Fitness[i] / EPSILON;
                        }
                    }
                }
            }
        }

        private void Associate(List<ReferencePoint> referencePoints) {
            for (int f = 0; f < Fronts.Count; f++) {
                foreach (var solution in Fronts[f]) {
                    // find the reference point for which the perpendicular distance to the current
                    // solution is the lowest
                    var rpAndDist = Utility.MinArgMin(rp => GetPerpendicularDistance(rp.Values, solution.Fitness), referencePoints);
                    // associated reference point
                    var arp = rpAndDist.Item1;
                    // distance to that reference point
                    var dist = rpAndDist.Item2;

                    if (f + 1 != Fronts.Count) {
                        // Todo: Add member for reference point on index min_rp
                        arp.NumberOfAssociatedSolutions++;
                    } else {
                        // Todo: Add potential member for reference point on index min_rp
                        arp.AddPotentialAssociatedSolution(solution, dist);
                    }
                }
            }
        }
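        /// <summary>
        /// Selects the k solutions that fill up the next generation from the potential members of
        /// the last front: repeatedly pick the reference point with the fewest associated
        /// solutions; if it has no potential member it is discarded, otherwise one of its
        /// potential members is chosen (the closest one if the point has no associated solutions
        /// yet, a random one otherwise).
        /// </summary>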
        private List<Solution> Niching(int k, List<ReferencePoint> referencePoints) {
            List<Solution> solutions = new List<Solution>();
            while (solutions.Count != k) {
                ReferencePoint min_rp = FindNicheReferencePoint(referencePoints);

                Solution chosen = SelectClusterMember(min_rp);
                if (chosen == null) {
                    referencePoints.Remove(min_rp);
                } else {
                    min_rp.NumberOfAssociatedSolutions++;
                    min_rp.RemovePotentialAssociatedSolution(chosen);
                    solutions.Add(chosen);
                }
            }

            return solutions;
        }

        private ReferencePoint FindNicheReferencePoint(List<ReferencePoint> referencePoints) {
            // the minimum number of associated solutions for a reference point over all reference points
            int minNumber = Utility.Min(rp => rp.NumberOfAssociatedSolutions, referencePoints);

            // the reference points whose number of associated solutions is equal to minNumber
            List<ReferencePoint> minAssociatedReferencePoints = new List<ReferencePoint>();
            foreach (var referencePoint in referencePoints)
                if (referencePoint.NumberOfAssociatedSolutions == minNumber)
                    minAssociatedReferencePoints.Add(referencePoint);

            if (minAssociatedReferencePoints.Count > 1)
                return minAssociatedReferencePoints[random.Next(minAssociatedReferencePoints.Count)];
            else
                return minAssociatedReferencePoints.Single();
        }

        private Solution SelectClusterMember(ReferencePoint referencePoint) {
            Solution chosen = null;
            if (referencePoint.HasPotentialMember()) {
                if (referencePoint.NumberOfAssociatedSolutions == 0)
                    chosen = referencePoint.FindClosestMember();
                else
                    chosen = referencePoint.RandomMember();
            }
            return chosen;
        }

        // perpendicular distance of the fitness vector from the reference line through the origin
        // in the direction of the reference point, i.e. d = ||fitness - (values.fitness / values.values) * values||
        private double GetPerpendicularDistance(double[] values, double[] fitness) {
            double numerator = 0;
            double denominator = 0;
            for (int i = 0; i < values.Length; i++) {
                numerator += values[i] * fitness[i];
                denominator += Math.Pow(values[i], 2);
            }
            double k = numerator / denominator;

            double d = 0;
            for (int i = 0; i < values.Length; i++) {
                d += Math.Pow(k * values[i] - fitness[i], 2);
            }
            return Math.Sqrt(d);
        }

        // achievement scalarizing function: the maximum over all objectives of x[i] / weight[i]
        private double ASF(double[] x, double[] weight) {
            List<int> dimensions = new List<int>();
            for (int i = 0; i < NumberOfObjectives; i++) dimensions.Add(i);
            double f(int dim) => x[dim] / weight[dim];
            return Utility.Max(f, dimensions);
        }
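        /// <summary>
        /// Computes the intercepts of the hyperplane through the given extreme points with the
        /// objective axes. The hyperplane is found by solving the linear system a * x = (1, ..., 1),
        /// where the rows of a are the fitness vectors of the extreme points; the intercept on
        /// objective axis f is then 1 / x[f]. If the extreme points are not unique, the largest
        /// value of each objective is used as a fallback instead.
        /// </summary>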
        private List<double> GetIntercepts(List<Solution> extremePoints) {
            // Check whether there are duplicate extreme points. This might happen but the original
            // paper does not mention how to deal with it.
            bool duplicate = false;
            for (int i = 0; !duplicate && i < extremePoints.Count; i++) {
                for (int j = i + 1; !duplicate && j < extremePoints.Count; j++) {
                    // maybe todo: override Equals method of solution?
                    duplicate = extremePoints[i].Equals(extremePoints[j]);
                }
            }

            List<double> intercepts = new List<double>();

            if (duplicate) {
                // cannot construct the unique hyperplane (this is a casual method to deal with the condition)
                for (int f = 0; f < NumberOfObjectives; f++) {
                    // extreme_points[f] stands for the individual with the largest value of objective f
                    intercepts.Add(extremePoints[f].Fitness[f]);
                }
            } else {
                // Find the equation of the hyperplane
                List<double> b = new List<double>(); //(pop[0].objs().size(), 1.0);
                for (int i = 0; i < NumberOfObjectives; i++) {
                    b.Add(1.0);
                }

                List<List<double>> a = new List<List<double>>();
                foreach (Solution s in extremePoints) {
                    List<double> aux = new List<double>();
                    for (int i = 0; i < NumberOfObjectives; i++)
                        aux.Add(s.Fitness[i]);
                    a.Add(aux);
                }
                List<double> x = GaussianElimination(a, b);

                // Find intercepts
                for (int f = 0; f < NumberOfObjectives; f++) {
                    intercepts.Add(1.0 / x[f]);
                }
            }

            return intercepts;
        }

        // solves a * x = b by Gaussian elimination; assumes the system is non-singular
        // (no pivoting is performed). Note that a is modified in place.
        private List<double> GaussianElimination(List<List<double>> a, List<double> b) {
            List<double> x = new List<double>();

            int n = a.Count;
            // append the right-hand side as an additional column (augmented matrix)
            for (int i = 0; i < n; i++)
                a[i].Add(b[i]);

            // forward elimination
            for (int @base = 0; @base < n - 1; @base++)
                for (int target = @base + 1; target < n; target++) {
                    double ratio = a[target][@base] / a[@base][@base];
                    for (int term = 0; term < a[@base].Count; term++)
                        a[target][term] = a[target][term] - a[@base][term] * ratio;
                }

            // back substitution
            for (int i = 0; i < n; i++) x.Add(0.0);
            for (int i = n - 1; i >= 0; i--) {
                for (int known = i + 1; known < n; known++)
                    a[i][n] = a[i][n] - a[i][known] * x[known];
                x[i] = a[i][n] / a[i][i];
            }

            return x;
        }

        private List<Solution> Recombine(List<Solution> solutions) {
            List<Solution> childSolutions = new List<Solution>();

            for (int i = 0; i < solutions.Count; i += 2) {
                int parentIndex1 = random.Next(solutions.Count);
                int parentIndex2 = random.Next(solutions.Count);
                // ensure that the parents are not the same object
                if (parentIndex1 == parentIndex2) parentIndex2 = (parentIndex2 + 1) % solutions.Count;
                var parent1 = solutions[parentIndex1];
                var parent2 = solutions[parentIndex2];

                // Do crossover with crossoverProbability == 1 in order to guarantee that a crossover happens
                var children = SimulatedBinaryCrossover.Apply(random, Problem.Encoding.Bounds, parent1.Chromosome, parent2.Chromosome, 1);
                Debug.Assert(children != null);

                var child1 = new Solution(children.Item1);
                var child2 = new Solution(children.Item2);
                child1.Fitness = Evaluate(child1.Chromosome);
                child2.Fitness = Evaluate(child2.Chromosome);

                childSolutions.Add(child1);
                childSolutions.Add(child2);
            }

            return childSolutions;
        }

        private List<Solution> Mutate(List<Solution> solutions) {
            // note: the MutationProbability parameter is not applied here; every offspring is mutated
            foreach (var solution in solutions) {
                UniformOnePositionManipulator.Apply(random, solution.Chromosome, Problem.Encoding.Bounds);
                // the chromosome changed, so the previously computed fitness has to be re-evaluated
                solution.Fitness = Evaluate(solution.Chromosome);
            }

            return solutions;
        }

        #endregion Private Methods
    }
}