#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Random;
using System.Linq;
using HeuristicLab.Problems.TestFunctions.MultiObjective;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("MOCMAS Evolution Strategy (MOCMASES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableClass]
  public class MOCMASEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(MultiObjectiveTestFunctionProblem); }
    }
    public new MultiObjectiveTestFunctionProblem Problem {
      get { return (MultiObjectiveTestFunctionProblem)base.Problem; }
      set { base.Problem = value; }
    }

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string SolutionsResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";
    #endregion

    #region internal variables
    private readonly IRandom random = new MersenneTwister();
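    // The global strategy parameters below follow the notation of Igel, Hansen and Roth
    // ("Covariance Matrix Adaptation for Multi-objective Optimization", cited in the Item
    // attribute) and are derived from the problem dimension in InitStrategy().
    // Each individual additionally carries its own step size and Cholesky factor.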
    private NormalDistributedRandom gauss;
    private MOCMAESIndividual[] solutions;
    private const double penalizeFactor = 1e-6; //TODO OptionalValueParameter
    private double stepSizeLearningRate; //=cp learning rate in [0,1]
    private double stepSizeDampeningFactor; //d
    private double targetSuccessProbability; // p^target_succ
    private double evolutionPathLearningRate; //cc
    private double covarianceMatrixLearningRate; //ccov
    private double covarianceMatrixUnlearningRate; //from shark
    private double successThreshold; //ptresh
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    private IFixedValueParameter<IntValue> PopulationSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    private IFixedValueParameter<IntValue> MaximumGenerationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    private IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    private IFixedValueParameter<DoubleValue> InitialSigmaParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[InitialSigmaName]; }
    }
    private IConstrainedValueParameter<IIndicator> IndicatorParameter {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public double InitialSigma {
      get { return InitialSigmaParameter.Value.Value; }
      set { InitialSigmaParameter.Value.Value = value; }
    }
    public IIndicator Indicator {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }
    #endregion

    #region ResultsProperties
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion

    //QualityIndicators
    private double ResultsHypervolume {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    //Solutions
    private DoubleMatrix ResultsSolutions {
      get { return (DoubleMatrix)Results[SolutionsResultName].Value; }
      set { Results[SolutionsResultName].Value = value; }
    }
    private ScatterPlotContent ResultsScatterPlot {
      get { return (ScatterPlotContent)Results[ScatterPlotResultName].Value; }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region constructors and hlBoilerPlate-code
    [StorableConstructor]
    protected MOCMASEvolutionStrategy(bool deserializing) : base(deserializing) { }

    protected MOCMASEvolutionStrategy(MOCMASEvolutionStrategy original, Cloner cloner) : base(original, cloner) { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new MOCMASEvolutionStrategy(this, cloner);
    }
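    // Rough usage sketch (illustrative only, not part of this class): like other
    // HeuristicLab algorithms, an instance is configured via its properties and started
    // through the IExecutable interface; the parameter values below are arbitrary examples.
    //
    //   var es = new MOCMASEvolutionStrategy {
    //     Problem = new MultiObjectiveTestFunctionProblem(),
    //     PopulationSize = 20,
    //     MaximumGenerations = 500
    //   };
    //   es.Start(); // BasicAlgorithm eventually invokes Run(CancellationToken)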
    public MOCMASEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(InitialSigmaName, "The initial sigma is a single value > 0.", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));

      ItemSet<IIndicator> set = new ItemSet<IIndicator>();
      var default_ = new HypervolumeIndicator();
      set.Add(default_);
      set.Add(new CrowdingIndicator());
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism on non-dominated solutions", set, default_));
    }
    #endregion

    private void AddResult<T>(String name, String desc, T defaultValue) where T : class, IItem {
      Results.Add(new Result(name, desc, defaultValue));
    }

    #region updates
    private void UpdatePopulation(MOCMAESIndividual[] parents) {
      int[] offspringSucess = new int[solutions.Length];
      int offspringLength = parents.Length - solutions.Length;
      for (int i = 0; i < offspringLength; i++) {
        if (parents[i + solutions.Length].selected) {
          UpdateAsOffspring(parents[i + solutions.Length]);
          //TODO this may change if more offspring per parent is allowed
          offspringSucess[i] += MOCMAESIndividual.success;
        }
      }
      for (int i = 0; i < solutions.Length; i++) {
        if (parents[i].selected) {
          UpdateAsParent(parents[i], offspringSucess[i]);
        }
      }
      solutions = new MOCMAESIndividual[solutions.Length];
      int j = 0;
      foreach (MOCMAESIndividual ind in parents) {
        if (ind.selected) {
          solutions[j++] = ind;
        }
      }
    }
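    /// <summary>
    /// Updates a parent after selection (generalized 1/5-th success rule): the smoothed
    /// success probability moves towards 1 if the parent's offspring was selected and
    /// towards 0 otherwise, and the step size is scaled by
    /// exp((successProbability - targetSuccessProbability) / (stepSizeDampeningFactor * (1 - targetSuccessProbability))).
    /// On failure the last step is either actively unlearned from the covariance matrix
    /// (rank-one update with negative weight, while successProbability &lt; successThreshold)
    /// or only the plain decay update (RoundUpdate) is applied.
    /// </summary>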
    private void UpdateAsParent(MOCMAESIndividual c, int v) {
      c.successProbability = (1 - stepSizeLearningRate) * c.successProbability + stepSizeLearningRate * (v == MOCMAESIndividual.success ? 1 : 0);
      c.sigma *= Math.Exp(1 / stepSizeDampeningFactor * (c.successProbability - targetSuccessProbability) / (1 - targetSuccessProbability));
      if (v != MOCMAESIndividual.failure) return;
      if (c.successProbability < successThreshold) {
        double stepNormSqr = c.GetSetpNormSqr();
        double rate = covarianceMatrixUnlearningRate;
        if (stepNormSqr > 1 && 1 < covarianceMatrixUnlearningRate * (2 * stepNormSqr - 1)) {
          rate = 1 / (2 * stepNormSqr - 1);
        }
        RankOneUpdate(c, 1 + rate, -rate, c.lastStep);
      } else {
        RoundUpdate(c);
      }
    }

    private void UpdateAsOffspring(MOCMAESIndividual c) {
      c.successProbability = (1 - stepSizeLearningRate) * c.successProbability + stepSizeLearningRate;
      c.sigma *= Math.Exp(1 / stepSizeDampeningFactor * (c.successProbability - targetSuccessProbability) / (1 - targetSuccessProbability));
      double evolutionpathUpdateWeight = evolutionPathLearningRate * (2.0 - evolutionPathLearningRate);
      if (c.successProbability < successThreshold) {
        c.UpdateEvolutionPath(1 - evolutionPathLearningRate, evolutionpathUpdateWeight);
        RankOneUpdate(c, 1 - covarianceMatrixLearningRate, covarianceMatrixLearningRate, c.evolutionPath);
      } else {
        RoundUpdate(c);
      }
    }

    private void RankOneUpdate(MOCMAESIndividual c, double v1, double v2, RealVector lastStep) {
      c.CholeskyUpdate(lastStep, v1, v2);
    }

    private void RoundUpdate(MOCMAESIndividual c) {
      double evolutionPathUpdateWeight = evolutionPathLearningRate * (2.0 - evolutionPathLearningRate);
      c.UpdateEvolutionPath(1 - evolutionPathLearningRate, 0);
      RankOneUpdate(c, 1 - covarianceMatrixLearningRate + evolutionPathUpdateWeight, covarianceMatrixLearningRate, c.evolutionPath);
    }
    #endregion

    #region selection
    private void Selection(MOCMAESIndividual[] parents, int length) {
      //perform a nondominated sort to assign the rank to every element
      var fronts = NonDominatedSort(parents);
      //deselect the highest rank fronts until we would end up with less or equal mu elements
      int rank = fronts.Count - 1;
      int popSize = parents.Length;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.selected = false;
        popSize -= front.Count;
        rank--;
      }
      //now use the indicator to deselect the worst approximating elements of the last selected front
      var front_ = fronts[rank];
      for (; popSize > length; popSize--) {
        int lc = Indicator.LeastContributer(front_.ToArray(), x => x.penalizedFitness, Problem);
        front_[lc].selected = false;
        front_.Swap(lc, front_.Count - 1);
        front_.RemoveAt(front_.Count - 1);
      }
    }
    #endregion

    #region penalize Box-Constraints
    private void PenalizeEvaluate(IEnumerable<MOCMAESIndividual> offspring) {
      foreach (MOCMAESIndividual child in offspring) PenalizeEvaluate(child);
    }

    private void PenalizeEvaluate(MOCMAESIndividual offspring) {
      if (IsFeasable(offspring.x)) {
        offspring.fitness = Evaluate(offspring.x);
        offspring.penalizedFitness = offspring.fitness;
      } else {
        RealVector t = ClosestFeasible(offspring.x);
        offspring.fitness = Evaluate(t);
        offspring.penalizedFitness = Penalize(offspring.x, t, offspring.fitness);
      }
    }

    private double[] Evaluate(RealVector x) {
      double[] res = Problem.Evaluate(x);
      ResultsEvaluations++;
      return res;
    }

    private double[] Penalize(RealVector x, RealVector t, double[] fitness) {
      double penalty = Penalize(x, t);
      return fitness.Select(v => v + penalty).ToArray();
    }

    private double Penalize(RealVector x, RealVector t) {
      double sum = 0;
      for (int i = 0; i < x.Length; i++) {
        double d = x[i] - t[i];
        sum += d * d;
      }
      return sum;
    }
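    // Box-constraint handling: an infeasible candidate is clamped dimension-wise to the
    // problem bounds (ClosestFeasible), evaluated at the clamped point, and every objective
    // value is penalized by the squared Euclidean distance to that point
    // (see PenalizeEvaluate/Penalize above).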
    private RealVector ClosestFeasible(RealVector x) {
      DoubleMatrix bounds = Problem.Bounds;
      RealVector r = new RealVector(x.Length);
      for (int i = 0; i < x.Length; i++) {
        int dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }

    private bool IsFeasable(RealVector offspring) {
      DoubleMatrix bounds = Problem.Bounds;
      for (int i = 0; i < offspring.Length; i++) {
        int dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

    #region mutation
    private MOCMAESIndividual[] GenerateOffspring() {
      MOCMAESIndividual[] offspring = new MOCMAESIndividual[PopulationSize];
      for (int i = 0; i < PopulationSize; i++) {
        offspring[i] = new MOCMAESIndividual(solutions[i]);
        offspring[i].Mutate(gauss);
      }
      return offspring;
    }
    #endregion

    #region initialization
    private MOCMAESIndividual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var identity = new double[x.Length, x.Length];
      for (int i = 0; i < x.Length; i++) {
        identity[i, i] = 1;
      }
      return new MOCMAESIndividual(x, targetSuccessProbability, InitialSigma, zeros, identity);
    }

    private void InitSolutions() {
      solutions = new MOCMAESIndividual[PopulationSize];
      for (int i = 0; i < PopulationSize; i++) {
        RealVector x = new RealVector(Problem.ProblemSize);
        // Uniform distribution in all dimensions assumed.
        // There is the UniformSolutionCreator associated with the Encoding, but it was considered not usable here
        var bounds = Problem.Bounds;
        for (int j = 0; j < Problem.ProblemSize; j++) {
          int dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
      }
      PenalizeEvaluate(solutions);
    }
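    // The default strategy parameters below follow the recommendations of Igel, Hansen and
    // Roth for the MO-CMA-ES with one offspring per parent (lambda = 1):
    // p_target = 1/(5 + sqrt(lambda)/2), d = 1 + n/(2*lambda),
    // c_p = p_target*lambda/(2 + p_target*lambda), c_c = 2/(n + 2), c_cov = 2/(n^2 + 6).
    // The unlearning rate follows the Shark implementation (see the field comment above);
    // the success threshold is set to 0.44.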
    private void InitStrategy() {
      int lambda = 1;
      double n = Problem.ProblemSize;
      gauss = new NormalDistributedRandom(random, 0, 1);
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }

    private void InitResults() {
      AddResult(IterationsResultName, "The number of generations evaluated", new IntValue(0));
      AddResult(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0));
      AddResult(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the Problem", new DoubleValue(0.0));
      AddResult(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the Problem", new DoubleValue(0.0));
      AddResult(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the Problem", new DoubleValue(Double.NaN));
      AddResult(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(Double.NaN));
      AddResult(GenerationalDistanceResultName, "The generational distance to an optimal pareto front defined in the Problem", new DoubleValue(Double.NaN));
      AddResult(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal pareto front defined in the Problem", new DoubleValue(Double.NaN));
      AddResult(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0));
      AddResult(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      AddResult(TimetableResultName, "Different quality measures in a time series", table);
      AddResult(SolutionsResultName, "The current front", new DoubleMatrix());
      AddResult(ScatterPlotResultName, "A scatterplot displaying the evaluated solutions and (if available) the analytically optimal front", new ScatterPlotContent(null, null, null, 2));

      if (Problem.BestKnownFront != null) {
        ResultsBestKnownHypervolume = Hypervolume.Calculate(Utilities.ToArray(Problem.BestKnownFront), Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      ResultsScatterPlot = new ScatterPlotContent(null, null, Utilities.ToArray(Problem.BestKnownFront), Problem.Objectives);
    }
    #endregion

    #region analyze
    private void AnalyzeSolutions() {
      //solutions
      ResultsScatterPlot = new ScatterPlotContent(solutions.Select(x => x.fitness).ToArray(), solutions.Select(x => ToArray(x.x)).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives);
      ResultsSolutions = ToMatrix(solutions.Select(x => ToArray(x.x)));
      AnalyzeQualityIndicators();
    }

    private void AnalyzeQualityIndicators() {
      //var front = NonDominatedSelect.selectNonDominated(solutions.Select(x => x.fitness), Problem.Maximization, true);
      var front = NonDominatedSelect.GetDominatingVectors(solutions.Select(x => x.fitness), Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization, true);
      //front = NonDominatedSelect.removeNonReferenceDominatingVectors(front, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization, false);
      var bounds = ToArray(Problem.Bounds);
      ResultsCrowding = Crowding.Calculate(front, bounds);
      ResultsSpacing = Spacing.Calculate(front);
      ResultsGenerationalDistance = Problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, Utilities.ToArray(Problem.BestKnownFront), 1) : Double.NaN;
      ResultsInvertedGenerationalDistance = Problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, Utilities.ToArray(Problem.BestKnownFront), 1) : Double.NaN;
      ResultsHypervolume = Hypervolume.Calculate(front, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization);
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume; //Take best of this run or current HV???

      //Datalines
      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);
    }
    #endregion
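    // Main generational loop: every parent produces one offspring by sampling from its own
    // Gaussian (GenerateOffspring/Mutate), offspring are evaluated with box-constraint
    // penalties, parents and offspring are merged, indicator-based selection keeps
    // PopulationSize individuals, and finally the strategy parameters of the selected
    // individuals are adapted (UpdatePopulation).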
    //MU = populationSize
    #region mainloop
    protected override void Run(CancellationToken cancellationToken) {
      // Set up the algorithm
      if (SetSeedRandomly) Seed = new System.Random().Next();
      random.Reset(Seed);
      InitResults();
      InitStrategy();
      InitSolutions();

      // Loop until iteration limit reached or canceled.
      for (ResultsIterations = 1; ResultsIterations < MaximumGenerations; ResultsIterations++) {
        try {
          Iterate();
          cancellationToken.ThrowIfCancellationRequested();
        } finally {
          AnalyzeSolutions();
        }
      }
    }

    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }

    private void Iterate() {
      MOCMAESIndividual[] offspring = GenerateOffspring();
      PenalizeEvaluate(offspring);
      var parents = Merge(solutions, offspring);
      Selection(parents, solutions.Length);
      UpdatePopulation(parents);
    }
    #endregion

    private class MOCMAESIndividual {
      public static readonly int success = 1;
      public static readonly int noSuccess = 2;
      public static readonly int failure = 3;

      //Chromosome
      public RealVector x;
      public double successProbability;
      public double sigma; //stepsize
      public RealVector evolutionPath; // pc
      public RealVector lastStep;
      public RealVector lastZ;
      private double[,] lowerCholesky;

      //Phenotype
      public double[] fitness;
      public double[] penalizedFitness;
      public bool selected = true;
      internal double rank;

      /// <summary>
      /// </summary>
      /// <param name="x"></param>
      /// <param name="p_succ">has to be ptargetsucc</param>
      /// <param name="sigma">initialSigma</param>
      /// <param name="pc">has to be a 0-vector with correct length</param>
      /// <param name="C">has to be a symmetric positive definite covariance matrix</param>
      public MOCMAESIndividual(RealVector x, double p_succ, double sigma, RealVector pc, double[,] C) {
        this.x = x;
        this.successProbability = p_succ;
        this.sigma = sigma;
        this.evolutionPath = pc;
        CholeskyDecomposition(C);
      }

      private void CholeskyDecomposition(double[,] C) {
        if (!alglib.spdmatrixcholesky(ref C, C.GetLength(0), false)) {
          throw new ArgumentException("Covariance matrix is not symmetric positive definite");
        }
        lowerCholesky = (double[,])C.Clone();
      }

      public MOCMAESIndividual(MOCMAESIndividual other) {
        this.successProbability = other.successProbability;
        this.sigma = other.sigma;
        this.evolutionPath = (RealVector)other.evolutionPath.Clone();
        this.x = (RealVector)other.x.Clone();
        this.lowerCholesky = (double[,])other.lowerCholesky.Clone();
      }

      public void UpdateEvolutionPath(double learningRate, double updateWeight) {
        updateWeight = Math.Sqrt(updateWeight);
        for (int i = 0; i < evolutionPath.Length; i++) {
          evolutionPath[i] *= learningRate;
          evolutionPath[i] += updateWeight * lastStep[i];
        }
      }

      public double GetSetpNormSqr() {
        double sum = 0;
        foreach (double d in lastZ) {
          sum += d * d;
        }
        return sum;
      }
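      /// <summary>
      /// Rank-one update of the lower Cholesky factor L: after the update, L*L^T equals
      /// alpha * (old L*L^T) + beta * v*v^T. Updating the factor directly avoids
      /// re-decomposing the covariance matrix in every generation.
      /// </summary>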
      public void CholeskyUpdate(RealVector v, double alpha, double beta) {
        int n = v.Length;
        double[] temp = new double[n];
        for (int i = 0; i < n; i++) temp[i] = v[i];
        double betaPrime = 1;
        double a = Math.Sqrt(alpha);
        for (int j = 0; j < n; j++) {
          double Ljj = a * lowerCholesky[j, j];
          double dj = Ljj * Ljj;
          double wj = temp[j];
          double swj2 = beta * wj * wj;
          double gamma = dj * betaPrime + swj2;
          double x = dj + swj2 / betaPrime;
          if (x < 0.0) throw new ArgumentException("Update makes covariance matrix indefinite");
          double nLjj = Math.Sqrt(x);
          lowerCholesky[j, j] = nLjj;
          betaPrime += swj2 / dj;
          if (j + 1 < n) {
            for (int i = j + 1; i < n; i++) {
              lowerCholesky[i, j] *= a;
            }
            for (int i = j + 1; i < n; i++) {
              temp[i] -= wj / Ljj * lowerCholesky[i, j];
            }
            if (gamma == 0) continue;
            for (int i = j + 1; i < n; i++) {
              lowerCholesky[i, j] *= nLjj / Ljj;
            }
            for (int i = j + 1; i < n; i++) {
              lowerCholesky[i, j] += (nLjj * beta * wj / gamma) * temp[i];
            }
          }
        }
      }

      public void Mutate(NormalDistributedRandom gauss) {
        //sample a random z from N(0,I) where I is the identity matrix
        lastZ = new RealVector(x.Length);
        int n = lastZ.Length;
        for (int i = 0; i < n; i++) {
          lastZ[i] = gauss.NextDouble();
        }
        //matrix multiplication: lastStep = lowerCholesky * lastZ
        lastStep = new RealVector(x.Length);
        for (int i = 0; i < n; i++) {
          double sum = 0;
          for (int j = 0; j <= i; j++) {
            sum += lowerCholesky[i, j] * lastZ[j];
          }
          lastStep[i] = sum;
        }
        //add the step to x weighted by the step size
        for (int i = 0; i < x.Length; i++) {
          x[i] += sigma * lastStep[i];
        }
      }
    }

    //blatantly stolen from HeuristicLab.Optimization.Operators.FastNonDominatedSort
    #region FastNonDominatedSort
    private enum DominationResult { Dominates, IsDominated, IsNonDominated };

    private List<List<MOCMAESIndividual>> NonDominatedSort(MOCMAESIndividual[] individuals) {
      bool dominateOnEqualQualities = false;
      bool[] maximization = Problem.Maximization;
      if (individuals == null) throw new InvalidOperationException(Name + ": No qualities found.");
      int populationSize = individuals.Length;

      List<List<MOCMAESIndividual>> fronts = new List<List<MOCMAESIndividual>>();
      Dictionary<MOCMAESIndividual, List<int>> dominatedScopes = new Dictionary<MOCMAESIndividual, List<int>>();
      int[] dominationCounter = new int[populationSize];

      for (int pI = 0; pI < populationSize - 1; pI++) {
        MOCMAESIndividual p = individuals[pI];
        List<int> dominatedScopesByp;
        if (!dominatedScopes.TryGetValue(p, out dominatedScopesByp))
          dominatedScopes[p] = dominatedScopesByp = new List<int>();
        for (int qI = pI + 1; qI < populationSize; qI++) {
          DominationResult test = Dominates(individuals[pI], individuals[qI], maximization, dominateOnEqualQualities);
          if (test == DominationResult.Dominates) {
            dominatedScopesByp.Add(qI);
            dominationCounter[qI] += 1;
          } else if (test == DominationResult.IsDominated) {
            dominationCounter[pI] += 1;
            if (!dominatedScopes.ContainsKey(individuals[qI]))
              dominatedScopes.Add(individuals[qI], new List<int>());
            dominatedScopes[individuals[qI]].Add(pI);
          }
          if (pI == populationSize - 2 && qI == populationSize - 1 && dominationCounter[qI] == 0) {
            AddToFront(individuals[qI], fronts, 0);
          }
        }
        if (dominationCounter[pI] == 0) {
          AddToFront(p, fronts, 0);
        }
      }

      int i = 0;
      while (i < fronts.Count && fronts[i].Count > 0) {
        List<MOCMAESIndividual> nextFront = new List<MOCMAESIndividual>();
        foreach (MOCMAESIndividual p in fronts[i]) {
          List<int> dominatedScopesByp;
          if (dominatedScopes.TryGetValue(p, out dominatedScopesByp)) {
            for (int k = 0; k < dominatedScopesByp.Count; k++) {
              int dominatedScope = dominatedScopesByp[k];
              dominationCounter[dominatedScope] -= 1;
              if (dominationCounter[dominatedScope] == 0) {
                nextFront.Add(individuals[dominatedScope]);
              }
            }
          }
        }
        i += 1;
        fronts.Add(nextFront);
      }

      MOCMAESIndividual[] result = new MOCMAESIndividual[individuals.Length];
      for (i = 0; i < fronts.Count; i++) {
        foreach (var p in fronts[i]) {
          p.rank = i;
        }
      }
      return fronts;
    }
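    /// <summary>Adds p to front i, creating that front first if it does not exist yet.</summary>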
    private static void AddToFront(MOCMAESIndividual p, List<List<MOCMAESIndividual>> fronts, int i) {
      if (i == fronts.Count) fronts.Add(new List<MOCMAESIndividual>());
      fronts[i].Add(p);
    }

    private static DominationResult Dominates(MOCMAESIndividual left, MOCMAESIndividual right, bool[] maximizations, bool dominateOnEqualQualities) {
      return Dominates(left.penalizedFitness, right.penalizedFitness, maximizations, dominateOnEqualQualities);
    }

    private static DominationResult Dominates(double[] left, double[] right, bool[] maximizations, bool dominateOnEqualQualities) {
      //mkommend Caution: do not use LINQ.SequenceEqual for comparing the two quality arrays (left and right) due to performance reasons
      if (dominateOnEqualQualities) {
        var equal = true;
        for (int i = 0; i < left.Length; i++) {
          if (left[i] != right[i]) {
            equal = false;
            break;
          }
        }
        if (equal) return DominationResult.Dominates;
      }

      bool leftIsBetter = false, rightIsBetter = false;
      for (int i = 0; i < left.Length; i++) {
        if (IsDominated(left[i], right[i], maximizations[i])) rightIsBetter = true;
        else if (IsDominated(right[i], left[i], maximizations[i])) leftIsBetter = true;
        if (leftIsBetter && rightIsBetter) break;
      }

      if (leftIsBetter && !rightIsBetter) return DominationResult.Dominates;
      if (!leftIsBetter && rightIsBetter) return DominationResult.IsDominated;
      return DominationResult.IsNonDominated;
    }

    private static bool IsDominated(double left, double right, bool maximization) {
      return maximization && left < right || !maximization && left > right;
    }
    #endregion

    #region conversions
    private T[] Merge<T>(T[] parents, T[] offspring) {
      T[] merged = new T[parents.Length + offspring.Length];
      for (int i = 0; i < parents.Length; i++) {
        merged[i] = parents[i];
      }
      for (int i = 0; i < offspring.Length; i++) {
        merged[i + parents.Length] = offspring[i];
      }
      return merged;
    }

    public double[] ToArray(RealVector r) {
      double[] d = new double[r.Length];
      for (int i = 0; i < r.Length; i++) {
        d[i] = r[i];
      }
      return d;
    }

    public DoubleMatrix ToMatrix(IEnumerable<double[]> data) {
      var d2 = data.ToArray();
      DoubleMatrix mat = new DoubleMatrix(d2.Length, d2[0].Length);
      for (int i = 0; i < mat.Rows; i++) {
        for (int j = 0; j < mat.Columns; j++) {
          mat[i, j] = d2[i][j];
        }
      }
      return mat;
    }

    public double[,] ToArray(DoubleMatrix data) {
      double[,] mat = new double[data.Rows, data.Columns];
      for (int i = 0; i < data.Rows; i++) {
        for (int j = 0; j < data.Columns; j++) {
          mat[i, j] = data[i, j];
        }
      }
      return mat;
    }
    #endregion
  }
}