#region License Information
/* HeuristicLab
* Author: Kaifeng Yang
*
* Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
// SMS-EMOA
/*
Implementation of a real-coded SMS-EMOA algorithm.
This implementation follows the description in: 'M. Emmerich, N. Beume, and B. Naujoks.
An EMO Algorithm Using the Hypervolume Measure as Selection Criterion. EMO 2005.'
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using HEAL.Attic;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Random;
using CancellationToken = System.Threading.CancellationToken;
/* This is an SMS-EMOA implementation for HL. The main structure and the interfaces with HL
 * are adapted from the MOEA/D implementation in HL, written by Dr. Bogdan Burlacu. The
 * S-metric selection operator was adapted from Kaifeng's SMS-EMOA MATLAB toolbox. The
 * computational complexity of the HVC computation here is at least $O(n^2 \log n)$ in the
 * 2-D and 3-D cases; it should be reduced to $\Theta(n \log n)$ (a 2-D sketch is included
 * after the constructors below).
 *
 * This algorithm assumes:
 * 1. Minimization problems. For maximization problems, negate the objective values.
 *
 * This algorithm works on:
 * 1. Continuous, discrete, and mixed-integer MOO problems. For different problem types, the
 *    operators should be adjusted accordingly.
 * 2. Both multi-objective and many-objective problems. For many-objective problems, the
 *    bottleneck is the computational complexity of the hypervolume (HV).
 *
 * This is the basic implementation of SMS-EMOA as proposed by Michael Emmerich et al. Some
 * potential improvements are:
 * 1. A dynamic reference point strategy.
 * 2. A normalized fitness value strategy (desirability function); see the CEC paper by Yali,
 *    Longmei, Kaifeng, and Michael Emmerich.
 * 3. A faster HVC calculation, at least in the 2-D and 3-D cases.
 * 4. A multiple-point strategy when $\lambda > 1$.
 * 5. A multiple-reference-points strategy; see Zhiwei Yang et al., ICNC 2016.
 * 6. HVC approximation by R2 for many-objective cases; see Ishibuchi 2019, IEEE TEC.
 * 7. Maybe: see maps.
 *
 * Global parameters:
 * 1. population
 *
 * Many thanks to Bogdan Burlacu and Johannes Karder, especially Bogdan for his explanation,
 * help, and support.
 */
namespace HeuristicLab.Algorithms.DynamicALPS {
// Item attribute (HeuristicLab.Core): the indexed name and the description of the algorithm/item in HL.
[Item("DynamicALPS-MainLoop", "DynamicALPS-MainLoop implementation adapted from SMS-EMOA in HL.")]
// Creatable attribute (HeuristicLab.Core): defines the GUI category of this class.
[Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 125)]
// StorableType attribute (HEAL.Attic): defines the GUID of this class.
[StorableType("A7F33D16-3495-43E8-943C-8A919123F541")]
public class DynamicALPSAlgorithm : DynamicALPSAlgorithmBase {
public DynamicALPSAlgorithm() { }
protected DynamicALPSAlgorithm(DynamicALPSAlgorithm original, Cloner cloner) : base(original, cloner) { }
public override IDeepCloneable Clone(Cloner cloner) {
return new DynamicALPSAlgorithm(this, cloner);
}
[StorableConstructor]
protected DynamicALPSAlgorithm(StorableConstructorFlag deserializing) : base(deserializing) { }
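// The S-metric selection discussed in the file header discards the individual with the
// smallest hypervolume contribution (HVC). As noted there, the contributions can be computed
// in O(n log n) in the 2-D case. The helper below is a minimal illustrative sketch of that
// special case, assuming minimization, a mutually nondominated front, and points that all
// dominate the reference point; it is an assumption of how the 2-D fast path could look and
// is NOT wired into the algorithm, which uses the HL Hypervolume and DominationCalculator
// infrastructure instead.
private static double[] HypervolumeContributions2D(double[][] front, double[] reference) {
int n = front.Length;
// sort indices ascending by f1; on a nondominated 2-D front f2 is then descending
var order = Enumerable.Range(0, n).OrderBy(i => front[i][0]).ToArray();
var contributions = new double[n];
for (int k = 0; k < n; k++) {
double right = k + 1 < n ? front[order[k + 1]][0] : reference[0]; // f1 of the right neighbor, or the reference point
double upper = k > 0 ? front[order[k - 1]][1] : reference[1]; // f2 of the left neighbor, or the reference point
// exclusive contribution: the box spanned between the point and its neighbors
contributions[order[k]] = (right - front[order[k]][0]) * (upper - front[order[k]][1]);
}
return contributions;
}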
protected override void Run(CancellationToken cancellationToken) {
if (previousExecutionState != ExecutionState.Paused) { // the initialization is implemented in the base class, DynamicALPSAlgorithmBase
InitializeAlgorithm(cancellationToken);
}
var populationSize = PopulationSize.Value;
bool[] maximization = ((BoolArray)Problem.MaximizationParameter.ActualValue).CloneAsArray();
var crossover = Crossover;
var crossoverProbability = CrossoverProbability.Value;
var mutator = Mutator;
var mutationProbability = MutationProbability.Value;
var evaluator = Problem.Evaluator;
var analyzer = Analyzer;
var rand = RandomParameter.Value;
var maximumEvaluatedSolutions = MaximumEvaluatedSolutions.Value;
int lambda = 1; // number of offspring generated per iteration
int nLayerALPS = ALPSLayers.Value;
int counterLayerALPS = 0;
// IMPROVE: the age-gap schedule is hard-coded as ageGapArray[i] = 20 * 2^i
// (multipliers 1, 2, 4, 8, 16, 32, 64, 128, 256, 512); a generator sketch is included at the bottom of the class.
int[] ageGapArray = new int[] { 20, 40, 80, 160, 320, 640, 1280, 2560, 5120, 10240 };
int[] numberDiscard = new int[10];
activeLayer = new bool[nLayerALPS];
layerCrossoverProbability = new double[nLayerALPS];
int[][] ageMatrix = new int[nLayerALPS][]; // layer * population size
// cancellation token for the inner operations which should not be immediately cancelled
var innerToken = new CancellationToken();
layerPopulation = new IDynamicALPSSolution[nLayerALPS][];
layerOffspringPopulation = new IDynamicALPSSolution[nLayerALPS][];
layerJointPopulation = new IDynamicALPSSolution[nLayerALPS][];
layerDiscardPopulation = new IDynamicALPSSolution[nLayerALPS][];
layerDiscardIndivdual = new IDynamicALPSSolution[nLayerALPS];
layerPopulation[counterLayerALPS] = new IDynamicALPSSolution[populationSize];
// BUG: the offspring size should vary across layers.
layerOffspringPopulation[counterLayerALPS] = new IDynamicALPSSolution[lambda];
// stores the individuals whose age exceeds the age gap of this layer
layerDiscardPopulation[counterLayerALPS] = new IDynamicALPSSolution[populationSize];
population.CopyTo(layerPopulation[counterLayerALPS], 0);
activeLayer[counterLayerALPS] = true;
layerCrossoverProbability[counterLayerALPS] = 0;
for (int i = 0; i < nLayerALPS; i++) {
activeLayer[i] = (i == 0); // only the bottom layer starts active
numberDiscard[i] = 0;
layerCrossoverProbability[i] = CrossoverProbability.Value;
}
int bottomLayerIDX = 0;
int godScope = 0;
// Main loop
while (evaluatedSolutions < maximumEvaluatedSolutions && !cancellationToken.IsCancellationRequested) {
for (int i = 0; i < nLayerALPS; i++) { // loop for every layer
int discardedIndividualIndex = 0;
var currentLayerIDX = i;
var nextLayerIDX = i + 1;
if (nextLayerIDX == nLayerALPS) {
nextLayerIDX = bottomLayerIDX;
godScope = 1;
}
else { godScope = 0; }
if (activeLayer[currentLayerIDX]) { // process only active layers
evaluatedSolutions = SMSEMOA(populationSize, lambda, currentLayerIDX); // get the offspring -- layerJointPopulation
if (evaluatedSolutions >= maximumEvaluatedSolutions) { break; } // check evaluation budget
ageMatrix[currentLayerIDX] = layerJointPopulation[currentLayerIDX].Select(x => x.Age).ToArray(); // get age info of the current layer joint population
#region version 1: use the average age to initialize the next layer
if (UseAverageAge.Value) {
if (!activeLayer[nextLayerIDX]) { // next layer is not active yet
if (ageMatrix[currentLayerIDX].Average() > ageGapArray[currentLayerIDX]) { // average age exceeds the age gap: initialize the next layer
InitializeLayer(nextLayerIDX, populationSize, lambda); // initialize the layerPopulation for the next layer and set its active flag
layerJointPopulation[currentLayerIDX].CopyTo(layerJointPopulation[nextLayerIDX], 0);
SmetricSelection(lambda, nextLayerIDX); // layerPopulation is updated here
}
else {// the next layer is not initialized
SmetricSelection(lambda, currentLayerIDX);
}
}
else { // next layer is active
if (activeLayer.All(x => x) && godScope == 1) { // all layers are active and this is the top layer: move the discarded individual to the bottom layer and reset its age
SmetricSelection(lambda, currentLayerIDX);
layerPopulation[bottomLayerIDX].CopyTo(layerJointPopulation[bottomLayerIDX], 0);
layerDiscardIndivdual[currentLayerIDX].Age = 0;
layerJointPopulation[bottomLayerIDX][populationSize] = layerDiscardIndivdual[currentLayerIDX];
SmetricSelection(lambda, bottomLayerIDX);
}
else {
if (ageMatrix[currentLayerIDX].Max() > ageGapArray[currentLayerIDX]) { // mature: move the oldest individual to the next layer
discardedIndividualIndex = ageMatrix[currentLayerIDX].ToList().IndexOf(ageMatrix[currentLayerIDX].Max());
layerPopulation[nextLayerIDX].CopyTo(layerJointPopulation[nextLayerIDX], 0);
layerJointPopulation[currentLayerIDX].Where((x, idx) => idx == discardedIndividualIndex).ToArray().CopyTo(layerJointPopulation[nextLayerIDX], populationSize);
SmetricSelection(lambda, nextLayerIDX); // AGE and HVC
layerJointPopulation[currentLayerIDX].Where((x, idx) => idx != discardedIndividualIndex).ToArray().CopyTo(layerPopulation[currentLayerIDX], 0); // discard the individual in the current layer
}
else { // next layer is active, but no individual has matured yet
SmetricSelection(lambda, currentLayerIDX);
}
}
}
#endregion
}
else {
#region version 2: use individual age to initialize the next layer
if (ageMatrix[currentLayerIDX].Max() > ageGapArray[currentLayerIDX]) { // mature: move the oldest individual
discardedIndividualIndex = ageMatrix[currentLayerIDX].ToList().IndexOf(ageMatrix[currentLayerIDX].Max()); // NOTE: if two individuals shared the maximal age, IndexOf would pick the first; this cannot happen in SMS-EMOA with lambda = 1
layerDiscardPopulation[currentLayerIDX][numberDiscard[currentLayerIDX]] = layerJointPopulation[currentLayerIDX][discardedIndividualIndex]; // buffer the matured individual for the next layer
layerJointPopulation[currentLayerIDX].Where((x, idx) => idx != discardedIndividualIndex).ToArray().CopyTo(layerPopulation[currentLayerIDX], 0); // discard the individual in the current layer
numberDiscard[currentLayerIDX] += 1;
if (!activeLayer[nextLayerIDX]) { // next layer is not active (nextLayerIDX wraps to the bottom layer, so it cannot go out of range)
if (numberDiscard[currentLayerIDX] == populationSize) {
InitializeLayer(nextLayerIDX, populationSize, lambda); // initialize the layerPopulation for the next layer and set its active flag
layerDiscardPopulation[currentLayerIDX].CopyTo(layerPopulation[nextLayerIDX], 0);
numberDiscard[currentLayerIDX] = 0;
}
else {
// the number of matured individuals is still smaller than the next layer's population size: keep buffering
}
}
else { // next layer is active
layerPopulation[nextLayerIDX].CopyTo(layerJointPopulation[nextLayerIDX], 0);
layerJointPopulation[currentLayerIDX].Where((x, idx) => idx == discardedIndividualIndex).ToArray().CopyTo(layerJointPopulation[nextLayerIDX], populationSize);
SmetricSelection(lambda, nextLayerIDX); // AGE and HVC
numberDiscard[currentLayerIDX] = 0;
}
}
layerPopulation[currentLayerIDX].CopyTo(population, 0); // BUG: should copy all the active layers to population.
#endregion
}
}
// inactive layers are skipped
}
int numberOfActiveLayers = activeLayer.Count(c => c);
population = new IDynamicALPSSolution[populationSize * numberOfActiveLayers];
for (int i = 0; i < numberOfActiveLayers; i++) {
layerPopulation[i].CopyTo(population, i * populationSize);
}
// run analyzer
var analyze = executionContext.CreateChildOperation(analyzer, globalScope);
ExecuteOperation(executionContext, innerToken, analyze);
// update Pareto-front approximation sets
// UpdateParetoFronts();
// Show some results in the GUI.
Results.AddOrUpdateResult("IdealPoint", new DoubleArray(IdealPoint));
Results.AddOrUpdateResult("NadirPoint", new DoubleArray(NadirPoint));
Results.AddOrUpdateResult("Evaluated Solutions", new IntValue(evaluatedSolutions));
// see if we already have a result collection for the layer results, and reuse that one
ResultCollection allLayerResults;
if (Results.TryGetValue("LayerResults", out IResult allLayerResultsResult)) {
allLayerResults = (ResultCollection)allLayerResultsResult.Value;
}
else {
allLayerResults = new ResultCollection();
Results.AddOrUpdateResult("LayerResults", allLayerResults);
}
// run layer analyzers
for (int i = 0; i < activeLayer.Length; ++i) {
if (!activeLayer[i]) continue;
var scope = new Scope($"Layer {i}");
var layer = layerPopulation[i];
var tmp = UpdateParetoFronts(layer, IdealPoint);
// update the results in a way that avoids creating a new result collection at each iteration
if (allLayerResults.TryGetValue(scope.Name, out IResult lRes)) {
var lr = (ResultCollection)lRes.Value;
foreach (var result in tmp) {
lr.AddOrUpdateResult(result.Name, result.Value);
}
}
else {
allLayerResults.AddOrUpdateResult(scope.Name, tmp);
}
var layerResults = (ResultCollection)allLayerResults[scope.Name].Value;
// only store the decision vectors; the objective values are reported in the data table below
var layerSolutions = new ItemArray(layer.Select(x => (IItem)((IScope)x.Individual).Variables["RealVector"].Value.Clone()));
var layerAges = new ItemArray(layer.Select(x => new IntValue(x.Age)));
//layerResults.AddOrUpdateResult("Objective values", layerQualities);
layerResults.AddOrUpdateResult("Decision vectors", layerSolutions);
layerResults.AddOrUpdateResult("Age", layerAges);
var tableObjectives = new DataTable("Objective values");
for (int j = 0; j < IdealPoint.Length; ++j) {
var row = new DataRow($"Objective {j + 1}");
row.Values.AddRange(layer.Select(x => x.Qualities[j]));
tableObjectives.Rows.Add(row);
}
layerResults.AddOrUpdateResult("Objective values", tableObjectives);
// historical HV
DataTable hyperVolumeHistory;
if (layerResults.TryGetValue("Layer Hypervolume History", out IResult res)) {
hyperVolumeHistory = (DataTable)res.Value;
}
else {
hyperVolumeHistory = new DataTable("Layer Hypervolume History");
var hrow = new DataRow($"Layer {i}");
hrow.VisualProperties = new DataRowVisualProperties {
StartIndexZero = false,
};
hyperVolumeHistory.Rows.Add(hrow);
layerResults.AddOrUpdateResult("Layer Hypervolume History", hyperVolumeHistory);
}
var reference = ReferencePoint.ToArray();
var layerQualities = layer.Select(x => x.Qualities).ToArray();
var layerPF = DominationCalculator.CalculateBestParetoFront(layer, layerQualities, maximization);
var nondominatedLayer = NonDominatedSelect.GetDominatingVectors(layerPF.Select(x => x.Item2), reference, maximization, false);
var layerHV = nondominatedLayer.Any() ? Hypervolume.Calculate(nondominatedLayer, reference, maximization) : 0;
hyperVolumeHistory.Rows[$"Layer {i}"].Values.Add(layerHV);
// historical crossover probability
DataTable crossoverProbabilityHistory;
if (layerResults.TryGetValue("CrossoverProbability History", out IResult resPm)) {
crossoverProbabilityHistory = (DataTable)resPm.Value;
}
else {
crossoverProbabilityHistory = new DataTable("CrossoverProbability History");
var hrowPm = new DataRow($"Layer {i}");
hrowPm.VisualProperties = new DataRowVisualProperties {
StartIndexZero = false,
};
crossoverProbabilityHistory.Rows.Add(hrowPm);
layerResults.AddOrUpdateResult("CrossoverProbability History", crossoverProbabilityHistory);
}
crossoverProbabilityHistory.Rows[$"Layer {i}"].Values.Add(layerCrossoverProbability[i]);
if (i == 1) { // compute the hypervolume of the combined population of all active layers once per iteration; the history is stored with layer 1's results
DataTable wholeLayerHypervolumeHistory;
var qualities = population.Select(x => x.Qualities).ToArray();
var pf = DominationCalculator.CalculateBestParetoFront(population, qualities, maximization);
var nondominatedWhole = NonDominatedSelect.GetDominatingVectors(pf.Select(x => x.Item2), reference, maximization, false);
var hvWhole = nondominatedWhole.Any() ? Hypervolume.Calculate(nondominatedWhole, reference, maximization) : 0;
if (layerResults.TryGetValue("Hypervolume of the entire layers -- History", out IResult resHVWhole)) {
wholeLayerHypervolumeHistrory = (DataTable)resHVWhole.Value;
}
else {
wholeLayerHypervolumeHistrory = new DataTable("Hypervolume of the entire layers -- History");
var hrowWhole = new DataRow($"Layer {i}");
hrowWhole.VisualProperties = new DataRowVisualProperties {
StartIndexZero = false,
};
wholeLayerHypervolumeHistrory.Rows.Add(hrowWhole);
layerResults.AddOrUpdateResult("Hypervolume of the entire layers -- History", wholeLayerHypervolumeHistrory);
}
wholeLayerHypervolumeHistrory.Rows[$"Layer {i}"].Values.Add(hvWhole);
}
}
// Update globalScope
globalScope.SubScopes.Replace(population.Select(x => (IScope)x.Individual));
}
}
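// The hard-coded ageGapArray in Run follows a 20 * 2^i schedule (see the IMPROVE note there).
// The helper below is a minimal sketch of how such a schedule could be generated instead of
// being hard-coded; the base gap of 20 and the doubling factor are read off the array
// literal, while the helper itself is an assumption and is NOT called by the algorithm.
private static int[] BuildAgeGapSchedule(int layers, int baseGap = 20) {
var gaps = new int[layers];
for (int i = 0; i < layers; i++) {
gaps[i] = baseGap << i; // baseGap * 2^i, matching { 20, 40, 80, 160, ... }
}
return gaps;
}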
}
}