
source: branches/3057_DynamicALPS/HeuristicLab.Algorithms.DynamicALPS/3.4/DynamicALPSAlgorithm.cs @ 17593

Last change on this file since 17593 was 17479, checked in by kyang, 5 years ago

#3057

  1. upload the latest version of ALPS with SMS-EMOA
  2. upload the related dynamic test problems (dynamic, single-objective symbolic regression), written by David Daninel.
File size: 19.7 KB
#region License Information
/* HeuristicLab
 * Author: Kaifeng Yang
 *
 * Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */

// SMS-EMOA
/*
Implementation of a real-coded SMS-EMOA algorithm.
This implementation follows the description of: 'M. Emmerich, N. Beume, and B. Naujoks.
An EMO Algorithm Using the Hypervolume Measure as Selection Criterion. EMO 2005.'
*/
#endregion

using HEAL.Attic;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Random;
using System.Linq;
using System;
using CancellationToken = System.Threading.CancellationToken;
using HeuristicLab.Analysis;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Optimization;
using System.Collections.Generic;


/*  This algorithm is an SMS-EMOA implementation in HL. The main structure and the interfaces with HL are copied from the MOEA/D implementation in HL, written by Dr. Bogdan Burlacu. The S-metric selection operator was adapted from Kaifeng's MATLAB SMS-EMOA toolbox. The computational complexity of the HVC calculation is AT LEAST $O(n^2 \log n)$ in the 2-D and 3-D cases; it should definitely be reduced to $\Theta(n \log n)$.
 *
 *  This algorithm assumes:
 *    1. minimization problems. For maximization problems, negate the objective values (add a "-" sign).
 *
 *  This algorithm works on:
 *    1. continuous, discrete, and mixed-integer MOO problems. For different problem types, the operators should be adjusted accordingly.
 *    2. both multi-objective and many-objective problems. For many-objective problems, the bottleneck is the computational complexity of HV.
 *
 * This algorithm is the basic implementation of SMS-EMOA, proposed by Michael Emmerich et al. Some potential improvements are:
 *    1. Dynamic reference point strategy
 *    2. Normalized fitness value strategy ---- desirability function. See the CEC paper by Yali, Longmei, Kaifeng, and Michael Emmerich.
 *    3. The HVC calculation should definitely be improved, at least in the 2-D and 3-D cases (see the sketch below).
 *    4. Multiple-point strategy when $\lambda > 1$
 *    5. Multiple reference points strategy, ICNC 2016, Zhiwei Yang et al.
 *    6. HVC approximation by R2 for MANY-OBJECTIVE cases, Ishibuchi 2019, IEEE TEC
 *    7. Maybe: See maps
 *
 * Global parameters:
 *    1. population
 *
 * Many thanks to Bogdan Burlacu and Johannes Karder, especially Bogdan for his explanation, help, and support.
 */
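// Illustrative sketch (not part of the original implementation): S-metric selection discards one individual
// from the mu+lambda joint population, namely the one contributing the least hypervolume on the worst-ranked
// front. For the 2-D minimization case the exclusive hypervolume contributions (HVC) of a mutually
// non-dominated set can be computed in O(n log n) after a single sort, as sketched below; the method name
// 'TwoDimensionalContributions' and the reference point 'r' are illustrative assumptions, not identifiers
// used elsewhere in this project.
/*
static double[] TwoDimensionalContributions(double[][] front, double[] r) {
  int n = front.Length;
  // sort by f1 ascending; for a mutually non-dominated set, f2 is then descending
  var idx = Enumerable.Range(0, n).OrderBy(i => front[i][0]).ToArray();
  var hvc = new double[n];
  for (int k = 0; k < n; ++k) {
    double right = (k == n - 1) ? r[0] : front[idx[k + 1]][0];  // right neighbour in f1, or the reference point
    double upper = (k == 0) ? r[1] : front[idx[k - 1]][1];      // upper neighbour in f2, or the reference point
    hvc[idx[k]] = (right - front[idx[k]][0]) * (upper - front[idx[k]][1]);
  }
  return hvc;
}
*/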

namespace HeuristicLab.Algorithms.DynamicALPS {
  // Format:
  // The indexed name of the algorithm/item, Description of the algorithm/item in HL
  [Item("DynamicALPS-MainLoop", "DynamicALPS-MainLoop implementation adapted from SMS-EMOA in HL.")]

  // Call "HeuristicLab.Core"
  // Define the category of this class.
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 125)]

  // Call "HEAL.Attic"
  // Define GUID for this Class
  [StorableType("A7F33D16-3495-43E8-943C-8A919123F541")]

  public class DynamicALPSAlgorithm : DynamicALPSAlgorithmBase {
    public DynamicALPSAlgorithm() { }

    protected DynamicALPSAlgorithm(DynamicALPSAlgorithm original, Cloner cloner) : base(original, cloner) { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new DynamicALPSAlgorithm(this, cloner);
    }

    [StorableConstructor]
    protected DynamicALPSAlgorithm(StorableConstructorFlag deserializing) : base(deserializing) { }

    protected override void Run(CancellationToken cancellationToken) {
      if (previousExecutionState != ExecutionState.Paused) { // Call "base" class, DynamicALPSAlgorithmBase
        InitializeAlgorithm(cancellationToken);
      }


      var populationSize = PopulationSize.Value;
      bool[] maximization = ((BoolArray)Problem.MaximizationParameter.ActualValue).CloneAsArray();

      var crossover = Crossover;
      var crossoverProbability = CrossoverProbability.Value;
      var mutator = Mutator;
      var mutationProbability = MutationProbability.Value;
      var evaluator = Problem.Evaluator;
      var analyzer = Analyzer;
      var rand = RandomParameter.Value;


      var maximumEvaluatedSolutions = MaximumEvaluatedSolutions.Value;


      int lambda = 1;            // the number of offspring generated per iteration


      int nLayerALPS = ALPSLayers.Value;
      int counterLayerALPS = 0;

      // IMPROVE: ........
      //                              1   2   4   8   16  32  64  128 256 512
      int[] ageGapArray = new int[] { 20, 40, 80, 160, 320, 640, 1280, 2560, 5120, 10240 };
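      // The age gaps follow the exponential ALPS-style scheme ageGap[i] = 20 * 2^i (the factor row
      // 1, 2, 4, ... above). A hedged alternative to the hard-coded table, assuming a base gap of 20
      // and one entry per layer, would be:
      //   int[] ageGapArray = Enumerable.Range(0, nLayerALPS).Select(i => 20 * (1 << i)).ToArray();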
      int[] numberDiscard = new int[10];



      activeLayer = new bool[nLayerALPS];
      layerCrossoverProbability = new double[nLayerALPS];
      int[][] ageMatrix = new int[nLayerALPS][];      // layer * population size



      // cancellation token for the inner operations which should not be immediately cancelled
      var innerToken = new CancellationToken();


      // 12022020
      layerPopulation = new IDynamicALPSSolution[nLayerALPS][];
      layerOffspringPopulation = new IDynamicALPSSolution[nLayerALPS][];
      layerJointPopulation = new IDynamicALPSSolution[nLayerALPS][];
      layerDiscardPopulation = new IDynamicALPSSolution[nLayerALPS][];
      layerDiscardIndivdual = new IDynamicALPSSolution[nLayerALPS];


      layerPopulation[counterLayerALPS] = new IDynamicALPSSolution[populationSize];
      // BUG: The size of offspring should vary in different layers!
      layerOffspringPopulation[counterLayerALPS] = new IDynamicALPSSolution[lambda];
      // layerDiscardPopulation[counterLayerALPS] = new IDynamicALPSSolution[populationSize];  // in the previous version this was used to store the individuals whose age exceeds the age gap
      layerDiscardPopulation[counterLayerALPS] = new IDynamicALPSSolution[populationSize];
      population.CopyTo(layerPopulation[counterLayerALPS], 0);

      activeLayer[counterLayerALPS] = true;
      layerCrossoverProbability[counterLayerALPS] = 0;
      var test = UseAverageAge.Value;
      for (int i = 0; i < nLayerALPS; i++) {
        if (i == 0) {
          activeLayer[i] = true;
        }
        else { activeLayer[i] = false; }
        numberDiscard[i] = 0;
        layerCrossoverProbability[i] = CrossoverProbability.Value;
      }
      int bottomLayerIDX = 0;
      int godScope = 0;
      // Main loop
      while (evaluatedSolutions < maximumEvaluatedSolutions && !cancellationToken.IsCancellationRequested) {
        for (int i = 0; i < nLayerALPS; i++) {            // loop over every layer
          int discardedIndividualIndex = 0;
          var currentLayerIDX = i;
          var nextLayerIDX = i + 1;
          if (nextLayerIDX == nLayerALPS) {
            nextLayerIDX = bottomLayerIDX;
            godScope = 1;
          }
          else { godScope = 0; }


          if (activeLayer[currentLayerIDX] == true) {                   // check whether the current layer is active
            evaluatedSolutions = SMSEMOA(populationSize, lambda, currentLayerIDX);                  // get the offspring -- layerJointPopulation
            if (evaluatedSolutions >= maximumEvaluatedSolutions) { break; }           // check evaluation budget
            ageMatrix[currentLayerIDX] = layerJointPopulation[currentLayerIDX].Select(x => x.Age).ToArray();      // get age info of the current layer's joint population
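            // Two layer-promotion strategies follow. Version 1 (UseAverageAge == true) activates the next
            // layer once the average age of the joint population exceeds this layer's age gap, and afterwards
            // migrates the oldest individual upwards; version 2 collects individuals that exceed the age gap
            // and activates the next layer only once a full population of them has been gathered. Both paths
            // end in an S-metric selection that removes the individual with the smallest hypervolume contribution.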
            #region version 1: use the average age to initialize the next layer
            if (UseAverageAge.Value == true) {
              if (activeLayer[nextLayerIDX] == false) { // next layer is not active yet
                if (ageMatrix[currentLayerIDX].Average() > ageGapArray[currentLayerIDX]) {    // the next layer is initialized
                  InitializeLayer(nextLayerIDX, populationSize, lambda);  // initialize the layerPopulation for the next layer and set the active flag
                  layerJointPopulation[currentLayerIDX].CopyTo(layerJointPopulation[nextLayerIDX], 0);
                  SmetricSelection(lambda, nextLayerIDX);                // layerPopulation is updated here
                }
                else { // the next layer is not initialized
                  SmetricSelection(lambda, currentLayerIDX);
                }
              }
              else { // next layer is active
                if (activeLayer.All(x => x) && godScope == 1) {  // all layers are active and the current layer is the top layer: move the discarded individual from the top to the bottom and reset its age
                  SmetricSelection(lambda, currentLayerIDX);
                  layerPopulation[bottomLayerIDX].CopyTo(layerJointPopulation[bottomLayerIDX], 0);
                  layerDiscardIndivdual[currentLayerIDX].Age = 0;
                  layerJointPopulation[bottomLayerIDX][populationSize] = layerDiscardIndivdual[currentLayerIDX];
                  SmetricSelection(lambda, bottomLayerIDX);
                }
                else {
                  if (ageMatrix[currentLayerIDX].Max() > ageGapArray[currentLayerIDX]) {      // moving
                    discardedIndividualIndex = ageMatrix[currentLayerIDX].ToList().IndexOf(ageMatrix[currentLayerIDX].Max());
                    layerPopulation[nextLayerIDX].CopyTo(layerJointPopulation[nextLayerIDX], 0);
                    layerJointPopulation[currentLayerIDX].Where((x, idx) => idx == discardedIndividualIndex).ToArray().CopyTo(layerJointPopulation[nextLayerIDX], populationSize);
                    SmetricSelection(lambda, nextLayerIDX); // AGE and HVC
                    layerJointPopulation[currentLayerIDX].Where((x, idx) => idx != discardedIndividualIndex).ToArray().CopyTo(layerPopulation[currentLayerIDX], 0);  // discard the individual in the current layer
                  }
                  else {   // next layer is active, but the age is not yet mature
                    SmetricSelection(lambda, currentLayerIDX);
                  }
                }
              }
              #endregion
            }
            else {
              #region version 2: use individual age to initialize the next layer
              if (ageMatrix[currentLayerIDX].Max() > ageGapArray[currentLayerIDX]) {    // mature: moving
                discardedIndividualIndex = ageMatrix[currentLayerIDX].ToList().IndexOf(ageMatrix[currentLayerIDX].Max());   // BUG when two individuals have the same maximal age? NOT POSSIBLE IN SMS-EMOA
                layerDiscardPopulation[currentLayerIDX][numberDiscard[currentLayerIDX]] = layerJointPopulation[currentLayerIDX][discardedIndividualIndex];                    // store the individual in the discard buffer for the next layer
                layerJointPopulation[currentLayerIDX].Where((x, idx) => idx != discardedIndividualIndex).ToArray().CopyTo(layerPopulation[currentLayerIDX], 0); // discard the individual in the current layer
                numberDiscard[currentLayerIDX] += 1;
                if (activeLayer[nextLayerIDX] == false) {   // next layer is not active (BUG: if i == number of layers, index out of range)
                  if (numberDiscard[currentLayerIDX] == populationSize) {
                    InitializeLayer(nextLayerIDX, populationSize, lambda);  // initialize the layerPopulation for the next layer and set the active flag
                    layerDiscardPopulation[currentLayerIDX].CopyTo(layerPopulation[nextLayerIDX], 0);
                    numberDiscard[currentLayerIDX] = 0;
                  }
                  else {
                    // number of matured individuals < population size in the next layer
                  }
                }
                else {  // next layer is active
                  layerPopulation[nextLayerIDX].CopyTo(layerJointPopulation[nextLayerIDX], 0);
                  layerJointPopulation[currentLayerIDX].Where((x, idx) => idx == discardedIndividualIndex).ToArray().CopyTo(layerJointPopulation[nextLayerIDX], populationSize);
                  SmetricSelection(lambda, nextLayerIDX); // AGE and HVC
                  numberDiscard[currentLayerIDX] = 0;
                }
              }
              layerPopulation[currentLayerIDX].CopyTo(population, 0);  // BUG: should copy all the active layers to population.
              #endregion
            }
          }
          else {
            // layer not active yet: nothing to do here
          }


        }




        int numberOfActiveLayer = activeLayer.Count(c => c);
        population = new IDynamicALPSSolution[populationSize * numberOfActiveLayer];
        for (int i = 0; i < numberOfActiveLayer; i++) {
          layerPopulation[i].CopyTo(population, i * populationSize);
        }
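        // 'population' is rebuilt as the concatenation of all active layers; the analyzer scope update
        // below and the whole-population hypervolume recorded in the layer results operate on this joint set.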


        // run analyzer
        var analyze = executionContext.CreateChildOperation(analyzer, globalScope);
        ExecuteOperation(executionContext, innerToken, analyze);
        // update Pareto-front approximation sets
        // UpdateParetoFronts();
        // Show some results in the GUI.
        Results.AddOrUpdateResult("IdealPoint", new DoubleArray(IdealPoint));
        Results.AddOrUpdateResult("NadirPoint", new DoubleArray(NadirPoint));
        Results.AddOrUpdateResult("Evaluated Solutions", new IntValue(evaluatedSolutions));




        // see if we already have a result collection for the layer results, and reuse that one
        ResultCollection allLayerResults;
        if (Results.TryGetValue("LayerResults", out IResult allLayerResultsResult)) {
          allLayerResults = (ResultCollection)allLayerResultsResult.Value;
        }
        else {
          allLayerResults = new ResultCollection();
          Results.AddOrUpdateResult("LayerResults", allLayerResults);
        }


        // run layer analyzers
        for (int i = 0; i < activeLayer.Length; ++i) {
          if (!activeLayer[i]) continue;
          var scope = new Scope($"Layer {i}");
          var layer = layerPopulation[i];
          var tmp = UpdateParetoFronts(layer, IdealPoint);

          // update the results in a way that avoids creating a new result collection at each iteration
          if (allLayerResults.TryGetValue(scope.Name, out IResult lRes)) {
            var lr = (ResultCollection)lRes.Value;
            foreach (var result in tmp) {
              lr.AddOrUpdateResult(result.Name, result.Value);
            }
          }
          else {
            allLayerResults.AddOrUpdateResult(scope.Name, tmp);
          }

          var layerResults = (ResultCollection)allLayerResults[scope.Name].Value;

          //var layerQualities = new ItemArray<DoubleArray>(layer.Select(x => new DoubleArray(x.Qualities)));
          // var layerSolutions = new ItemArray<IItem>(layer.Select(x => (IItem)x.Individual.Clone()));

          // only store the decision vectors
          var layerSolutions = new ItemArray<IItem>(layer.Select(x => (IItem)((IScope)x.Individual).Variables["RealVector"].Value.Clone()));

          var layerAges = new ItemArray<IntValue>(layer.Select(x => new IntValue(x.Age)));
          //layerResults.AddOrUpdateResult("Objective values", layerQualities);
          layerResults.AddOrUpdateResult("Decision vectors", layerSolutions);
          layerResults.AddOrUpdateResult("Age", layerAges);

          var tableObjectives = new DataTable("Objective values");
          for (int j = 0; j < IdealPoint.Length; ++j) {
            var row = new DataRow($"Objective {j + 1}");
            row.Values.AddRange(layer.Select(x => x.Qualities[j]));
            tableObjectives.Rows.Add(row);
          }
          layerResults.AddOrUpdateResult("Objective values", tableObjectives);


          // historical HV
          DataTable hyperVolumeHistory;
          if (layerResults.TryGetValue("Layer Hypervolume History", out IResult res)) {
            hyperVolumeHistory = (DataTable)res.Value;
          }
          else {
            hyperVolumeHistory = new DataTable("Layer Hypervolume History");
            var hrow = new DataRow($"Layer {i}");
            hrow.VisualProperties = new DataRowVisualProperties {
              StartIndexZero = false,
            };
            hyperVolumeHistory.Rows.Add(hrow);
            layerResults.AddOrUpdateResult("Layer Hypervolume History", hyperVolumeHistory);
          }
          //var front = layer.Select(x => x.Qualities);
          var reference = ReferencePoint.ToArray();
          //var hv = double.MinValue;

          var layerQualities = layer.Select(x => x.Qualities).ToArray();
          var layerPF = DominationCalculator<IDynamicALPSSolution>.CalculateBestParetoFront(layer, layerQualities, maximization);
          var nondominatedLayer = NonDominatedSelect.GetDominatingVectors(layerPF.Select(x => x.Item2), reference, maximization, false);
          var layerHV = nondominatedLayer.Any() ? Hypervolume.Calculate(nondominatedLayer, reference, maximization) : 0;
          hyperVolumeHistory.Rows[$"Layer {i}"].Values.Add(layerHV);
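          // Worked example (illustrative numbers, not data from a run): for a 2-D minimization problem
          // with reference point (11, 11) and the non-dominated front {(1, 5), (4, 2)}, the dominated
          // region is the union of [1,11]x[5,11] and [4,11]x[2,11], so the hypervolume is
          // 10*6 + 7*9 - 7*6 = 81.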


          // historical crossover probability
          DataTable crossoverProbabilityHistory;
          if (layerResults.TryGetValue("CrossoverProbability History", out IResult resPm)) {
            crossoverProbabilityHistory = (DataTable)resPm.Value;
          }
          else {
            crossoverProbabilityHistory = new DataTable("CrossoverProbability History");
            var hrowPm = new DataRow($"Layer {i}");
            hrowPm.VisualProperties = new DataRowVisualProperties {
              StartIndexZero = false,
            };
            crossoverProbabilityHistory.Rows.Add(hrowPm);
            layerResults.AddOrUpdateResult("CrossoverProbability History", crossoverProbabilityHistory);
          }
          crossoverProbabilityHistory.Rows[$"Layer {i}"].Values.Add(layerCrossoverProbability[i]);



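          // Record the hypervolume of the concatenated population of all active layers only once per
          // iteration, using the processing of layer 1 as the trigger.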
          if (i == 1) {
            DataTable wholeLayerHypervolumeHistory;
            var qualities = population.Select(x => x.Qualities).ToArray();
            var pf = DominationCalculator<IDynamicALPSSolution>.CalculateBestParetoFront(population, qualities, maximization);
            var nondominatedWhole = NonDominatedSelect.GetDominatingVectors(pf.Select(x => x.Item2), reference, maximization, false);
            var hvWhole = nondominatedWhole.Any() ? Hypervolume.Calculate(nondominatedWhole, reference, maximization) : 0;
            if (layerResults.TryGetValue("Hypervolume of the entire layers -- History", out IResult resHVWhole)) {
              wholeLayerHypervolumeHistory = (DataTable)resHVWhole.Value;
            }
            else {
              wholeLayerHypervolumeHistory = new DataTable("Hypervolume of the entire layers -- History");
              var hrowWhole = new DataRow($"Layer {i}");
              hrowWhole.VisualProperties = new DataRowVisualProperties {
                StartIndexZero = false,
              };
              wholeLayerHypervolumeHistory.Rows.Add(hrowWhole);
              layerResults.AddOrUpdateResult("Hypervolume of the entire layers -- History", wholeLayerHypervolumeHistory);
            }
            wholeLayerHypervolumeHistory.Rows[$"Layer {i}"].Values.Add(hvWhole);
          }


        }
        // Update globalScope
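        // Keeping globalScope.SubScopes in sync with the joint population lets the analyzer executed in
        // the next iteration see the individuals of all active layers.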
        globalScope.SubScopes.Replace(population.Select(x => (IScope)x.Individual));
      }
    }
  }
}