#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("Multi-Objective CMA Evolution Strategy (MOCMAES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth.")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableClass]
  public class MOCMAEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(MultiObjectiveBasicProblem<RealVectorEncoding>); }
    }
    public new MultiObjectiveBasicProblem<RealVectorEncoding> Problem {
      get { return (MultiObjectiveBasicProblem<RealVectorEncoding>)base.Problem; }
      set { base.Problem = value; }
    }
    public override bool SupportsPause {
      get { return true; }
    }

    #region Storable fields
    [Storable]
    private IRandom random = new MersenneTwister();
    [Storable]
    private NormalDistributedRandom gauss;
    [Storable]
    private Individual[] solutions;
    [Storable]
    private double stepSizeLearningRate; // c_p: learning rate in [0,1]
    [Storable]
    private double stepSizeDampeningFactor; // d
    [Storable]
    private double targetSuccessProbability; // p^target_succ
    [Storable]
    private double evolutionPathLearningRate; // c_c
    [Storable]
    private double covarianceMatrixLearningRate; // c_cov
    [Storable]
    private double covarianceMatrixUnlearningRate;
    [Storable]
    private double successThreshold; // p_thresh

    #endregion

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string CurrentFrontResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    public IFixedValueParameter<IntValue> PopulationSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    public IFixedValueParameter<IntValue> MaximumGenerationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    public IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    public IValueParameter<DoubleArray> InitialSigmaParameter {
      get { return (IValueParameter<DoubleArray>)Parameters[InitialSigmaName]; }
    }
    public IConstrainedValueParameter<IIndicator> IndicatorParameter {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public DoubleArray InitialSigma {
      get { return InitialSigmaParameter.Value; }
      set { InitialSigmaParameter.Value = value; }
    }
    public IIndicator Indicator {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }

    public double StepSizeLearningRate { get { return stepSizeLearningRate; } }
    public double StepSizeDampeningFactor { get { return stepSizeDampeningFactor; } }
    public double TargetSuccessProbability { get { return targetSuccessProbability; } }
    public double EvolutionPathLearningRate { get { return evolutionPathLearningRate; } }
    public double CovarianceMatrixLearningRate { get { return covarianceMatrixLearningRate; } }
    public double CovarianceMatrixUnlearningRate { get { return covarianceMatrixUnlearningRate; } }
    public double SuccessThreshold { get { return successThreshold; } }
    #endregion

    #region ResultsProperties
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion
    // Quality indicators
    private double ResultsHypervolume {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    // Solutions
    private DoubleMatrix ResultsSolutions {
      get { return (DoubleMatrix)Results[CurrentFrontResultName].Value; }
      set { Results[CurrentFrontResultName].Value = value; }
    }
    private ParetoFrontScatterPlot ResultsScatterPlot {
      get { return (ParetoFrontScatterPlot)Results[ScatterPlotResultName].Value; }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region Constructors
    public MOCMAEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime.", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] { 0.5 })));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
      var set = new ItemSet<IIndicator> { new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator() };
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism applied to non-dominated solutions.", set, set.First()));
    }

    [StorableConstructor]
    protected MOCMAEvolutionStrategy(bool deserializing) : base(deserializing) { }

    protected MOCMAEvolutionStrategy(MOCMAEvolutionStrategy original, Cloner cloner) : base(original, cloner) {
      random = cloner.Clone(original.random);
      gauss = cloner.Clone(original.gauss);
      solutions = original.solutions != null ? original.solutions.Select(cloner.Clone).ToArray() : null;
      stepSizeLearningRate = original.stepSizeLearningRate;
      stepSizeDampeningFactor = original.stepSizeDampeningFactor;
      targetSuccessProbability = original.targetSuccessProbability;
      evolutionPathLearningRate = original.evolutionPathLearningRate;
      covarianceMatrixLearningRate = original.covarianceMatrixLearningRate;
      covarianceMatrixUnlearningRate = original.covarianceMatrixUnlearningRate;
      successThreshold = original.successThreshold;
    }

    public override IDeepCloneable Clone(Cloner cloner) { return new MOCMAEvolutionStrategy(this, cloner); }
    #endregion

    #region Initialization
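    // Prepares a run: seeds the random number generator, creates the result entries, derives the
    // strategy parameters from the problem dimension, and initializes and evaluates the starting
    // population before the first analysis.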
    protected override void Initialize(CancellationToken cancellationToken) {
      if (SetSeedRandomly) Seed = RandomSeedGenerator.GetSeed();
      random.Reset(Seed);
      gauss = new NormalDistributedRandom(random, 0, 1);

      InitResults();
      InitStrategy();
      InitSolutions();
      Analyze();

      ResultsIterations = 1;
    }
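    // Creates an individual with mean x. The global step size is the largest initial sigma;
    // per-dimension differences are encoded on the diagonal of the covariance matrix, so that
    // sigma * sqrt(c[i, i]) reproduces the configured InitialSigma value for each dimension.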
    private Individual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var c = new double[x.Length, x.Length];
      var sigma = InitialSigma.Max();
      for (var i = 0; i < x.Length; i++) {
        var d = InitialSigma[i % InitialSigma.Length] / sigma;
        c[i, i] = d * d;
      }
      return new Individual(x, targetSuccessProbability, sigma, zeros, c, this);
    }
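    // Samples the initial population uniformly within the encoding bounds; if fewer bound rows
    // than dimensions are given, the rows are cycled. Every individual is evaluated immediately.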
    private void InitSolutions() {
      solutions = new Individual[PopulationSize];
      for (var i = 0; i < PopulationSize; i++) {
        var x = new RealVector(Problem.Encoding.Length); // uniform distribution in all dimensions assumed
        var bounds = Problem.Encoding.Bounds;
        for (var j = 0; j < Problem.Encoding.Length; j++) {
          var dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
        PenalizeEvaluate(solutions[i]);
      }
      ResultsEvaluations += solutions.Length;
    }
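    // Derives the default strategy parameters from the problem dimension n, with lambda = 1
    // offspring per parent (each individual adapts its own step size and covariance matrix).
    // The settings largely mirror the defaults proposed in the paper by Igel, Hansen and Roth
    // referenced in the Item attribute above.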
    private void InitStrategy() {
      const int lambda = 1;
      double n = Problem.Encoding.Length;
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }
    private void InitResults() {
      Results.Add(new Result(IterationsResultName, "The number of generations evaluated", new IntValue(0)));
      Results.Add(new Result(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0)));
      Results.Add(new Result(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(double.NaN)));
      Results.Add(new Result(GenerationalDistanceResultName, "The generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0)));
      Results.Add(new Result(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0)));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      Results.Add(new Result(TimetableResultName, "Different quality measures in a time series", table));
      Results.Add(new Result(CurrentFrontResultName, "The current front", new DoubleMatrix()));
      Results.Add(new Result(ScatterPlotResultName, "A scatter plot displaying the evaluated solutions and (if available) the analytically optimal front", new ParetoFrontScatterPlot()));

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;
      if (problem.BestKnownFront != null) {
        ResultsBestKnownHypervolume = Hypervolume.Calculate(problem.BestKnownFront.ToJaggedArray(), problem.TestFunction.ReferencePoint(problem.Objectives), Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      ResultsScatterPlot = new ParetoFrontScatterPlot(new double[0][], new double[0][], problem.BestKnownFront.ToJaggedArray(), problem.Objectives, problem.ProblemSize);
    }
    #endregion

    #region Mainloop
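    // Main loop: iterates until the generation or evaluation limit is reached (the runtime limit
    // is enforced separately in OnExecutionTimeChanged). Analyze() runs in a finally block so the
    // results stay up to date even when the run is paused or stopped via the cancellation token.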
    protected override void Run(CancellationToken cancellationToken) {
      while (ResultsIterations < MaximumGenerations && ResultsEvaluations < MaximumEvaluatedSolutions) {
        try {
          Iterate();
          ResultsIterations++;
          cancellationToken.ThrowIfCancellationRequested();
        } finally {
          Analyze();
        }
      }
    }
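    // One generation: every parent produces a single mutated offspring, offspring are evaluated
    // (with penalties for infeasible points), and parents and offspring then compete in a
    // (mu+mu) environmental selection before the strategy parameters are updated.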
    private void Iterate() {
      var offspring = solutions.Select(i => {
        var o = new Individual(i);
        o.Mutate(gauss);
        PenalizeEvaluate(o);
        return o;
      });
      ResultsEvaluations += solutions.Length;
      var parents = solutions.Concat(offspring).ToArray();
      SelectParents(parents, solutions.Length);
      UpdatePopulation(parents);
    }
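    // Enforces the MaximumRuntime parameter: once the execution time exceeds the limit,
    // the run is cancelled through the algorithm's cancellation token source.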
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
    #endregion

    #region Evaluation
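    // Feasible candidates are evaluated directly; infeasible candidates are evaluated at the
    // closest feasible point (box-constraint clipping) and their fitness is penalized by the
    // squared distance to that point, pushing the search back into the feasible region.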
    private void PenalizeEvaluate(Individual individual) {
      if (IsFeasible(individual.Mean)) {
        individual.Fitness = Evaluate(individual.Mean);
        individual.PenalizedFitness = individual.Fitness;
      } else {
        var t = ClosestFeasible(individual.Mean);
        individual.Fitness = Evaluate(t);
        individual.PenalizedFitness = Penalize(individual.Mean, t, individual.Fitness);
      }
    }
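    // Evaluate wraps the real vector in a scoped Individual so the problem's Evaluate can be
    // reused. Penalize shifts each objective by a small squared-distance term (weighted by 1e-6)
    // in the direction that worsens it, depending on whether that objective is maximized.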
    private double[] Evaluate(RealVector x) {
      var res = Problem.Evaluate(new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x) } }), random);
      return res;
    }
    private double[] Penalize(RealVector x, RealVector t, IEnumerable<double> fitness) {
      var penalty = x.Zip(t, (a, b) => (a - b) * (a - b)).Sum() * 1E-6;
      return fitness.Select((v, i) => Problem.Maximization[i] ? v - penalty : v + penalty).ToArray();
    }
    private RealVector ClosestFeasible(RealVector x) {
      var bounds = Problem.Encoding.Bounds;
      var r = new RealVector(x.Length);
      for (var i = 0; i < x.Length; i++) {
        var dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }
    private bool IsFeasible(RealVector offspring) {
      var bounds = Problem.Encoding.Bounds;
      for (var i = 0; i < offspring.Length; i++) {
        var dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

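    // Environmental selection: rank all parents and offspring by non-dominated sorting, drop
    // whole fronts from the worst rank down while more than 'length' individuals remain, and
    // then use the configured indicator to remove the least contributing members of the last
    // front that only partially fits.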
    private void SelectParents(IReadOnlyList<Individual> parents, int length) {
      // perform a non-dominated sort to assign a rank to every element
      int[] ranks;
      var fronts = DominationCalculator<Individual>.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);

      // deselect the highest-rank fronts until we would end up with at most mu elements
      var rank = fronts.Count - 1;
      var popSize = parents.Count;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.Item1.Selected = false;
        popSize -= front.Count;
        rank--;
      }

      // now use the indicator to deselect the approximately worst elements of the last selected front
      var front1 = fronts[rank].OrderBy(x => x.Item1.PenalizedFitness[0]).ToList();
      for (; popSize > length; popSize--) {
        var lc = Indicator.LeastContributer(front1.Select(i => i.Item1).ToArray(), Problem);
        front1[lc].Item1.Selected = false;
        front1.Swap(lc, front1.Count - 1);
        front1.RemoveAt(front1.Count - 1);
      }
    }

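    // Strategy parameter update: offspring that survived selection update themselves as
    // successful offspring, and each original parent is updated with the information whether its
    // own offspring was selected (the detailed CMA update rules live in the Individual class).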
    private void UpdatePopulation(IReadOnlyList<Individual> parents) {
      foreach (var p in parents.Skip(solutions.Length).Where(i => i.Selected))
        p.UpdateAsOffspring();
      for (var i = 0; i < solutions.Length; i++)
        if (parents[i].Selected)
          parents[i].UpdateAsParent(parents[i + solutions.Length].Selected);
      solutions = parents.Where(p => p.Selected).ToArray();
    }

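    // Updates the scatter plot and current front results and, for test function problems with a
    // known reference point, computes the quality indicators (hypervolume, generational distance,
    // crowding, spacing) on the subset of solutions that dominate the reference point.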
    private void Analyze() {
      ResultsScatterPlot = new ParetoFrontScatterPlot(solutions.Select(x => x.Fitness).ToArray(), solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives, ResultsScatterPlot.ProblemSize);
      ResultsSolutions = solutions.Select(x => x.Mean.ToArray()).ToMatrix();

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;

      var front = NonDominatedSelect.GetDominatingVectors(solutions.Select(x => x.Fitness), problem.ReferencePoint.CloneAsArray(), Problem.Maximization, true).ToArray();
      if (front.Length == 0) return;
      var bounds = problem.Bounds.CloneAsMatrix();
      ResultsCrowding = Crowding.Calculate(front, bounds);
      ResultsSpacing = Spacing.Calculate(front);
      ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsHypervolume = Hypervolume.Calculate(front, problem.ReferencePoint.CloneAsArray(), Problem.Maximization);
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume;

      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);

      Problem.Analyze(
        solutions.Select(x => (Optimization.Individual)new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x.Mean) } })).ToArray(),
        solutions.Select(x => x.Fitness).ToArray(),
        Results,
        random);
    }
  }
}