1 | #region License Information
|
---|
2 | /* HeuristicLab
|
---|
3 | * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
|
---|
4 | * and the BEACON Center for the Study of Evolution in Action.
|
---|
5 | *
|
---|
6 | * This file is part of HeuristicLab.
|
---|
7 | *
|
---|
8 | * HeuristicLab is free software: you can redistribute it and/or modify
|
---|
9 | * it under the terms of the GNU General Public License as published by
|
---|
10 | * the Free Software Foundation, either version 3 of the License, or
|
---|
11 | * (at your option) any later version.
|
---|
12 | *
|
---|
13 | * HeuristicLab is distributed in the hope that it will be useful,
|
---|
14 | * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
---|
15 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
---|
16 | * GNU General Public License for more details.
|
---|
17 | *
|
---|
18 | * You should have received a copy of the GNU General Public License
|
---|
19 | * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
|
---|
20 | */
|
---|
21 | #endregion
|
---|
22 |
|
---|
23 | using System;
|
---|
24 | using System.Collections.Generic;
|
---|
25 | using System.Threading;
|
---|
26 | using HeuristicLab.Analysis;
|
---|
27 | using HeuristicLab.Common;
|
---|
28 | using HeuristicLab.Core;
|
---|
29 | using HeuristicLab.Data;
|
---|
30 | using HeuristicLab.Encodings.RealVectorEncoding;
|
---|
31 | using HeuristicLab.Optimization;
|
---|
32 | using HeuristicLab.Parameters;
|
---|
33 | using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
|
---|
34 | using HeuristicLab.Random;
|
---|
35 | using HeuristicLab.Problems.MultiObjectiveTestFunctions;
|
---|
36 | using System.Linq;
|
---|
37 |
|
---|
38 | namespace HeuristicLab.Algorithms.CMAEvolutionStrategy
|
---|
39 | {
|
---|
40 | [Item("MOCMAS Evolution Strategy (MOCMASES)", "A multi objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi - objective Optimization' by Igel, Hansen and Roth")]
|
---|
41 | [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
|
---|
42 | [StorableClass]
|
---|
43 | public class MOCMASEvolutionStrategy : BasicAlgorithm
|
---|
44 | {
|
---|
// This algorithm is applicable to multi-objective test function problems only.
public override Type ProblemType {
  get { return typeof(MultiObjectiveTestFunctionProblem); }
}

// Strongly typed view on the problem stored in the base class.
public new MultiObjectiveTestFunctionProblem Problem {
  get { return (MultiObjectiveTestFunctionProblem)base.Problem; }
  set { base.Problem = value; }
}
54 |
|
---|
// Algorithm-wide RNG; re-seeded at the start of every run via random.Reset(Seed) in Run().
private readonly IRandom random = new MersenneTwister();
56 |
|
---|
57 |
|
---|
#region ParameterNames
// Lookup keys for the Parameters collection; used by the properties below and the constructor.
private const string MaximumRuntimeName = "Maximum Runtime";
private const string SeedName = "Seed";
private const string SetSeedRandomlyName = "SetSeedRandomly";
private const string PopulationSizeName = "PopulationSize";
private const string InitialIterationsName = "InitialIterations";
private const string InitialSigmaName = "InitialSigma";
private const string MuName = "Mu";
private const string CMAInitializerName = "CMAInitializer";
private const string CMAMutatorName = "CMAMutator";
private const string CMARecombinatorName = "CMARecombinator";
private const string CMAUpdaterName = "CMAUpdater";
private const string AnalyzerName = "Analyzer";
private const string MaximumGenerationsName = "MaximumGenerations";
private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
private const string TargetQualityName = "TargetQuality";
private const string MinimumQualityChangeName = "MinimumQualityChange";
private const string MinimumQualityHistoryChangeName = "MinimumQualityHistoryChange";
private const string MinimumStandardDeviationName = "MinimumStandardDeviation";
private const string MaximumStandardDeviationChangeName = "MaximumStandardDeviationChange";
#endregion
79 |
|
---|
#region ParameterProperties
// Typed accessors into the Parameters collection, keyed by the name constants above.
public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
  get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
}
public IFixedValueParameter<IntValue> SeedParameter {
  get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
}
public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
  get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
}
private IFixedValueParameter<IntValue> PopulationSizeParameter {
  get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
}
public IValueParameter<MultiAnalyzer> AnalyzerParameter {
  get { return (IValueParameter<MultiAnalyzer>)Parameters[AnalyzerName]; }
}
private IFixedValueParameter<IntValue> InitialIterationsParameter {
  get { return (IFixedValueParameter<IntValue>)Parameters[InitialIterationsName]; }
}
public IValueParameter<DoubleValue> InitialSigmaParameter {
  get { return (IValueParameter<DoubleValue>)Parameters[InitialSigmaName]; }
}
private OptionalValueParameter<IntValue> MuParameter {
  get { return (OptionalValueParameter<IntValue>)Parameters[MuName]; }
}
private IFixedValueParameter<IntValue> MaximumGenerationsParameter {
  get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
}
private IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
  get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
}
private IFixedValueParameter<DoubleValue> TargetQualityParameter {
  get { return (IFixedValueParameter<DoubleValue>)Parameters[TargetQualityName]; }
}
private IFixedValueParameter<DoubleValue> MinimumQualityChangeParameter {
  get { return (IFixedValueParameter<DoubleValue>)Parameters[MinimumQualityChangeName]; }
}
private IFixedValueParameter<DoubleValue> MinimumQualityHistoryChangeParameter {
  get { return (IFixedValueParameter<DoubleValue>)Parameters[MinimumQualityHistoryChangeName]; }
}
private IFixedValueParameter<DoubleValue> MinimumStandardDeviationParameter {
  get { return (IFixedValueParameter<DoubleValue>)Parameters[MinimumStandardDeviationName]; }
}
private IFixedValueParameter<DoubleValue> MaximumStandardDeviationChangeParameter {
  get { return (IFixedValueParameter<DoubleValue>)Parameters[MaximumStandardDeviationChangeName]; }
}
#endregion
142 |
|
---|
#region Properties
// Convenience accessors unwrapping the parameter value objects.
public int MaximumRuntime {
  get { return MaximumRuntimeParameter.Value.Value; }
  set { MaximumRuntimeParameter.Value.Value = value; }
}
public int Seed {
  get { return SeedParameter.Value.Value; }
  set { SeedParameter.Value.Value = value; }
}
public bool SetSeedRandomly {
  get { return SetSeedRandomlyParameter.Value.Value; }
  set { SetSeedRandomlyParameter.Value.Value = value; }
}
public int PopulationSize {
  get { return PopulationSizeParameter.Value.Value; }
  set { PopulationSizeParameter.Value.Value = value; }
}
public int InitialIterations {
  get { return InitialIterationsParameter.Value.Value; }
  set { InitialIterationsParameter.Value.Value = value; }
}
public int MaximumGenerations {
  get { return MaximumGenerationsParameter.Value.Value; }
  set { MaximumGenerationsParameter.Value.Value = value; }
}
public int MaximumEvaluatedSolutions {
  get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
  set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
}
public double TargetQuality {
  get { return TargetQualityParameter.Value.Value; }
  set { TargetQualityParameter.Value.Value = value; }
}
public double MinimumQualityChange {
  get { return MinimumQualityChangeParameter.Value.Value; }
  set { MinimumQualityChangeParameter.Value.Value = value; }
}
public double MinimumQualityHistoryChange {
  get { return MinimumQualityHistoryChangeParameter.Value.Value; }
  set { MinimumQualityHistoryChangeParameter.Value.Value = value; }
}
public double MinimumStandardDeviation {
  get { return MinimumStandardDeviationParameter.Value.Value; }
  set { MinimumStandardDeviationParameter.Value.Value = value; }
}
public double MaximumStandardDeviationChange {
  get { return MaximumStandardDeviationChangeParameter.Value.Value; }
  set { MaximumStandardDeviationChangeParameter.Value.Value = value; }
}
public DoubleValue InitialSigma {
  get { return InitialSigmaParameter.Value; }
  set { InitialSigmaParameter.Value = value; }
}
public IntValue Mu {
  get { return MuParameter.Value; }
  set { MuParameter.Value = value; }
}
#endregion
216 |
|
---|
#region ResultsProperties
// Typed accessors into the Results collection. The keyed entries are created in initResults();
// these accessors throw if initResults() has not run yet.
private int ResultsEvaluations {
  get { return ((IntValue)Results["Evaluations"].Value).Value; }
  set { ((IntValue)Results["Evaluations"].Value).Value = value; }
}
private int ResultsIterations {
  get { return ((IntValue)Results["Iterations"].Value).Value; }
  set { ((IntValue)Results["Iterations"].Value).Value = value; }
}

// Datatable holding one row per quality indicator over time.
private DataTable ResultsQualities {
  get { return ((DataTable)Results["Timetable"].Value); }
}
private DataRow ResultsHypervolumeBest {
  get { return ResultsQualities.Rows["Best Hypervolume"]; }
}
private DataRow ResultsHypervolumeIteration {
  get { return ResultsQualities.Rows["Iteration Hypervolume"]; }
}
private DataRow ResultsGenerationalDistanceIteration {
  get { return ResultsQualities.Rows["Iteration Generational Distance"]; }
}
private DataRow ResultsInvertedGenerationalDistanceIteration {
  get { return ResultsQualities.Rows["Iteration Inverted Generational Distance"]; }
}
private DataRow ResultsCrowdingIteration {
  get { return ResultsQualities.Rows["Iteration Crowding"]; }
}

// Scalar quality indicators of the current iteration.
private double ResultsHypervolume {
  get { return ((DoubleValue)Results["Hypervolume"].Value).Value; }
  set { ((DoubleValue)Results["Hypervolume"].Value).Value = value; }
}
private double ResultsGenerationalDistance {
  get { return ((DoubleValue)Results["Generational Distance"].Value).Value; }
  set { ((DoubleValue)Results["Generational Distance"].Value).Value = value; }
}
private double ResultsInvertedGenerationalDistance {
  get { return ((DoubleValue)Results["Inverted Generational Distance"].Value).Value; }
  set { ((DoubleValue)Results["Inverted Generational Distance"].Value).Value = value; }
}
private double ResultsCrowding {
  get { return ((DoubleValue)Results["Crowding"].Value).Value; }
  set { ((DoubleValue)Results["Crowding"].Value).Value = value; }
}
private double ResultsBestHypervolume {
  get { return ((DoubleValue)Results["Best Hypervolume"].Value).Value; }
  set { ((DoubleValue)Results["Best Hypervolume"].Value).Value = value; }
}
private double ResultsBestKnownHypervolume {
  get { return ((DoubleValue)Results["Best Known Hypervolume"].Value).Value; }
  set { ((DoubleValue)Results["Best Known Hypervolume"].Value).Value = value; }
}

// Current solution set, as raw matrix and as scatter plot.
private DoubleMatrix ResultsSolutions {
  get { return ((DoubleMatrix)Results["Solutions"].Value); }
  set { Results["Solutions"].Value = value; }
}
private MOSolution ResultsSolutionsPlot {
  get { return ((MOSolution)Results["Solutions Scatterplot"].Value); }
  set { Results["Solutions Scatterplot"].Value = value; }
}
#endregion
300 |
|
---|
#region constructors and hlBoilerPlate-code
// Deserialization constructor required by the HL persistence framework.
[StorableConstructor]
protected MOCMASEvolutionStrategy(bool deserializing) : base(deserializing) { }

// Copy constructor used by Clone(); base class copies the parameters/results.
protected MOCMASEvolutionStrategy(MOCMASEvolutionStrategy original, Cloner cloner)
  : base(original, cloner)
{
}

// Standard HL deep-clone entry point.
public override IDeepCloneable Clone(Cloner cloner)
{
  return new MOCMASEvolutionStrategy(this, cloner);
}
314 |
|
---|
// Registers all user-facing parameters with their default values.
// All termination criteria default to NaN, i.e. disabled.
public MOCMASEvolutionStrategy()
{
  Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(3600)));
  Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
  Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
  Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
  Parameters.Add(new FixedValueParameter<IntValue>(InitialIterationsName, "The number of iterations that should be performed with only axis parallel mutation.", new IntValue(0)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleValue(0.5)));
  Parameters.Add(new OptionalValueParameter<IntValue>(MuName, "Optional, the mu best offspring that should be considered for update of the new mean and strategy parameters. If not given it will be automatically calculated."));
  Parameters.Add(new ValueParameter<MultiAnalyzer>(AnalyzerName, "The operator used to analyze each generation.", new MultiAnalyzer()));
  Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
  Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(TargetQualityName, "(stopFitness) Surpassing this quality value terminates the algorithm.", new DoubleValue(double.NaN)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(MinimumQualityChangeName, "(stopTolFun) If the range of fitness values is less than a certain value the algorithm terminates (set to 0 or positive value to enable).", new DoubleValue(double.NaN)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(MinimumQualityHistoryChangeName, "(stopTolFunHist) If the range of fitness values is less than a certain value for a certain time the algorithm terminates (set to 0 or positive to enable).", new DoubleValue(double.NaN)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(MinimumStandardDeviationName, "(stopTolXFactor) If the standard deviation falls below a certain value the algorithm terminates (set to 0 or positive to enable).", new DoubleValue(double.NaN)));
  Parameters.Add(new FixedValueParameter<DoubleValue>(MaximumStandardDeviationChangeName, "(stopTolUpXFactor) If the standard deviation changes by a value larger than this parameter the algorithm stops (set to a value > 0 to enable).", new DoubleValue(double.NaN)));

}
334 | #endregion
|
---|
335 | #region updates
|
---|
// Applies the strategy-parameter updates to all selected individuals and
// replaces `solutions` with the selected survivors.
// `parents` holds the old population first, followed by its offspring (see iterate()/merge()).
private void updatePopulation(CMAHansenIndividual[] parents) {
  int parentCount = solutions.Length;
  int offspringCount = parents.Length - parentCount;
  var offspringSuccess = new int[parentCount];

  // update every selected offspring and record the success for its parent
  for (int i = 0; i < offspringCount; i++) {
    var child = parents[parentCount + i];
    if (!child.selected) continue;
    updateAsOffspring(child);
    //TODO this may change if more offspring per parent is allowed
    offspringSuccess[i] += CMAHansenIndividual.success;
  }

  // update every surviving parent based on whether its offspring succeeded
  for (int i = 0; i < parentCount; i++) {
    if (parents[i].selected) updateAsParent(parents[i], offspringSuccess[i]);
  }

  // the selected individuals become the next population
  solutions = new CMAHansenIndividual[parentCount];
  int next = 0;
  foreach (CMAHansenIndividual ind in parents) {
    if (ind.selected) solutions[next++] = ind;
  }
}
370 |
|
---|
// Strategy-parameter update for a surviving parent. `v` is the success count
// accumulated from its offspring in updatePopulation (compared against the
// CMAHansenIndividual.success / .failure constants).
private void updateAsParent(CMAHansenIndividual c, int v)
{
  // smoothed success probability: pulled toward 1 on success, toward 0 otherwise
  c.successProbability = (1 - stepSizeLearningRate) * c.successProbability + stepSizeLearningRate * (v == CMAHansenIndividual.success ? 1 : 0);
  // log-linear step-size adaptation around the target success probability
  c.sigma *= Math.Exp(1 / stepSizeDampeningFactor * (c.successProbability - targetSuccessProbability) / (1 - targetSuccessProbability));
  // covariance is only adapted further when the offspring failed
  if (v != CMAHansenIndividual.failure) return;
  if (c.successProbability < successThreshold)
  {
    double stepNormSqr = c.getSetpNormSqr(); // squared norm of the last step (method name typo'd upstream)
    double rate = covarianceMatrixUnlearningRate;
    // NOTE(review): presumably caps the unlearning rate to keep the rank-one
    // "downdate" (negative weight below) well-conditioned — confirm against the paper
    if (stepNormSqr > 1 && 1 < covarianceMatrixUnlearningRate * (2 * stepNormSqr - 1))
    {
      rate = 1 / (2 * stepNormSqr - 1);
    }
    // unlearn the last (unsuccessful) step direction
    rankOneUpdate(c, 1 + rate, -rate, c.lastStep);

  }
  else
  {
    roundUpdate(c);
  }

}
393 |
|
---|
// Strategy-parameter update for a selected offspring: being selected counts as
// a success, so the smoothed success probability is pulled toward 1.
private void updateAsOffspring(CMAHansenIndividual c)
{
  c.successProbability = (1 - stepSizeLearningRate) * c.successProbability + stepSizeLearningRate;
  // same log-linear step-size rule as in updateAsParent
  c.sigma *= Math.Exp(1 / stepSizeDampeningFactor * (c.successProbability - targetSuccessProbability) / (1 - targetSuccessProbability));
  double evolutionpathUpdateWeight = evolutionPathLearningRate * (2.0 - evolutionPathLearningRate);
  if (c.successProbability < successThreshold)
  {
    // reinforce the last step direction through the evolution path
    c.updateEvolutionPath(1 - evolutionPathLearningRate, evolutionpathUpdateWeight);
    rankOneUpdate(c, 1 - covarianceMatrixLearningRate, covarianceMatrixLearningRate, c.evolutionPath);
  }
  else
  {
    // high success probability: decay the path without adding the current step
    roundUpdate(c);
  }
}
409 |
|
---|
// Thin wrapper delegating the rank-one covariance (cholesky-factor) update to
// the individual; v1 scales the old factor, v2 weights the step's outer product.
private void rankOneUpdate(CMAHansenIndividual c, double v1, double v2, RealVector lastStep) {
  c.choleskyUpdate(lastStep, v1, v2);
}
414 |
|
---|
// Covariance update used when the success probability is at/above successThreshold:
// the evolution path is decayed with zero weight on the current step, and the
// decayed variance mass is compensated in the rank-one update's first factor.
private void roundUpdate(CMAHansenIndividual c)
{
  double evolutionPathUpdateWeight = evolutionPathLearningRate * (2.0 - evolutionPathLearningRate);
  c.updateEvolutionPath(1 - evolutionPathLearningRate, 0);
  rankOneUpdate(c, 1 - covarianceMatrixLearningRate + evolutionPathUpdateWeight,
    covarianceMatrixLearningRate, c.evolutionPath);
}
422 | #endregion
|
---|
423 | #region selection
|
---|
424 |
|
---|
/// <summary>
/// Concatenates two arrays into a new array: all parents first, then all offspring.
/// Uses Array.Copy instead of hand-rolled element loops (same result, idiomatic and faster).
/// </summary>
private T[] merge<T>(T[] parents, T[] offspring) {
  var merged = new T[parents.Length + offspring.Length];
  Array.Copy(parents, 0, merged, 0, parents.Length);
  Array.Copy(offspring, 0, merged, parents.Length, offspring.Length);
  return merged;
}
438 |
|
---|
// Environmental selection: keeps exactly `length` individuals of `parents` by
// setting their `selected` flag; the array itself is neither reordered nor shrunk.
private void selection(CMAHansenIndividual[] parents, int length)
{
  //perform a nondominated sort to assign the rank to every element
  var fronts = NonDominatedSort(parents);

  //deselect the highest rank fronts until we would end up with less or equal mu elements
  int rank = fronts.Count - 1;
  int popSize = parents.Length;
  while (popSize - fronts[rank].Count >= length)
  {
    var front = fronts[rank];
    foreach (var i in front) i.selected = false;
    popSize -= front.Count;
    rank--;
  }

  //now use the indicator to deselect the worst approximating elements of the last selected front
  var front_ = fronts[rank];
  for (; popSize > length; popSize--)
  {
    int lc = indicator.leastContributer<CMAHansenIndividual>(front_.ToArray(), x => x.penalizedFitness, Problem); //TODO: This is a battle in its own right to be fought another day
    front_[lc].selected = false;
    // swap-remove: O(1) removal; the front's internal order no longer matters here
    front_.Swap(lc, front_.Count - 1);
    front_.RemoveAt(front_.Count - 1);
  }
}
465 | #endregion
|
---|
466 | #region penalize Box-Constraints
|
---|
// Evaluates (and, where infeasible, penalizes) each individual of the collection.
private void penalizeEvaluate(IEnumerable<CMAHansenIndividual> offspring) {
  foreach (var child in offspring) penalizeEvaluate(child);
}
474 |
|
---|
// Evaluates a single individual. Infeasible solutions are evaluated at their
// closest feasible point and their penalizedFitness carries a distance penalty;
// feasible ones share the same fitness array for both fields.
private void penalizeEvaluate(CMAHansenIndividual offspring) {
  if (!isFeasable(offspring.x)) {
    RealVector feasible = closestFeasible(offspring.x);
    offspring.fitness = Problem.Evaluate(feasible, random);
    ResultsEvaluations++;
    offspring.penalizedFitness = penalize(offspring.x, feasible, offspring.fitness);
  } else {
    offspring.fitness = Problem.Evaluate(offspring.x, random);
    ResultsEvaluations++;
    offspring.penalizedFitness = offspring.fitness;
  }
}
491 |
|
---|
// Returns a copy of `fitness` with the squared distance between x and its
// feasible projection t added to every objective value.
private double[] penalize(RealVector x, RealVector t, double[] fitness) {
  double penalty = penalize(x, t);
  var penalized = new double[fitness.Length];
  for (int i = 0; i < fitness.Length; i++) penalized[i] = fitness[i] + penalty;
  return penalized;
}
502 |
|
---|
// Squared Euclidean distance between x and t, used as the infeasibility penalty.
private double penalize(RealVector x, RealVector t) {
  double sum = 0.0;
  for (int i = 0; i < x.Length; i++) {
    double diff = x[i] - t[i];
    sum += diff * diff;
  }
  return sum;
}
513 |
|
---|
// Projects x into the problem's box constraints by clamping each component.
// Bounds rows are cycled (i % Rows) so a single bounds row applies to all dimensions.
private RealVector closestFeasible(RealVector x) {
  DoubleMatrix bounds = Problem.Bounds;
  var clipped = new RealVector(x.Length);
  for (int i = 0; i < x.Length; i++) {
    int dim = i % bounds.Rows;
    double lower = bounds[dim, 0];
    double upper = bounds[dim, 1];
    clipped[i] = Math.Min(Math.Max(lower, x[i]), upper);
  }
  return clipped;
}
525 |
|
---|
// True iff every component of the vector lies within the (cycled) box bounds.
private bool isFeasable(RealVector offspring) {
  DoubleMatrix bounds = Problem.Bounds;
  for (int i = 0; i < offspring.Length; i++) {
    int dim = i % bounds.Rows;
    bool belowLower = offspring[i] < bounds[dim, 0];
    bool aboveUpper = offspring[i] > bounds[dim, 1];
    if (belowLower || aboveUpper) return false;
  }
  return true;
}
536 |
|
---|
537 | #endregion
|
---|
538 | #region mutation
|
---|
// Creates one mutated offspring per current solution ((1+1) strategy per parent).
private CMAHansenIndividual[] generateOffspring() {
  //TODO this changes if 1,1-ES is replaced with 1,n-ES
  var offspring = new CMAHansenIndividual[PopulationSize];
  for (int i = 0; i < PopulationSize; i++) {
    var child = new CMAHansenIndividual(solutions[i]);
    child.mutate(gauss);
    offspring[i] = child;
  }
  return offspring;
}
549 | #endregion
|
---|
550 |
|
---|
551 | #region initialization
|
---|
// Wraps a start point x into an individual with default strategy parameters:
// zero evolution path, identity covariance, the configured initial sigma, and
// the target success probability as the initial smoothed success rate.
private CMAHansenIndividual initializeIndividual(RealVector x) {
  int n = x.Length;
  var zeroPath = new RealVector(n);
  var identity = new double[n, n];
  for (int i = 0; i < n; i++) identity[i, i] = 1;
  return new CMAHansenIndividual(x, targetSuccessProbability, InitialSigma.Value, zeroPath, identity);
}
562 |
|
---|
// Creates and evaluates the initial population: each start point is sampled
// uniformly within the (cycled) box bounds of the problem.
private void initSolutions() {
  solutions = new CMAHansenIndividual[PopulationSize];
  var bounds = Problem.Bounds;
  for (int i = 0; i < PopulationSize; i++) {
    RealVector x = new RealVector(Problem.ProblemSize);
    // BUGFIX: iterate over all dimensions of x (x.Length == Problem.ProblemSize).
    // The original looped to Problem.Objectives, leaving any dimension beyond the
    // objective count at its default 0 instead of a uniform random value.
    for (int j = 0; j < x.Length; j++) {
      int dim = j % bounds.Rows;
      x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
    }
    solutions[i] = initializeIndividual(x);
  }
  penalizeEvaluate(solutions);
}
579 |
|
---|
// Initializes the strategy parameters; the formulas match the default settings
// from Igel, Hansen & Roth, "Covariance Matrix Adaptation for Multi-objective
// Optimization" with lambda = 1 offspring per parent.
private void initStrategy()
{
  int lambda = 1; // offspring per parent; see the 1,1-ES TODO in generateOffspring
  double n = Problem.ProblemSize; // search-space dimensionality
  indicator = new CrowdingIndicator(); // tie-breaking contribution indicator used by selection()
  gauss = new NormalDistributedRandom(random, 0, 1); // N(0,1) source for mutation
  targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
  stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
  stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
  evolutionPathLearningRate = 2.0 / (n + 2.0);
  covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
  covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
  successThreshold = 0.44; // p_thresh; NOTE(review): presumably the paper's default — confirm

}
595 |
|
---|
596 |
|
---|
// Creates all result entries (counters, quality indicators, time table, and
// solution views) before the first iteration runs.
private void initResults() {
  Results.Add(new Result("Iterations", new IntValue(0)));
  Results.Add(new Result("Evaluations", new IntValue(0)));

  Results.Add(new Result("Hypervolume", new DoubleValue(0)));
  Results.Add(new Result("Best Hypervolume", new DoubleValue(0)));
  Results.Add(new Result("Generational Distance", new DoubleValue(0)));
  Results.Add(new Result("Inverted Generational Distance", new DoubleValue(0)));
  Results.Add(new Result("Crowding", new DoubleValue(0)));

  // one row per indicator, filled each iteration by analyzeQualityIndicators()
  var table = new DataTable("QualityIndicators");
  Results.Add(new Result("Timetable", table));
  table.Rows.Add(new DataRow("Best Hypervolume"));
  table.Rows.Add(new DataRow("Iteration Hypervolume"));
  table.Rows.Add(new DataRow("Iteration Crowding"));
  table.Rows.Add(new DataRow("Iteration Generational Distance"));
  table.Rows.Add(new DataRow("Iteration Inverted Generational Distance"));

  Results.Add(new Result("Solutions", new DoubleMatrix()));
  // BUGFIX: the original called Problem.BestKnownFront.ToArray<double[]>() here
  // unconditionally and only null-checked BestKnownFront afterwards, throwing a
  // NullReferenceException for problems without a known front.
  var bestKnownFront = Problem.BestKnownFront;
  Results.Add(new Result("Solutions Scatterplot", new MOSolution(null, null, bestKnownFront == null ? null : bestKnownFront.ToArray<double[]>(), Problem.Objectives)));

  if (bestKnownFront != null) {
    Results.Add(new Result("Best Known Hypervolume", new DoubleValue(Hypervolume.Calculate(bestKnownFront, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization))));
  }
}
627 | #endregion
|
---|
628 |
|
---|
629 | #region analyze
|
---|
// Publishes the current population to the result views (scatter plot + matrix)
// and refreshes the quality indicators.
private void analyzeSolutions() {
  var qualities = solutions.Select(s => s.fitness).ToArray<double[]>();
  var points = solutions.Select(s => ToArray(s.x)).ToArray<double[]>();
  // keep the previously stored pareto front / objective count of the plot
  ResultsSolutionsPlot = new MOSolution(qualities, points, ResultsSolutionsPlot.ParetoFront, ResultsSolutionsPlot.Objectives);
  ResultsSolutions = ToMatrix(solutions.Select(s => ToArray(s.x)));
  analyzeQualityIndicators();
}
641 |
|
---|
// Computes the quality indicators for the current population and appends them
// to the scalar results and the time table rows.
private void analyzeQualityIndicators()
{

  // non-dominated (unpenalized) fitness vectors, restricted to those dominating the reference point
  var front = NonDominatedSelect.selectNonDominatedVectors(solutions.Select(x => x.fitness), Problem.Maximization, true);
  front = NonDominatedSelect.removeNonReferenceDominatingVectors(front, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization, false);
  var bounds = ToArray(Problem.Bounds);
  ResultsCrowding = Crowding.Calculate(front, bounds);
  // the distance indicators need the optimal front; NaN when it is unknown
  ResultsGenerationalDistance = Problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, Problem.BestKnownFront, 1) : Double.NaN;

  ResultsInvertedGenerationalDistance = Problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, Problem.BestKnownFront, 1) : Double.NaN;

  ResultsHypervolume = Hypervolume.Calculate(front, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization);
  ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);

  //Datalines: append one data point per iteration for each indicator
  ResultsHypervolumeBest.Values.Add(ResultsBestHypervolume);
  ResultsHypervolumeIteration.Values.Add(ResultsHypervolume);
  ResultsCrowdingIteration.Values.Add(ResultsCrowding);
  ResultsGenerationalDistanceIteration.Values.Add(ResultsGenerationalDistance);
  ResultsInvertedGenerationalDistanceIteration.Values.Add(ResultsInvertedGenerationalDistance);


}
665 | #endregion
|
---|
666 |
|
---|
667 |
|
---|
668 | //MU = populationSize
|
---|
669 | #region mainloop
|
---|
/// <summary>
/// Main algorithm loop: seeds the RNG, initializes results, strategy parameters
/// and the initial population, then iterates until the generation limit is
/// reached or the run is cancelled. Analysis runs after every generation,
/// including the one aborted by cancellation.
/// </summary>
/// <param name="cancellationToken">checked once per generation via ThrowIfCancellationRequested</param>
protected override void Run(CancellationToken cancellationToken)
{
    // Set up the algorithm
    if (SetSeedRandomly) Seed = new System.Random().Next();
    random.Reset(Seed);

    initResults();
    initStrategy();
    initSolutions();

    // Loop until iteration limit reached or canceled.
    // NOTE(review): starting at 1 with '<' executes MaximumGenerations - 1
    // iterations — confirm this off-by-one is intended.
    for (ResultsIterations = 1; ResultsIterations < MaximumGenerations; ResultsIterations++)
    {
        try
        {
            iterate();
            cancellationToken.ThrowIfCancellationRequested();
        }
        finally
        {
            // always record results for this generation, even when the
            // cancellation exception above unwinds the loop
            analyzeSolutions();
        }
    }
}
|
---|
694 |
|
---|
/// <summary>
/// Cancels the run once the elapsed execution time exceeds MaximumRuntime;
/// a MaximumRuntime of -1 disables the time limit.
/// </summary>
protected override void OnExecutionTimeChanged()
{
    base.OnExecutionTimeChanged();
    if (CancellationTokenSource == null || MaximumRuntime == -1) return;
    if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
}
|
---|
702 |
|
---|
/// <summary>
/// One generation of the MO-CMA-ES: generate offspring, evaluate them
/// (penalized), merge offspring with the current parents, select the best
/// |solutions| individuals and update the population's strategy parameters.
/// The statement order is significant — each step consumes the previous one's result.
/// </summary>
private void iterate()
{
    CMAHansenIndividual[] offspring = generateOffspring();
    penalizeEvaluate(offspring);
    var parents = merge(solutions, offspring);
    selection(parents, solutions.Length);
    updatePopulation(parents);
}
|
---|
712 | #endregion
|
---|
713 |
|
---|
714 | #region bernhard properties
|
---|
private NormalDistributedRandom gauss;          // standard-normal sampler passed to CMAHansenIndividual.mutate
private CMAHansenIndividual[] solutions;        // current population (MU individuals)
private const double penalizeFactor = 1e-6;     // presumably weights the boundary-violation penalty — confirm against penalizeEvaluate
private IIndicator indicator;                   // secondary selection criterion; not used in this part of the file

private double stepSizeLearningRate;            // cp: step-size learning rate in [0,1]
private double stepSizeDampeningFactor;         // d: step-size dampening factor
private double targetSuccessProbability;        // p^target_succ: target success probability
private double evolutionPathLearningRate;       // cc: evolution-path learning rate
private double covarianceMatrixLearningRate;    // ccov: covariance-matrix learning rate
private double covarianceMatrixUnlearningRate;  // unlearning rate, taken from the Shark implementation
private double successThreshold;                // p_thresh: success-rate threshold
|
---|
727 | #endregion
|
---|
728 |
|
---|
/// <summary>
/// One individual of the MO-CMA-ES: a search point together with its own
/// step size, evolution path and Cholesky factor of the covariance matrix.
/// </summary>
private class CMAHansenIndividual
{
    // outcome codes used by the surrounding algorithm's success bookkeeping
    public static readonly int success = 1;
    public static readonly int noSuccess = 2;
    public static readonly int failure = 3;

    //Chromosome
    public RealVector x;               // current search point
    public double successProbability;  // smoothed success rate p_succ
    public double sigma;               // stepsize
    public RealVector evolutionPath;   // pc
    public RealVector lastStep;        // lowerCholesky * lastZ, set by mutate()
    public RealVector lastZ;           // last N(0,I) sample, set by mutate()
    private double[,] lowerCholesky;   // lower-triangular factor L with L*L^T == C

    //Phenotype
    public double[] fitness;
    public double[] penalizedFitness;
    public bool selected = true;

    internal double rank;              // index of the non-dominated front this individual lies on

    /// <summary>
    /// Creates a fresh individual and factorizes its covariance matrix.
    /// </summary>
    /// <param name="x">has to be 0-vector with correct length</param>
    /// <param name="p_succ">has to be ptargetsucc</param>
    /// <param name="sigma">initialSigma</param>
    /// <param name="pc">has to be 0-vector with correct length</param>
    /// <param name="C">has to be a symmetric positive definite covariance matrix</param>
    public CMAHansenIndividual(RealVector x, double p_succ, double sigma, RealVector pc, double[,] C)
    {
        this.x = x;
        this.successProbability = p_succ;
        this.sigma = sigma;
        this.evolutionPath = pc;
        choleskyDecomposition(C);
    }

    /// <summary>
    /// Cholesky–Banachiewicz decomposition: fills lowerCholesky so that
    /// lowerCholesky * lowerCholesky^T == C.
    /// Throws ArgumentException if C is not square or not symmetric positive definite.
    /// </summary>
    private void choleskyDecomposition(double[,] C)
    {
        if (C.GetLength(0) != C.GetLength(1)) throw new ArgumentException("Covariancematrix is not quadratic");
        int n = C.GetLength(0);
        lowerCholesky = new double[n, n];
        double[,] A = lowerCholesky;
        for (int i = 0; i < n; i++)
        {
            for (int j = 0; j <= i; j++)
            {
                double sum = C[i, j];
                for (int k = 0; k < j; k++)
                {
                    sum -= A[i, k] * A[j, k];
                }
                // BUGFIX: this element update used to sit INSIDE the k-loop, so for
                // every j == 0 column (including A[0,0]) the loop body never executed
                // and the factor silently kept the raw C values. It must run exactly
                // once per (i, j), after the full inner-product subtraction.
                if (i > j)
                {
                    A[i, j] = sum / A[j, j];
                }
                else if (sum > 0)
                {
                    A[i, i] = Math.Sqrt(sum);
                }
                else
                {
                    throw new ArgumentException("Covariancematrix is not symetric positiv definit");
                }
            }
        }
    }

    /// <summary>
    /// Copy constructor. Deep-copies all strategy state so individuals never share
    /// mutable arrays; fitness values are intentionally NOT copied (copies are
    /// re-evaluated by the algorithm).
    /// </summary>
    public CMAHansenIndividual(CMAHansenIndividual other)
    {
        this.successProbability = other.successProbability;
        this.sigma = other.sigma;

        // deep copies — mutable state must not be shared between individuals
        this.evolutionPath = (RealVector)other.evolutionPath.Clone();
        this.x = (RealVector)other.x.Clone();
        this.lowerCholesky = (double[,])other.lowerCholesky.Clone();
    }

    /// <summary>
    /// pc &lt;- learningRate * pc + sqrt(updateWeight) * lastStep
    /// (requires that mutate() has set lastStep).
    /// </summary>
    public void updateEvolutionPath(double learningRate, double updateWeight)
    {
        updateWeight = Math.Sqrt(updateWeight);
        for (int i = 0; i < evolutionPath.Length; i++)
        {
            evolutionPath[i] *= learningRate;
            evolutionPath[i] += updateWeight * lastStep[i];
        }
    }

    /// <summary>
    /// Squared Euclidean norm of the last N(0,I) sample lastZ.
    /// NOTE(review): name contains a typo ("Setp"); kept for caller compatibility.
    /// </summary>
    public double getSetpNormSqr()
    {
        double sum = 0;
        foreach (double d in lastZ)
        {
            sum += d * d;
        }
        return sum;
    }

    /// <summary>
    /// Rank-one update of the Cholesky factor: L*L^T &lt;- alpha * L*L^T + beta * v*v^T.
    /// Ported from the Shark library's choleskyUpdate.
    /// Throws ArgumentException when the update would make the matrix indefinite.
    /// </summary>
    public void choleskyUpdate(RealVector v, double alpha, double beta)
    {
        int n = v.Length;
        double[] temp = new double[n];
        for (int i = 0; i < n; i++) temp[i] = v[i];
        double betaPrime = 1;
        double a = Math.Sqrt(alpha);
        for (int j = 0; j < n; j++)
        {
            double Ljj = a * lowerCholesky[j, j];
            double dj = Ljj * Ljj;
            double wj = temp[j];
            double swj2 = beta * wj * wj;
            double gamma = dj * betaPrime + swj2;
            double x = dj + swj2 / betaPrime;
            if (x < 0.0) throw new ArgumentException("Update makes Covariancematrix indefinite");
            double nLjj = Math.Sqrt(x);
            lowerCholesky[j, j] = nLjj;
            betaPrime += swj2 / dj;
            if (j + 1 < n)
            {
                // scale the remainder of column j by sqrt(alpha)
                for (int i = j + 1; i < n; i++)
                {
                    lowerCholesky[i, j] *= a;
                }
                // BUGFIX: Shark SUBTRACTS here (temp -= (wj / Ljj) * column); the old
                // port overwrote temp with the product instead, corrupting every
                // subsequent column of the updated factor.
                for (int i = j + 1; i < n; i++)
                {
                    temp[i] -= wj / Ljj * lowerCholesky[i, j];
                }
                if (gamma == 0) continue;
                for (int i = j + 1; i < n; i++)
                {
                    lowerCholesky[i, j] *= nLjj / Ljj;
                }
                for (int i = j + 1; i < n; i++)
                {
                    lowerCholesky[i, j] += (nLjj * beta * wj / gamma) * temp[i];
                }
            }
        }
    }

    /// <summary>
    /// Samples z ~ N(0, I), sets lastStep = lowerCholesky * z and moves
    /// x by sigma * lastStep, i.e. x_new ~ N(x_old, sigma^2 * C).
    /// </summary>
    public void mutate(NormalDistributedRandom gauss)
    {
        //sampling a random z from N(0,I) where I is the Identity matrix;
        lastZ = new RealVector(x.Length);
        int n = lastZ.Length;
        for (int i = 0; i < n; i++)
        {
            lastZ[i] = gauss.NextDouble();
        }
        //Matrixmultiplication: lastStep = lowerCholesky * lastZ;
        lastStep = new RealVector(x.Length);
        for (int i = 0; i < n; i++)
        {
            double sum = 0;
            for (int j = 0; j <= i; j++)
            {
                sum += lowerCholesky[i, j] * lastZ[j];
            }
            lastStep[i] = sum;
        }
        //add the step to x weighted by stepsize;
        for (int i = 0; i < x.Length; i++)
        {
            x[i] += sigma * lastStep[i];
        }
    }
}
|
---|
912 |
|
---|
913 | //blatantly stolen form HeuristicLab.Optimization.Operators.FastNonDominatedSort
|
---|
914 | #region FastNonDominatedSort
|
---|
/// <summary>Outcome of a Pareto-domination comparison between two quality vectors.</summary>
private enum DominationResult { Dominates, IsDominated, IsNonDominated };
|
---|
916 |
|
---|
//blatantly stolen form HeuristicLab.Optimization.Operators.FastNonDominatedSort
/// <summary>
/// Fast non-dominated sorting (Deb et al., NSGA-II): partitions the individuals
/// into Pareto fronts based on their penalized fitness and writes each
/// individual's front index into its rank field.
/// </summary>
/// <param name="individuals">population to sort; must not be null</param>
/// <returns>
/// the list of fronts, best first; the final entry is the empty front that
/// terminated the loop (callers relying on the original behavior expect it)
/// </returns>
private List<List<CMAHansenIndividual>> NonDominatedSort(CMAHansenIndividual[] individuals)
{
    bool dominateOnEqualQualities = false;
    bool[] maximization = Problem.Maximization;
    if (individuals == null) throw new InvalidOperationException(Name + ": No qualities found.");
    int populationSize = individuals.Length;

    List<List<CMAHansenIndividual>> fronts = new List<List<CMAHansenIndividual>>();
    // for each individual: the indices of the individuals it dominates
    Dictionary<CMAHansenIndividual, List<int>> dominatedScopes = new Dictionary<CMAHansenIndividual, List<int>>();
    // for each index: how many individuals dominate it
    int[] dominationCounter = new int[populationSize];

    // pairwise comparison; each pair (pI, qI) with pI < qI is tested exactly once
    for (int pI = 0; pI < populationSize - 1; pI++)
    {
        CMAHansenIndividual p = individuals[pI];
        List<int> dominatedScopesByp;
        if (!dominatedScopes.TryGetValue(p, out dominatedScopesByp))
            dominatedScopes[p] = dominatedScopesByp = new List<int>();
        for (int qI = pI + 1; qI < populationSize; qI++)
        {
            DominationResult test = Dominates(individuals[pI], individuals[qI], maximization, dominateOnEqualQualities);
            if (test == DominationResult.Dominates)
            {
                dominatedScopesByp.Add(qI);
                dominationCounter[qI] += 1;
            }
            else if (test == DominationResult.IsDominated)
            {
                dominationCounter[pI] += 1;
                if (!dominatedScopes.ContainsKey(individuals[qI]))
                    dominatedScopes.Add(individuals[qI], new List<int>());
                dominatedScopes[individuals[qI]].Add(pI);
            }
            // the last individual never becomes p; at the final comparison its
            // domination counter is complete, so front 0 membership is decided here
            if (pI == populationSize - 2
                && qI == populationSize - 1
                && dominationCounter[qI] == 0)
            {
                AddToFront(individuals[qI], fronts, 0);
            }
        }
        // p's counter is complete once its inner loop has finished
        if (dominationCounter[pI] == 0)
        {
            AddToFront(p, fronts, 0);
        }
    }
    // peel off subsequent fronts: removing front i decrements the counters of
    // everything it dominates; whatever reaches zero forms front i + 1
    int i = 0;
    while (i < fronts.Count && fronts[i].Count > 0)
    {
        List<CMAHansenIndividual> nextFront = new List<CMAHansenIndividual>();
        foreach (CMAHansenIndividual p in fronts[i])
        {
            List<int> dominatedScopesByp;
            if (dominatedScopes.TryGetValue(p, out dominatedScopesByp))
            {
                for (int k = 0; k < dominatedScopesByp.Count; k++)
                {
                    int dominatedScope = dominatedScopesByp[k];
                    dominationCounter[dominatedScope] -= 1;
                    if (dominationCounter[dominatedScope] == 0)
                    {
                        nextFront.Add(individuals[dominatedScope]);
                    }
                }
            }
        }
        i += 1;
        fronts.Add(nextFront);
    }

    // record each individual's front index (the trailing empty front is harmless)
    for (i = 0; i < fronts.Count; i++)
    {
        foreach (var p in fronts[i])
        {
            p.rank = i;
        }
    }
    return fronts;
}
|
---|
1004 |
|
---|
/// <summary>
/// Appends p to front i, lazily creating the front when i is the next
/// front index to be opened.
/// </summary>
private static void AddToFront(CMAHansenIndividual p, List<List<CMAHansenIndividual>> fronts, int i)
{
    if (fronts.Count == i) fronts.Add(new List<CMAHansenIndividual>());
    fronts[i].Add(p);
}
|
---|
1010 |
|
---|
/// <summary>Compares two individuals by their penalized fitness vectors.</summary>
private static DominationResult Dominates(CMAHansenIndividual left, CMAHansenIndividual right, bool[] maximizations, bool dominateOnEqualQualities)
{
    return Dominates(left.penalizedFitness, right.penalizedFitness, maximizations, dominateOnEqualQualities);
}
|
---|
1015 |
|
---|
/// <summary>
/// Pareto-domination test between two quality vectors. When
/// dominateOnEqualQualities is set, exactly equal vectors count as dominated
/// (used to discard duplicates).
/// </summary>
private static DominationResult Dominates(double[] left, double[] right, bool[] maximizations, bool dominateOnEqualQualities)
{
    //mkommend Caution: do not use LINQ.SequenceEqual for comparing the two quality arrays (left and right) due to performance reasons
    if (dominateOnEqualQualities)
    {
        bool identical = true;
        int idx = 0;
        while (identical && idx < left.Length)
        {
            identical = left[idx] == right[idx];
            idx++;
        }
        if (identical) return DominationResult.Dominates;
    }

    bool betterSomewhere = false;
    bool worseSomewhere = false;
    // stop early once the vectors are known to be mutually non-dominated
    for (int k = 0; k < left.Length && !(betterSomewhere && worseSomewhere); k++)
    {
        if (IsDominated(left[k], right[k], maximizations[k])) worseSomewhere = true;
        else if (IsDominated(right[k], left[k], maximizations[k])) betterSomewhere = true;
    }

    if (betterSomewhere && !worseSomewhere) return DominationResult.Dominates;
    if (worseSomewhere && !betterSomewhere) return DominationResult.IsDominated;
    return DominationResult.IsNonDominated;
}
|
---|
1045 |
|
---|
/// <summary>
/// True when left is strictly worse than right in a single objective
/// (smaller under maximization, larger under minimization); equal values
/// dominate in neither direction.
/// </summary>
private static bool IsDominated(double left, double right, bool maximization)
{
    if (maximization) return left < right;
    return left > right;
}
|
---|
1051 |
|
---|
1052 | #endregion
|
---|
1053 |
|
---|
1054 |
|
---|
1055 |
|
---|
1056 | #region conversions
|
---|
1057 |
|
---|
/// <summary>Copies the components of a RealVector into a plain double array.</summary>
public double[] ToArray(RealVector r)
{
    var values = new double[r.Length];
    for (int idx = 0; idx < values.Length; idx++)
    {
        values[idx] = r[idx];
    }
    return values;
}
|
---|
1067 |
|
---|
/// <summary>
/// Copies a sequence of row vectors into a DoubleMatrix. All rows are
/// assumed to have the same length as the first row.
/// </summary>
/// <param name="data">row vectors; may be empty</param>
public DoubleMatrix ToMatrix(IEnumerable<double[]> data)
{
    var d2 = data.ToArray<double[]>();
    // robustness: the original indexed d2[0] unconditionally and crashed on empty input
    if (d2.Length == 0) return new DoubleMatrix(0, 0);
    DoubleMatrix mat = new DoubleMatrix(d2.Length, d2[0].Length);
    for (int i = 0; i < mat.Rows; i++)
    {
        for (int j = 0; j < mat.Columns; j++)
        {
            mat[i, j] = d2[i][j];
        }
    }
    return mat;
}
|
---|
/// <summary>Copies a DoubleMatrix into a plain two-dimensional double array.</summary>
public double[,] ToArray(DoubleMatrix data)
{
    int rows = data.Rows;
    int cols = data.Columns;
    var result = new double[rows, cols];
    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            result[r, c] = data[r, c];
        }
    }
    return result;
}
|
---|
1094 |
|
---|
1095 | #endregion
|
---|
1096 | }
|
---|
1097 | }
|
---|