
source: branches/HeuristicLab.Algorithms.CMAEvolutionStrategy/sources/MOCMAES/MOCMASEvolutionStrategy.cs @ 14269

Last change on this file since 14269 was 14269, checked in by bwerth, 7 years ago

#2592 code cleanup + project refactored

File size: 37.4 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Random;
using System.Linq;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Algorithms.MOCMAEvolutionStrategy;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("MOCMAS Evolution Strategy (MOCMASES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableClass]
  public class MOCMASEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(MultiObjectiveTestFunctionProblem); }
    }
    public new MultiObjectiveTestFunctionProblem Problem {
      get { return (MultiObjectiveTestFunctionProblem)base.Problem; }
      set { base.Problem = value; }
    }

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string SolutionsResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";

    #endregion

    #region internal variables

    private readonly IRandom random = new MersenneTwister();
    private NormalDistributedRandom gauss;
    private MOCMAESIndividual[] solutions;
    private const double penalizeFactor = 1e-6;

    //TODO OptionalValueParameter
    private double stepSizeLearningRate;           // c_p: learning rate in [0,1]
    private double stepSizeDampeningFactor;        // d
    private double targetSuccessProbability;       // p^target_succ
    private double evolutionPathLearningRate;      // c_c
    private double covarianceMatrixLearningRate;   // c_cov
    private double covarianceMatrixUnlearningRate; // from Shark
    private double successThreshold;               // p_thresh
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    private IFixedValueParameter<IntValue> PopulationSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    private IFixedValueParameter<IntValue> MaximumGenerationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    private IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    private IFixedValueParameter<DoubleValue> InitialSigmaParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[InitialSigmaName]; }
    }
    private IConstrainedValueParameter<IIndicator> IndicatorParameter {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public double InitialSigma {
      get { return InitialSigmaParameter.Value.Value; }
      set { InitialSigmaParameter.Value.Value = value; }
    }
    public IIndicator Indicator {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }

    #endregion
    #region ResultsProperties
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion
    //QualityIndicators
    private double ResultsHypervolume {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    //Solutions
    private DoubleMatrix ResultsSolutions {
      get { return ((DoubleMatrix)Results[SolutionsResultName].Value); }
      set { Results[SolutionsResultName].Value = value; }
    }
    private ScatterPlotContent ResultsScatterPlot {
      get { return ((ScatterPlotContent)Results[ScatterPlotResultName].Value); }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region constructors and hlBoilerPlate-code
    [StorableConstructor]
    protected MOCMASEvolutionStrategy(bool deserializing) : base(deserializing) { }

    protected MOCMASEvolutionStrategy(MOCMASEvolutionStrategy original, Cloner cloner)
        : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new MOCMASEvolutionStrategy(this, cloner);
    }

    public MOCMASEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(InitialSigmaName, "The initial sigma is a single value > 0.", new DoubleValue(0.5)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));

      ItemSet<IIndicator> set = new ItemSet<IIndicator>();
      var default_ = new HypervolumeIndicator();
      set.Add(default_);
      set.Add(new CrowdingIndicator());
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism on non-dominated solutions", set, default_));
    }
    #endregion

    private void AddResult<T>(String name, String desc, T defaultValue) where T : class, IItem {
      Results.Add(new Result(name, desc, defaultValue));
    }

    #region updates
    private void UpdatePopulation(MOCMAESIndividual[] parents) {
      int[] offspringSuccess = new int[solutions.Length];
      int offspringLength = parents.Length - solutions.Length;

      for (int i = 0; i < offspringLength; i++) {
        if (parents[i + solutions.Length].selected) {
          UpdateAsOffspring(parents[i + solutions.Length]);

          //TODO this may change if more offspring per parent is allowed
          offspringSuccess[i] += MOCMAESIndividual.success;
        }
      }
      for (int i = 0; i < solutions.Length; i++) {
        if (parents[i].selected) {
          UpdateAsParent(parents[i], offspringSuccess[i]);
        }
      }

      solutions = new MOCMAESIndividual[solutions.Length];
      int j = 0;
      foreach (MOCMAESIndividual ind in parents) {
        if (ind.selected) {
          solutions[j++] = ind;
        }
      }
    }

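    // Strategy-parameter update for a parent individual: v indicates whether its offspring was
    // selected into the next population (success) or not. The smoothed success probability is
    // updated with learning rate c_p and the step size follows the success-rule adaptation
    //   sigma *= exp((1/d) * (p_succ - p_target) / (1 - p_target)).
    // If the offspring failed and the smoothed success rate is below the threshold, the
    // covariance factor is additionally shrunk along the parent's last step via a negative
    // rank-one update (the "unlearning" rate noted above as taken from Shark); otherwise the
    // regular round update is applied.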
    private void UpdateAsParent(MOCMAESIndividual c, int v) {
      c.successProbability = (1 - stepSizeLearningRate) * c.successProbability + stepSizeLearningRate * (v == MOCMAESIndividual.success ? 1 : 0);
      c.sigma *= Math.Exp(1 / stepSizeDampeningFactor * (c.successProbability - targetSuccessProbability) / (1 - targetSuccessProbability));
      if (v != MOCMAESIndividual.failure) return;
      if (c.successProbability < successThreshold) {
        double stepNormSqr = c.GetStepNormSqr();
        double rate = covarianceMatrixUnlearningRate;
        if (stepNormSqr > 1 && 1 < covarianceMatrixUnlearningRate * (2 * stepNormSqr - 1)) {
          rate = 1 / (2 * stepNormSqr - 1);
        }
        RankOneUpdate(c, 1 + rate, -rate, c.lastStep);
      } else {
        RoundUpdate(c);
      }
    }

    private void UpdateAsOffspring(MOCMAESIndividual c) {
      c.successProbability = (1 - stepSizeLearningRate) * c.successProbability + stepSizeLearningRate;
      c.sigma *= Math.Exp(1 / stepSizeDampeningFactor * (c.successProbability - targetSuccessProbability) / (1 - targetSuccessProbability));
      double evolutionpathUpdateWeight = evolutionPathLearningRate * (2.0 - evolutionPathLearningRate);
      if (c.successProbability < successThreshold) {
        c.UpdateEvolutionPath(1 - evolutionPathLearningRate, evolutionpathUpdateWeight);
        RankOneUpdate(c, 1 - covarianceMatrixLearningRate, covarianceMatrixLearningRate, c.evolutionPath);
      } else {
        RoundUpdate(c);
      }
    }

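    // Rank-one update of the covariance matrix, C <- v1 * C + v2 * step * step^T, carried out
    // directly on the individual's Cholesky factor. RoundUpdate is the variant used when the
    // smoothed success rate is above the threshold: the evolution path only decays (update
    // weight 0) and its missing contribution is folded into the scaling of C instead.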
    private void RankOneUpdate(MOCMAESIndividual c, double v1, double v2, RealVector lastStep) {
      c.CholeskyUpdate(lastStep, v1, v2);
    }

    private void RoundUpdate(MOCMAESIndividual c) {
      double evolutionPathUpdateWeight = evolutionPathLearningRate * (2.0 - evolutionPathLearningRate);
      c.UpdateEvolutionPath(1 - evolutionPathLearningRate, 0);
      RankOneUpdate(c, 1 - covarianceMatrixLearningRate + evolutionPathUpdateWeight,
        covarianceMatrixLearningRate, c.evolutionPath);
    }
    #endregion
    #region selection

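    // (mu+lambda) environmental selection: a non-dominated sort assigns Pareto ranks, whole
    // fronts are discarded from the worst rank down as long as at least 'length' individuals
    // remain selected, and the chosen indicator (hypervolume or crowding) then removes the
    // least-contributing individuals from the last front that only partially fits.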
    private void Selection(MOCMAESIndividual[] parents, int length) {
      //perform a non-dominated sort to assign a rank to every element
      var fronts = NonDominatedSort(parents);

      //deselect whole fronts, worst rank first, as long as at least 'length' elements remain selected
      int rank = fronts.Count - 1;
      int popSize = parents.Length;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.selected = false;
        popSize -= front.Count;
        rank--;
      }

      //now use the indicator to deselect the worst-approximating elements of the last selected front
      var front_ = fronts[rank];
      for (; popSize > length; popSize--) {
        int lc = Indicator.LeastContributer<MOCMAESIndividual>(front_.ToArray(), x => x.penalizedFitness, Problem);
        front_[lc].selected = false;
        front_.Swap(lc, front_.Count - 1);
        front_.RemoveAt(front_.Count - 1);
      }
    }
    #endregion
    #region penalize Box-Constraints
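    // Box-constraint handling: infeasible candidates are evaluated at the closest feasible point
    // and their penalized fitness adds a penalty proportional to the squared distance to that
    // point, so selection still prefers solutions close to the feasible region.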
    private void PenalizeEvaluate(IEnumerable<MOCMAESIndividual> offspring) {
      foreach (MOCMAESIndividual child in offspring) PenalizeEvaluate(child);
    }

    private void PenalizeEvaluate(MOCMAESIndividual offspring) {
      if (IsFeasible(offspring.x)) {
        offspring.fitness = Evaluate(offspring.x);
        offspring.penalizedFitness = offspring.fitness;
      } else {
        RealVector t = ClosestFeasible(offspring.x);
        offspring.fitness = Evaluate(t);
        offspring.penalizedFitness = Penalize(offspring.x, t, offspring.fitness);
      }
    }

    private double[] Evaluate(RealVector x) {
      double[] res = Problem.Evaluate(x);
      ResultsEvaluations++;
      return res;
    }

    private double[] Penalize(RealVector x, RealVector t, double[] fitness) {
      double penalty = Penalize(x, t);
      return fitness.Select(v => v + penalty).ToArray();
    }

    private double Penalize(RealVector x, RealVector t) {
      double sum = 0;
      for (int i = 0; i < x.Length; i++) {
        double d = x[i] - t[i];
        sum += d * d;
      }
      return penalizeFactor * sum;
    }

    private RealVector ClosestFeasible(RealVector x) {
      DoubleMatrix bounds = Problem.Bounds;
      RealVector r = new RealVector(x.Length);

      for (int i = 0; i < x.Length; i++) {
        int dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }

    private bool IsFeasible(RealVector offspring) {
      DoubleMatrix bounds = Problem.Bounds;

      for (int i = 0; i < offspring.Length; i++) {
        int dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

    #region mutation
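    // Each parent produces exactly one offspring: the parent's strategy parameters are copied and
    // the offspring is mutated by sampling x' = x + sigma * A * z with z ~ N(0, I), where A is the
    // parent's lower Cholesky factor of the covariance matrix (see MOCMAESIndividual.Mutate).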
    private MOCMAESIndividual[] GenerateOffspring() {
      MOCMAESIndividual[] offspring = new MOCMAESIndividual[PopulationSize];
      for (int i = 0; i < PopulationSize; i++) {
        offspring[i] = new MOCMAESIndividual(solutions[i]);
        offspring[i].Mutate(gauss);
      }
      return offspring;
    }
    #endregion

    #region initialization
    private MOCMAESIndividual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var identity = new double[x.Length, x.Length];
      for (int i = 0; i < x.Length; i++) {
        identity[i, i] = 1;
      }
      return new MOCMAESIndividual(x, targetSuccessProbability, InitialSigma, zeros, identity);
    }

    private void InitSolutions() {
      solutions = new MOCMAESIndividual[PopulationSize];
      for (int i = 0; i < PopulationSize; i++) {
        RealVector x = new RealVector(Problem.ProblemSize); // A uniform distribution in all dimensions is assumed.
                                                            // There is the UniformSolutionCreater associated with the Encoding, but it was considered not usable here.
        var bounds = Problem.Bounds;
        for (int j = 0; j < Problem.ProblemSize; j++) {
          int dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
      }
      PenalizeEvaluate(solutions);
    }

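    // External strategy parameters. The values below correspond to the default settings
    // recommended for the MO-CMA-ES with one offspring per parent (lambda = 1) in the paper by
    // Igel, Hansen and Roth referenced above; the covariance "unlearning" rate is, as noted in
    // the field declaration, taken from the Shark implementation.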
    private void InitStrategy() {
      int lambda = 1;
      double n = Problem.ProblemSize;
      gauss = new NormalDistributedRandom(random, 0, 1);
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }

    private void InitResults() {
      AddResult(IterationsResultName, "The number of generations evaluated", new IntValue(0));
      AddResult(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0));
      AddResult(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the problem", new DoubleValue(0.0));
      AddResult(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the problem", new DoubleValue(0.0));
      AddResult(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the problem", new DoubleValue(Double.NaN));
      AddResult(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(Double.NaN));
      AddResult(GenerationalDistanceResultName, "The generational distance to an optimal Pareto front defined in the problem", new DoubleValue(Double.NaN));
      AddResult(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal Pareto front defined in the problem", new DoubleValue(Double.NaN));
      AddResult(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0));
      AddResult(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      AddResult(TimetableResultName, "Different quality measures as a time series", table);
      AddResult(SolutionsResultName, "The current front", new DoubleMatrix());
      AddResult(ScatterPlotResultName, "A scatter plot displaying the evaluated solutions and (if available) the analytically optimal front", new ScatterPlotContent(null, null, null, 2));

      if (Problem.BestKnownFront != null) {
        ResultsBestKnownHypervolume = Hypervolume.Calculate(Utilities.ToArray(Problem.BestKnownFront), Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      ResultsScatterPlot = new ScatterPlotContent(null, null, Utilities.ToArray(Problem.BestKnownFront), Problem.Objectives);
    }
    #endregion

    #region analyze
    private void AnalyzeSolutions() {
      //solutions
      ResultsScatterPlot = new ScatterPlotContent(solutions.Select(x => x.fitness).ToArray<double[]>(),
          solutions.Select(x => ToArray(x.x)).ToArray<double[]>(),
          ResultsScatterPlot.ParetoFront,
          ResultsScatterPlot.Objectives);
      ResultsSolutions = ToMatrix(solutions.Select(x => ToArray(x.x)));
      AnalyzeQualityIndicators();
    }

    private void AnalyzeQualityIndicators() {
      //var front = NonDominatedSelect.selectNonDominated(solutions.Select(x => x.fitness), Problem.Maximization, true);
      var front = NonDominatedSelect.GetDominatingVectors(solutions.Select(x => x.fitness), Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization, true);
      //front = NonDominatedSelect.removeNonReferenceDominatingVectors(front, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization, false);
      var bounds = ToArray(Problem.Bounds);
      ResultsCrowding = Crowding.Calculate(front, bounds);
      ResultsSpacing = Spacing.Calculate(front);
      ResultsGenerationalDistance = Problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, Utilities.ToArray(Problem.BestKnownFront), 1) : Double.NaN;
      ResultsInvertedGenerationalDistance = Problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, Utilities.ToArray(Problem.BestKnownFront), 1) : Double.NaN;
      ResultsHypervolume = Hypervolume.Calculate(front, Problem.TestFunction.ReferencePoint(Problem.Objectives), Problem.Maximization);
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume; //Take best of this run or current HV???

      //Datalines
      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);
    }
    #endregion

    //MU = populationSize
    #region mainloop
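    // Main loop: initialize results, strategy parameters and population, then iterate
    // (generate offspring, evaluate with box-constraint penalty, select, update strategy
    // parameters) until the generation limit is reached or the run is cancelled. The
    // try/finally ensures the analyzers still run for the generation in which a
    // cancellation occurs.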
    protected override void Run(CancellationToken cancellationToken) {
      // Set up the algorithm
      if (SetSeedRandomly) Seed = new System.Random().Next();
      random.Reset(Seed);

      InitResults();
      InitStrategy();
      InitSolutions();

      // Loop until iteration limit reached or canceled.
      for (ResultsIterations = 1; ResultsIterations < MaximumGenerations; ResultsIterations++) {
        try {
          Iterate();
          cancellationToken.ThrowIfCancellationRequested();
        }
        finally {
          AnalyzeSolutions();
        }
      }
    }

    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }

    private void Iterate() {
      MOCMAESIndividual[] offspring = GenerateOffspring();
      PenalizeEvaluate(offspring);
      var parents = Merge(solutions, offspring);
      Selection(parents, solutions.Length);
      UpdatePopulation(parents);
    }
    #endregion

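    // Holds one solution together with its individual strategy parameters: the search point x,
    // the smoothed success probability, the step size sigma, the evolution path p_c, the last
    // sampled step, and the lower Cholesky factor of the covariance matrix (the matrix itself
    // is never stored explicitly).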
    private class MOCMAESIndividual {
      public static readonly int success = 1;
      public static readonly int noSuccess = 2;
      public static readonly int failure = 3;

      //Chromosome
      public RealVector x;
      public double successProbability;
      public double sigma; //step size
      public RealVector evolutionPath; // pc
      public RealVector lastStep;
      public RealVector lastZ;
      private double[,] lowerCholesky;

      //Phenotype
      public double[] fitness;
      public double[] penalizedFitness;
      public bool selected = true;

      internal double rank;

      /// <summary>
      /// Creates a new individual with the given initial strategy parameters.
      /// </summary>
      /// <param name="x">the initial search point</param>
      /// <param name="p_succ">has to be the target success probability (p^target_succ)</param>
      /// <param name="sigma">the initial step size</param>
      /// <param name="pc">has to be a 0-vector with correct length</param>
      /// <param name="C">has to be a symmetric positive definite covariance matrix</param>
      public MOCMAESIndividual(RealVector x, double p_succ, double sigma, RealVector pc, double[,] C) {
        this.x = x;
        this.successProbability = p_succ;
        this.sigma = sigma;
        this.evolutionPath = pc;
        CholeskyDecomposition(C);
      }

      private void CholeskyDecomposition(double[,] C) {
        if (!alglib.spdmatrixcholesky(ref C, C.GetLength(0), false)) {
          throw new ArgumentException("Covariance matrix is not symmetric positive definite");
        }
        lowerCholesky = (double[,])C.Clone();
      }

      public MOCMAESIndividual(MOCMAESIndividual other) {
        this.successProbability = other.successProbability;
        this.sigma = other.sigma;

        this.evolutionPath = (RealVector)other.evolutionPath.Clone();
        this.x = (RealVector)other.x.Clone();

        this.lowerCholesky = (double[,])other.lowerCholesky.Clone();
      }

      public void UpdateEvolutionPath(double learningRate, double updateWeight) {
        updateWeight = Math.Sqrt(updateWeight);
        for (int i = 0; i < evolutionPath.Length; i++) {
          evolutionPath[i] *= learningRate;
          evolutionPath[i] += updateWeight * lastStep[i];
        }
      }

      public double GetStepNormSqr() {
        double sum = 0;
        foreach (double d in lastZ) {
          sum += d * d;
        }
        return sum;
      }

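      // Rank-one update of the lower Cholesky factor so that the represented covariance matrix
      // becomes alpha * A * A^T + beta * v * v^T without ever forming the matrix explicitly;
      // throws if the requested update would make the matrix indefinite.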
      public void CholeskyUpdate(RealVector v, double alpha, double beta) {
        int n = v.Length;
        double[] temp = new double[n];
        for (int i = 0; i < n; i++) temp[i] = v[i];
        double betaPrime = 1;
        double a = Math.Sqrt(alpha);
        for (int j = 0; j < n; j++) {
          double Ljj = a * lowerCholesky[j, j];
          double dj = Ljj * Ljj;
          double wj = temp[j];
          double swj2 = beta * wj * wj;
          double gamma = dj * betaPrime + swj2;
          double x = dj + swj2 / betaPrime;
          if (x < 0.0) throw new ArgumentException("Update makes the covariance matrix indefinite");
          double nLjj = Math.Sqrt(x);
          lowerCholesky[j, j] = nLjj;
          betaPrime += swj2 / dj;
          if (j + 1 < n) {
            for (int i = j + 1; i < n; i++) {
              lowerCholesky[i, j] *= a;
            }
            for (int i = j + 1; i < n; i++) {
              temp[i] -= wj / Ljj * lowerCholesky[i, j];
            }
            if (gamma == 0) continue;
            for (int i = j + 1; i < n; i++) {
              lowerCholesky[i, j] *= nLjj / Ljj;
            }
            for (int i = j + 1; i < n; i++) {
              lowerCholesky[i, j] += (nLjj * beta * wj / gamma) * temp[i];
            }
          }
        }
      }

      public void Mutate(NormalDistributedRandom gauss) {
        //sample a random z from N(0, I) where I is the identity matrix
        lastZ = new RealVector(x.Length);
        int n = lastZ.Length;
        for (int i = 0; i < n; i++) {
          lastZ[i] = gauss.NextDouble();
        }
        //matrix multiplication: lastStep = lowerCholesky * lastZ
        lastStep = new RealVector(x.Length);
        for (int i = 0; i < n; i++) {
          double sum = 0;
          for (int j = 0; j <= i; j++) {
            sum += lowerCholesky[i, j] * lastZ[j];
          }
          lastStep[i] = sum;
        }

        //add the step to x, weighted by the step size
        for (int i = 0; i < x.Length; i++) {
          x[i] += sigma * lastStep[i];
        }
      }
    }

    //blatantly stolen from HeuristicLab.Optimization.Operators.FastNonDominatedSort
    #region FastNonDominatedSort
    private enum DominationResult { Dominates, IsDominated, IsNonDominated };

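    // Fast non-dominated sorting (as in NSGA-II): splits the merged parent+offspring population
    // into fronts of mutually non-dominated individuals and records each individual's Pareto
    // rank; it performs O(N^2) pairwise domination checks for N individuals.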
    private List<List<MOCMAESIndividual>> NonDominatedSort(MOCMAESIndividual[] individuals) {
      bool dominateOnEqualQualities = false;
      bool[] maximization = Problem.Maximization;
      if (individuals == null) throw new InvalidOperationException(Name + ": No qualities found.");
      int populationSize = individuals.Length;

      List<List<MOCMAESIndividual>> fronts = new List<List<MOCMAESIndividual>>();
      Dictionary<MOCMAESIndividual, List<int>> dominatedScopes = new Dictionary<MOCMAESIndividual, List<int>>();
      int[] dominationCounter = new int[populationSize];

      for (int pI = 0; pI < populationSize - 1; pI++) {
        MOCMAESIndividual p = individuals[pI];
        List<int> dominatedScopesByp;
        if (!dominatedScopes.TryGetValue(p, out dominatedScopesByp))
          dominatedScopes[p] = dominatedScopesByp = new List<int>();
        for (int qI = pI + 1; qI < populationSize; qI++) {
          DominationResult test = Dominates(individuals[pI], individuals[qI], maximization, dominateOnEqualQualities);
          if (test == DominationResult.Dominates) {
            dominatedScopesByp.Add(qI);
            dominationCounter[qI] += 1;
          } else if (test == DominationResult.IsDominated) {
            dominationCounter[pI] += 1;
            if (!dominatedScopes.ContainsKey(individuals[qI]))
              dominatedScopes.Add(individuals[qI], new List<int>());
            dominatedScopes[individuals[qI]].Add(pI);
          }
          if (pI == populationSize - 2
            && qI == populationSize - 1
            && dominationCounter[qI] == 0) {
            AddToFront(individuals[qI], fronts, 0);
          }
        }
        if (dominationCounter[pI] == 0) {
          AddToFront(p, fronts, 0);
        }
      }
      int i = 0;
      while (i < fronts.Count && fronts[i].Count > 0) {
        List<MOCMAESIndividual> nextFront = new List<MOCMAESIndividual>();
        foreach (MOCMAESIndividual p in fronts[i]) {
          List<int> dominatedScopesByp;
          if (dominatedScopes.TryGetValue(p, out dominatedScopesByp)) {
            for (int k = 0; k < dominatedScopesByp.Count; k++) {
              int dominatedScope = dominatedScopesByp[k];
              dominationCounter[dominatedScope] -= 1;
              if (dominationCounter[dominatedScope] == 0) {
                nextFront.Add(individuals[dominatedScope]);
              }
            }
          }
        }
        i += 1;
        fronts.Add(nextFront);
      }

      for (i = 0; i < fronts.Count; i++) {
        foreach (var p in fronts[i]) {
          p.rank = i;
        }
      }
      return fronts;
    }

    private static void AddToFront(MOCMAESIndividual p, List<List<MOCMAESIndividual>> fronts, int i) {
      if (i == fronts.Count) fronts.Add(new List<MOCMAESIndividual>());
      fronts[i].Add(p);
    }

    private static DominationResult Dominates(MOCMAESIndividual left, MOCMAESIndividual right, bool[] maximizations, bool dominateOnEqualQualities) {
      return Dominates(left.penalizedFitness, right.penalizedFitness, maximizations, dominateOnEqualQualities);
    }

    private static DominationResult Dominates(double[] left, double[] right, bool[] maximizations, bool dominateOnEqualQualities) {
      //mkommend Caution: do not use LINQ.SequenceEqual for comparing the two quality arrays (left and right) due to performance reasons
      if (dominateOnEqualQualities) {
        var equal = true;
        for (int i = 0; i < left.Length; i++) {
          if (left[i] != right[i]) {
            equal = false;
            break;
          }
        }
        if (equal) return DominationResult.Dominates;
      }

      bool leftIsBetter = false, rightIsBetter = false;
      for (int i = 0; i < left.Length; i++) {
        if (IsDominated(left[i], right[i], maximizations[i])) rightIsBetter = true;
        else if (IsDominated(right[i], left[i], maximizations[i])) leftIsBetter = true;
        if (leftIsBetter && rightIsBetter) break;
      }

      if (leftIsBetter && !rightIsBetter) return DominationResult.Dominates;
      if (!leftIsBetter && rightIsBetter) return DominationResult.IsDominated;
      return DominationResult.IsNonDominated;
    }

    private static bool IsDominated(double left, double right, bool maximization) {
      return maximization && left < right
        || !maximization && left > right;
    }

    #endregion

    #region conversions

    private T[] Merge<T>(T[] parents, T[] offspring) {
      T[] merged = new T[parents.Length + offspring.Length];
      for (int i = 0; i < parents.Length; i++) {
        merged[i] = parents[i];
      }
      for (int i = 0; i < offspring.Length; i++) {
        merged[i + parents.Length] = offspring[i];
      }
      return merged;
    }

    public double[] ToArray(RealVector r) {
      double[] d = new double[r.Length];
      for (int i = 0; i < r.Length; i++) {
        d[i] = r[i];
      }
      return d;
    }

    public DoubleMatrix ToMatrix(IEnumerable<double[]> data) {
      var d2 = data.ToArray<double[]>();
      DoubleMatrix mat = new DoubleMatrix(d2.Length, d2[0].Length);
      for (int i = 0; i < mat.Rows; i++) {
        for (int j = 0; j < mat.Columns; j++) {
          mat[i, j] = d2[i][j];
        }
      }
      return mat;
    }

    public double[,] ToArray(DoubleMatrix data) {
      double[,] mat = new double[data.Rows, data.Columns];
      for (int i = 0; i < data.Rows; i++) {
        for (int j = 0; j < data.Columns; j++) {
          mat[i, j] = data[i, j];
        }
      }
      return mat;
    }
    #endregion
  }
}