
source: branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/MOCMAEvolutionStrategy.cs @ 15089

Last change on this file since 15089 was 15089, checked in by bwerth, 7 years ago

#2592 removed effectively unused field "rank" from Individual, removed non-dominated sorting

File size: 26.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("MOCMA Evolution Strategy (MOCMAES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableClass]
  [System.Runtime.InteropServices.Guid("5AC20A69-BBBF-4153-B57D-3EAF92DC505E")]
  public class MOCMAEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType
    {
      get { return typeof(MultiObjectiveBasicProblem<RealVectorEncoding>); }
    }
    public new MultiObjectiveBasicProblem<RealVectorEncoding> Problem
    {
      get { return (MultiObjectiveBasicProblem<RealVectorEncoding>)base.Problem; }
      set { base.Problem = value; }
    }
    public override bool SupportsPause
    {
      get { return true; }
    }

    #region storable fields
    [Storable]
    private IRandom random = new MersenneTwister();
    [Storable]
    private NormalDistributedRandom gauss;
    [Storable]
    private Individual[] solutions;
    [Storable]
    private double stepSizeLearningRate; // c_p, learning rate in [0,1]
    [Storable]
    private double stepSizeDampeningFactor; // d
    [Storable]
    private double targetSuccessProbability; // p^target_succ
    [Storable]
    private double evolutionPathLearningRate; // c_c
    [Storable]
    private double covarianceMatrixLearningRate; // c_cov
    [Storable]
    private double covarianceMatrixUnlearningRate;
    [Storable]
    private double successThreshold; // p_thresh

    #endregion

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string CurrentFrontResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter
    {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter
    {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter
    {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    public IFixedValueParameter<IntValue> PopulationSizeParameter
    {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    public IFixedValueParameter<IntValue> MaximumGenerationsParameter
    {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    public IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter
    {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    public IValueParameter<DoubleArray> InitialSigmaParameter
    {
      get { return (IValueParameter<DoubleArray>)Parameters[InitialSigmaName]; }
    }
    public IConstrainedValueParameter<IIndicator> IndicatorParameter
    {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime
    {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed
    {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly
    {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize
    {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations
    {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions
    {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public DoubleArray InitialSigma
    {
      get { return InitialSigmaParameter.Value; }
      set { InitialSigmaParameter.Value = value; }
    }
    public IIndicator Indicator
    {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }

    public double StepSizeLearningRate { get { return stepSizeLearningRate; } }
    public double StepSizeDampeningFactor { get { return stepSizeDampeningFactor; } }
    public double TargetSuccessProbability { get { return targetSuccessProbability; } }
    public double EvolutionPathLearningRate { get { return evolutionPathLearningRate; } }
    public double CovarianceMatrixLearningRate { get { return covarianceMatrixLearningRate; } }
    public double CovarianceMatrixUnlearningRate { get { return covarianceMatrixUnlearningRate; } }
    public double SuccessThreshold { get { return successThreshold; } }
    #endregion

    #region ResultsProperties
    private int ResultsEvaluations
    {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations
    {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities
    {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine
    {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine
    {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine
    {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine
    {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine
    {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine
    {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine
    {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion
    //QualityIndicators
    private double ResultsHypervolume
    {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance
    {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance
    {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding
    {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing
    {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume
    {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume
    {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume
    {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    //Solutions
    private DoubleMatrix ResultsSolutions
    {
      get { return (DoubleMatrix)Results[CurrentFrontResultName].Value; }
      set { Results[CurrentFrontResultName].Value = value; }
    }
    private ScatterPlotContent ResultsScatterPlot
    {
      get { return (ScatterPlotContent)Results[ScatterPlotResultName].Value; }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region Constructors
    public MOCMAEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] { 0.5 })));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
      var set = new ItemSet<IIndicator> { new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator() };
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism on non-dominated solutions", set, set.First()));
    }

    [StorableConstructor]
    protected MOCMAEvolutionStrategy(bool deserializing) : base(deserializing) { }

    protected MOCMAEvolutionStrategy(MOCMAEvolutionStrategy original, Cloner cloner) : base(original, cloner) {
      random = cloner.Clone(original.random);
      gauss = cloner.Clone(original.gauss);
      solutions = original.solutions != null ? original.solutions.Select(cloner.Clone).ToArray() : null;
      stepSizeLearningRate = original.stepSizeLearningRate;
      stepSizeDampeningFactor = original.stepSizeDampeningFactor;
      targetSuccessProbability = original.targetSuccessProbability;
      evolutionPathLearningRate = original.evolutionPathLearningRate;
      covarianceMatrixLearningRate = original.covarianceMatrixLearningRate;
      covarianceMatrixUnlearningRate = original.covarianceMatrixUnlearningRate;
      successThreshold = original.successThreshold;
    }

    public override IDeepCloneable Clone(Cloner cloner) { return new MOCMAEvolutionStrategy(this, cloner); }
    #endregion

    #region Initialization
    protected override void Initialize(CancellationToken cancellationToken) {
      if (SetSeedRandomly) Seed = new System.Random().Next();
      random.Reset(Seed);
      gauss = new NormalDistributedRandom(random, 0, 1);

      InitResults();
      InitStrategy();
      InitSolutions();
      Analyze();

      ResultsIterations = 1;
      cancellationToken.ThrowIfCancellationRequested();
    }
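    // A new individual starts at position x with the global step size set to the largest entry
    // of InitialSigma; differences between the per-dimension sigmas are encoded as squared
    // ratios on the diagonal of the initial covariance matrix, and the evolution path starts
    // at the zero vector.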
    private Individual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var c = new double[x.Length, x.Length];
      var sigma = InitialSigma.Max();
      for (var i = 0; i < x.Length; i++) {
        var d = InitialSigma[i % InitialSigma.Length] / sigma;
        c[i, i] = d * d;
      }
      return new Individual(x, targetSuccessProbability, sigma, zeros, c, this);
    }
    private void InitSolutions() {
      solutions = new Individual[PopulationSize];
      for (var i = 0; i < PopulationSize; i++) {
        var x = new RealVector(Problem.Encoding.Length); // uniform distribution within the bounds is assumed in every dimension
        var bounds = Problem.Encoding.Bounds;
        for (var j = 0; j < Problem.Encoding.Length; j++) {
          var dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
        PenalizeEvaluate(solutions[i]);
      }
    }
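    // Strategy parameter defaults for the per-individual (1+1) update (lambda = 1), matching the
    // formulas used below: p^target_succ = 1/(5 + sqrt(lambda)/2), d = 1 + n/(2*lambda),
    // c_p = p^target_succ*lambda/(2 + p^target_succ*lambda), c_c = 2/(n + 2), c_cov = 2/(n^2 + 6)
    // and p_thresh = 0.44; these presumably follow the recommendations of Igel, Hansen and Roth
    // (see the Item description above). The unlearning rate 0.4/(n^1.6 + 1) is presumably used by
    // Individual to shrink the covariance matrix after unsuccessful mutations.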
    private void InitStrategy() {
      const int lambda = 1;
      double n = Problem.Encoding.Length;
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }
    private void InitResults() {
      Results.Add(new Result(IterationsResultName, "The number of generations evaluated", new IntValue(0)));
      Results.Add(new Result(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0)));
      Results.Add(new Result(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(double.NaN)));
      Results.Add(new Result(GenerationalDistanceResultName, "The generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0)));
      Results.Add(new Result(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0)));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      Results.Add(new Result(TimetableResultName, "Different quality measures in a time series", table));
      Results.Add(new Result(CurrentFrontResultName, "The current front", new DoubleMatrix()));
      Results.Add(new Result(ScatterPlotResultName, "A scatter plot displaying the evaluated solutions and (if available) the analytically optimal front", new ScatterPlotContent(null, null, null, 2)));

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;
      if (problem.BestKnownFront != null) {
        ResultsBestKnownHypervolume = Hypervolume.Calculate(problem.BestKnownFront.ToJaggedArray(), problem.TestFunction.ReferencePoint(problem.Objectives), Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      //TODO? move FrontScatterPlotContent partially? to MultiobjectiveTestProblem?
      ResultsScatterPlot = new ScatterPlotContent(new double[0][], new double[0][], problem.BestKnownFront.ToJaggedArray(), problem.Objectives);
    }
    #endregion

    #region Mainloop
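    // The main loop iterates until MaximumGenerations is reached or the run is cancelled
    // (e.g. by the runtime check in OnExecutionTimeChanged below). Analyze() is called in the
    // finally block so the results stay up to date even when an iteration is interrupted by a
    // pause or cancellation request.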
    protected override void Run(CancellationToken cancellationToken) {
      while (ResultsIterations < MaximumGenerations) {
        try {
          Iterate();
          ResultsIterations++;
          cancellationToken.ThrowIfCancellationRequested();
        }
        finally {
          Analyze();
        }
      }
    }
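    // One generation: every current solution creates exactly one mutated offspring, parents and
    // offspring are pooled, the pool is reduced back to the population size by SelectParents,
    // and the strategy parameters of the survivors are adapted in UpdatePopulation.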
    private void Iterate() {
      var offspring = solutions.Select(i => {
        var o = new Individual(i);
        o.Mutate(gauss);
        PenalizeEvaluate(o);
        return o;
      });
      var parents = solutions.Concat(offspring).ToArray();
      SelectParents(parents, solutions.Length);
      UpdatePopulation(parents);
    }
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
    #endregion

    #region Evaluation
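    // Infeasible candidates are not evaluated at their actual position: they are projected onto
    // the closest feasible point, that point is evaluated instead, and the penalized fitness
    // adds a small penalty proportional to the squared distance of the projection (see Penalize).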
    private void PenalizeEvaluate(Individual individual) {
      if (IsFeasible(individual.Mean)) {
        individual.Fitness = Evaluate(individual.Mean);
        individual.PenalizedFitness = individual.Fitness;
      } else {
        var t = ClosestFeasible(individual.Mean);
        individual.Fitness = Evaluate(t);
        individual.PenalizedFitness = Penalize(individual.Mean, t, individual.Fitness);
      }
    }
    private double[] Evaluate(RealVector x) {
      var res = Problem.Evaluate(new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x) } }), random);
      ResultsEvaluations++;
      return res;
    }
    private double[] Penalize(RealVector x, RealVector t, IEnumerable<double> fitness) {
      var penalty = x.Zip(t, (a, b) => (a - b) * (a - b)).Sum() * 1E-6;
      return fitness.Select((v, i) => Problem.Maximization[i] ? v - penalty : v + penalty).ToArray();
    }
    private RealVector ClosestFeasible(RealVector x) {
      var bounds = Problem.Encoding.Bounds;
      var r = new RealVector(x.Length);
      for (var i = 0; i < x.Length; i++) {
        var dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }
    private bool IsFeasible(RealVector offspring) {
      var bounds = Problem.Encoding.Bounds;
      for (var i = 0; i < offspring.Length; i++) {
        var dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

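    // Environmental selection of the (mu+mu) scheme: rank the pooled population by non-dominated
    // sorting, deselect whole fronts from the worst rank downwards while more than 'length'
    // candidates remain selected, and thin out the worst front that remains selected by
    // repeatedly deselecting the least contributing element according to the configured indicator.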
    private void SelectParents(IReadOnlyList<Individual> parents, int length) {
      // perform a non-dominated sort to assign a rank to every element
      int[] ranks;
      var fronts = DominationCalculator<Individual>.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);
      // NonDominatedSort(parents, individual => individual.PenalizedFitness);

      // deselect the highest-ranked fronts until no more than mu (length) elements would remain
      var rank = fronts.Count - 1;
      var popSize = parents.Count;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.Item1.Selected = false;
        popSize -= front.Count;
        rank--;
      }

      // now use the indicator to deselect the approximately worst elements of the last selected front
      var front1 = fronts[rank].OrderBy(x => x.Item1.PenalizedFitness[0]).ToList();
      for (; popSize > length; popSize--) {
        var lc = Indicator.LeastContributer(front1.Select(i => i.Item1).ToArray(), Problem);
        front1[lc].Item1.Selected = false;
        front1.Swap(lc, front1.Count - 1);
        front1.RemoveAt(front1.Count - 1);
      }
    }

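    // Selected offspring (the second half of 'parents') update their own strategy parameters,
    // and each surviving original parent is told whether its offspring was selected, presumably
    // so it can adapt its success probability and step size; the actual update rules live in
    // Individual.UpdateAsOffspring and Individual.UpdateAsParent.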
    private void UpdatePopulation(IReadOnlyList<Individual> parents) {
      foreach (var p in parents.Skip(solutions.Length).Where(i => i.Selected))
        p.UpdateAsOffspring();

      for (var i = 0; i < solutions.Length; i++)
        if (parents[i].Selected)
          parents[i].UpdateAsParent(parents[i + solutions.Length].Selected);

      solutions = parents.Where(p => p.Selected).ToArray();
    }

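    // Updates the scatter plot and the current front matrix; for test-function problems the
    // quality indicators (crowding, spacing, (inverted) generational distance, hypervolume) are
    // additionally computed on the non-dominated solutions that dominate the reference point and
    // appended to the quality indicator time series.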
    private void Analyze() {
      //TODO? move FrontScatterPlotContent partially to MultiobjectiveTestProblem
      ResultsScatterPlot = new ScatterPlotContent(solutions.Select(x => x.Fitness).ToArray(), solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives);

      ResultsSolutions = solutions.Select(x => x.Mean.ToArray()).ToMatrix();

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;

      var front = NonDominatedSelect.GetDominatingVectors(solutions.Select(x => x.Fitness), problem.ReferencePoint.CloneAsArray(), Problem.Maximization, true).ToArray();
      if (front.Length == 0) return;
      var bounds = problem.Bounds.CloneAsMatrix();
      ResultsCrowding = Crowding.Calculate(front, bounds);
      ResultsSpacing = Spacing.Calculate(front);
      ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsHypervolume = Hypervolume.Calculate(front, problem.ReferencePoint.CloneAsArray(), Problem.Maximization);
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume;

      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);

      Problem.Analyze(
        solutions.Select(x => (Optimization.Individual)new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x.Mean) } })).ToArray(),
        solutions.Select(x => x.Fitness).ToArray(),
        Results,
        random);
    }

  }
}