
source: branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/MOCMAEvolutionStrategy.cs @ 17226

Last change on this file since 17226 was 17226, checked in by mkommend, 5 years ago

#2521: Merged trunk changes into problem refactoring branch.

File size: 25.3 KB
#region License Information
/* HeuristicLab
 * Copyright (C) Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HEAL.Attic;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("Multi-Objective CMA Evolution Strategy (MOCMAES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableType("C10264E3-E4C6-4735-8E94-0DC116E8908D")]
  public class MOCMAEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(MultiObjectiveProblem<RealVectorEncoding, RealVector>); }
    }
    public new MultiObjectiveProblem<RealVectorEncoding, RealVector> Problem {
      get { return (MultiObjectiveProblem<RealVectorEncoding, RealVector>)base.Problem; }
      set { base.Problem = value; }
    }
    public override bool SupportsPause {
      get { return true; }
    }

    public RealVectorEncoding Encoding {
      get { return Problem.Encoding; }
    }

    #region Storable fields
    [Storable]
    private IRandom random = new MersenneTwister();
    [Storable]
    private NormalDistributedRandom gauss;
    [Storable]
    private Individual[] solutions;
    [Storable]
    private double stepSizeLearningRate; //=cp learning rate in [0,1]
    [Storable]
    private double stepSizeDampeningFactor; //d
    [Storable]
    private double targetSuccessProbability; // p^target_succ
    [Storable]
    private double evolutionPathLearningRate; //cc
    [Storable]
    private double covarianceMatrixLearningRate; //ccov
    [Storable]
    private double covarianceMatrixUnlearningRate;
    [Storable]
    private double successThreshold; //pthresh
    #endregion

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string CurrentFrontResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    public IFixedValueParameter<IntValue> PopulationSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    public IFixedValueParameter<IntValue> MaximumGenerationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    public IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    public IValueParameter<DoubleArray> InitialSigmaParameter {
      get { return (IValueParameter<DoubleArray>)Parameters[InitialSigmaName]; }
    }
    public IConstrainedValueParameter<IIndicator> IndicatorParameter {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public DoubleArray InitialSigma {
      get { return InitialSigmaParameter.Value; }
      set { InitialSigmaParameter.Value = value; }
    }
    public IIndicator Indicator {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }

    public double StepSizeLearningRate {
      get { return stepSizeLearningRate; }
    }
    public double StepSizeDampeningFactor {
      get { return stepSizeDampeningFactor; }
    }
    public double TargetSuccessProbability {
      get { return targetSuccessProbability; }
    }
    public double EvolutionPathLearningRate {
      get { return evolutionPathLearningRate; }
    }
    public double CovarianceMatrixLearningRate {
      get { return covarianceMatrixLearningRate; }
    }
    public double CovarianceMatrixUnlearningRate {
      get { return covarianceMatrixUnlearningRate; }
    }
    public double SuccessThreshold {
      get { return successThreshold; }
    }
    #endregion

    #region ResultsProperties
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion
    //QualityIndicators
    private double ResultsHypervolume {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    //Solutions
    private DoubleMatrix ResultsSolutions {
      get { return (DoubleMatrix)Results[CurrentFrontResultName].Value; }
      set { Results[CurrentFrontResultName].Value = value; }
    }
    private ParetoFrontScatterPlot ResultsScatterPlot {
      get { return (ParetoFrontScatterPlot)Results[ScatterPlotResultName].Value; }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region Constructors
    public MOCMAEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] {0.5})));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
      var set = new ItemSet<IIndicator> {new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator()};
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism on non-dominated solutions", set, set.First()));
    }

    [StorableConstructor]
    protected MOCMAEvolutionStrategy(StorableConstructorFlag _) : base(_) { }

    protected MOCMAEvolutionStrategy(MOCMAEvolutionStrategy original, Cloner cloner) : base(original, cloner) {
      random = cloner.Clone(original.random);
      gauss = cloner.Clone(original.gauss);
      solutions = original.solutions != null ? original.solutions.Select(cloner.Clone).ToArray() : null;
      stepSizeLearningRate = original.stepSizeLearningRate;
      stepSizeDampeningFactor = original.stepSizeDampeningFactor;
      targetSuccessProbability = original.targetSuccessProbability;
      evolutionPathLearningRate = original.evolutionPathLearningRate;
      covarianceMatrixLearningRate = original.covarianceMatrixLearningRate;
      covarianceMatrixUnlearningRate = original.covarianceMatrixUnlearningRate;
      successThreshold = original.successThreshold;
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new MOCMAEvolutionStrategy(this, cloner);
    }
    #endregion

    #region Initialization
    protected override void Initialize(CancellationToken cancellationToken) {
      if (SetSeedRandomly) Seed = RandomSeedGenerator.GetSeed();
      random.Reset(Seed);
      gauss = new NormalDistributedRandom(random, 0, 1);

      InitResults();
      InitStrategy();
      InitSolutions();
      Analyze();

      ResultsIterations = 1;
    }
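    // Creates a new individual with the global step size sigma = max(InitialSigma), a zero evolution path, and a
    // diagonal covariance matrix with entries (InitialSigma[i]/sigma)^2, so per-dimension initial sigmas are
    // encoded in the covariance matrix rather than in the scalar step size.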
    private Individual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var c = new double[x.Length, x.Length];
      var sigma = InitialSigma.Max();
      for (var i = 0; i < x.Length; i++) {
        var d = InitialSigma[i % InitialSigma.Length] / sigma;
        c[i, i] = d * d;
      }
      return new Individual(x, targetSuccessProbability, sigma, zeros, c, this);
    }
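    // Initializes the population uniformly at random within the encoding bounds; the bound rows are cycled if
    // fewer bound rows than dimensions are specified.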
    private void InitSolutions() {
      solutions = new Individual[PopulationSize];
      for (var i = 0; i < PopulationSize; i++) {
        var x = new RealVector(Encoding.Length); // Uniform distribution in all dimensions assumed.
        var bounds = Encoding.Bounds;
        for (var j = 0; j < Encoding.Length; j++) {
          var dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
        PenalizeEvaluate(solutions[i]);
      }
      ResultsEvaluations += solutions.Length;
    }
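    // Default external strategy parameters for the internal (1+1)-updates (lambda = 1 offspring per parent);
    // cf. the paper by Igel, Hansen and Roth referenced in the item description above.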
    private void InitStrategy() {
      const int lambda = 1;
      double n = Encoding.Length;
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }
    private void InitResults() {
      Results.Add(new Result(IterationsResultName, "The number of generations evaluated", new IntValue(0)));
      Results.Add(new Result(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0)));
      Results.Add(new Result(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(double.NaN)));
      Results.Add(new Result(GenerationalDistanceResultName, "The generational distance to an optimal Pareto front defined in the problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal Pareto front defined in the problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0)));
      Results.Add(new Result(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0)));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      Results.Add(new Result(TimetableResultName, "Different quality measures in a time series", table));
      Results.Add(new Result(CurrentFrontResultName, "The current front", new DoubleMatrix()));
      Results.Add(new Result(ScatterPlotResultName, "A scatter plot displaying the evaluated solutions and (if available) the analytically optimal front", new ParetoFrontScatterPlot()));

      var problem = Problem;
      if (problem == null) return;
      var bkf = problem.BestKnownFront == null ? null : problem.BestKnownFront.ToArray();
      if (bkf != null && problem.ReferencePoint != null) {
        ResultsBestKnownHypervolume = HypervolumeCalculator.CalculateHypervolume(bkf, problem.ReferencePoint, Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      ResultsScatterPlot = new ParetoFrontScatterPlot(new double[0][], new double[0][], bkf, Problem.Objectives, Problem.Encoding.Length);
    }
    #endregion

    #region Mainloop
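    // Main loop: iterates until the generation or evaluation budget is exhausted. Analyze() runs in the finally
    // block, so the results stay consistent even when an iteration is cancelled (pause/stop).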
    protected override void Run(CancellationToken cancellationToken) {
      while (ResultsIterations < MaximumGenerations && ResultsEvaluations < MaximumEvaluatedSolutions) {
        try {
          Iterate();
          ResultsIterations++;
          cancellationToken.ThrowIfCancellationRequested();
        } finally {
          Analyze();
        }
      }
    }
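    // One generation: every parent produces exactly one mutated offspring, parents and offspring are merged,
    // and environmental selection reduces the merged population back to the original population size.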
    private void Iterate() {
      var offspring = solutions.Select(i => {
        var o = new Individual(i);
        o.Mutate(gauss);
        PenalizeEvaluate(o);
        return o;
      });
      ResultsEvaluations += solutions.Length;
      var parents = solutions.Concat(offspring).ToArray();
      SelectParents(parents, solutions.Length);
      UpdatePopulation(parents);
    }
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
    #endregion

    #region Evaluation
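    // Box-constraint handling: infeasible candidates are evaluated at their closest feasible (clamped) point,
    // and their penalized fitness additionally accounts for the squared distance to that point.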
    private void PenalizeEvaluate(Individual individual) {
      if (IsFeasable(individual.Mean)) {
        individual.Fitness = Evaluate(individual.Mean);
        individual.PenalizedFitness = individual.Fitness;
      } else {
        var t = ClosestFeasible(individual.Mean);
        individual.Fitness = Evaluate(t);
        individual.PenalizedFitness = Penalize(individual.Mean, t, individual.Fitness);
      }
    }
    private double[] Evaluate(RealVector x) {
      var res = Problem.Evaluate(x, random);
      return res;
    }
    private double[] Penalize(RealVector x, RealVector t, IEnumerable<double> fitness) {
      var penalty = x.Zip(t, (a, b) => (a - b) * (a - b)).Sum() * 1E-6;
      return fitness.Select((v, i) => Problem.Maximization[i] ? v - penalty : v + penalty).ToArray();
    }
    private RealVector ClosestFeasible(RealVector x) {
      var bounds = Encoding.Bounds;
      var r = new RealVector(x.Length);
      for (var i = 0; i < x.Length; i++) {
        var dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }
    private bool IsFeasable(RealVector offspring) {
      var bounds = Encoding.Bounds;
      for (var i = 0; i < offspring.Length; i++) {
        var dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

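    // Environmental selection: a non-dominated sort discards whole worst-ranked fronts first; the configured
    // indicator (e.g. hypervolume contribution) then removes the least contributing elements of the front that
    // only partially fits.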
    private void SelectParents(IReadOnlyList<Individual> parents, int length) {
      //perform a non-dominated sort to assign a rank to every element
      int[] ranks;
      var fronts = DominationCalculator.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);

      //deselect whole worst-ranked fronts as long as at least mu elements remain afterwards
      var rank = fronts.Count - 1;
      var popSize = parents.Count;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.Item1.Selected = false;
        popSize -= front.Count;
        rank--;
      }

      //now use the indicator to deselect the approximately worst elements of the last selected front
      var front1 = fronts[rank].OrderBy(x => x.Item1.PenalizedFitness[0]).ToList();
      for (; popSize > length; popSize--) {
        var lc = Indicator.LeastContributer(front1.Select(i => i.Item1).ToArray(), Problem);
        front1[lc].Item1.Selected = false;
        front1.Swap(lc, front1.Count - 1);
        front1.RemoveAt(front1.Count - 1);
      }
    }

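    // Strategy parameter update: surviving offspring update their distributions as successful mutations, and
    // every surviving parent updates its step size depending on whether its own offspring was selected.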
    private void UpdatePopulation(IReadOnlyList<Individual> parents) {
      foreach (var p in parents.Skip(solutions.Length).Where(i => i.Selected))
        p.UpdateAsOffspring();
      for (var i = 0; i < solutions.Length; i++)
        if (parents[i].Selected)
          parents[i].UpdateAsParent(parents[i + solutions.Length].Selected);
      solutions = parents.Where(p => p.Selected).ToArray();
    }

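    // Records the current front and the standard multi-objective quality indicators (hypervolume, generational
    // distance, inverted generational distance, crowding, spacing) and appends them to the timetable.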
    private void Analyze() {
      var qualities = solutions.Select(x => x.Fitness).ToArray();

      // TODO: check for side effects
      ResultsScatterPlot = new ParetoFrontScatterPlot(qualities, solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives, ResultsScatterPlot.ProblemSize);
      ResultsSolutions = solutions.Select(x => x.Mean.ToArray()).ToMatrix();

      var problem = Problem as MultiObjectiveProblem<RealVectorEncoding, RealVector>;
      if (problem == null) return;

      if (qualities.Length == 0) return;
      ResultsCrowding = CrowdingCalculator.CalculateCrowding(qualities);
      ResultsSpacing = Spacing.Calculate(qualities);

      ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistance.Calculate(qualities, problem.BestKnownFront, 1) : double.NaN;
      ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(qualities, problem.BestKnownFront, 1) : double.NaN;
      ResultsHypervolume = problem.ReferencePoint != null ? HypervolumeCalculator.CalculateHypervolume(qualities, problem.ReferencePoint, Problem.Maximization) : double.NaN;
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume;

      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);

      Problem.Analyze(solutions.Select(x => x.Mean).ToArray(),
        solutions.Select(x => x.Fitness).ToArray(),
        Results,
        random);
    }
  }
}