
source: branches/2943_MOBasicProblem_MOCMAES/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/MOCMAEvolutionStrategy.cs @ 16310

Last change on this file since 16310 was 16171, checked in by bwerth, 6 years ago

#2943 worked on MOBasicProblem - added Interfaces; reworked MOCalculators; several minor changes

File size: 25.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("Multi-Objective CMA Evolution Strategy (MOCMAES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableClass]
  public class MOCMAEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(IMultiObjectiveBasicProblem); }
    }
    public new IMultiObjectiveBasicProblem Problem {
      get { return (IMultiObjectiveBasicProblem)base.Problem; }
      set { base.Problem = value; }
    }
    public override bool SupportsPause {
      get { return true; }
    }

    private RealVectorEncoding Encoding {
      get {
        if (Problem == null || !(Problem.Encoding is RealVectorEncoding))
          throw new ArgumentException("Multiobjective CMA-ES is only applicable to problems with RealVectorEncodings");
        return (RealVectorEncoding)Problem.Encoding;
      }
    }

    #region Storable fields
    [Storable]
    private IRandom random = new MersenneTwister();
    [Storable]
    private NormalDistributedRandom gauss;
    [Storable]
    private Individual[] solutions;
    [Storable]
    private double stepSizeLearningRate; // c_p, learning rate in [0,1]
    [Storable]
    private double stepSizeDampeningFactor; // d
    [Storable]
    private double targetSuccessProbability; // p^target_succ
    [Storable]
    private double evolutionPathLearningRate; // c_c
    [Storable]
    private double covarianceMatrixLearningRate; // c_cov
    [Storable]
    private double covarianceMatrixUnlearningRate;
    [Storable]
    private double successThreshold; // p_thresh

    #endregion

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string CurrentFrontResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    public IFixedValueParameter<IntValue> PopulationSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    public IFixedValueParameter<IntValue> MaximumGenerationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    public IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    public IValueParameter<DoubleArray> InitialSigmaParameter {
      get { return (IValueParameter<DoubleArray>)Parameters[InitialSigmaName]; }
    }
    public IConstrainedValueParameter<IIndicator> IndicatorParameter {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public DoubleArray InitialSigma {
      get { return InitialSigmaParameter.Value; }
      set { InitialSigmaParameter.Value = value; }
    }
    public IIndicator Indicator {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }

    public double StepSizeLearningRate { get { return stepSizeLearningRate; } }
    public double StepSizeDampeningFactor { get { return stepSizeDampeningFactor; } }
    public double TargetSuccessProbability { get { return targetSuccessProbability; } }
    public double EvolutionPathLearningRate { get { return evolutionPathLearningRate; } }
    public double CovarianceMatrixLearningRate { get { return covarianceMatrixLearningRate; } }
    public double CovarianceMatrixUnlearningRate { get { return covarianceMatrixUnlearningRate; } }
    public double SuccessThreshold { get { return successThreshold; } }
    #endregion

    #region ResultsProperties
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion
    // QualityIndicators
    private double ResultsHypervolume {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    // Solutions
    private DoubleMatrix ResultsSolutions {
      get { return (DoubleMatrix)Results[CurrentFrontResultName].Value; }
      set { Results[CurrentFrontResultName].Value = value; }
    }
    private ParetoFrontScatterPlot ResultsScatterPlot {
      get { return (ParetoFrontScatterPlot)Results[ScatterPlotResultName].Value; }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region Constructors
    public MOCMAEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] { 0.5 })));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
      var set = new ItemSet<IIndicator> { new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator() };
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism on non-dominated solutions", set, set.First()));
    }

    [StorableConstructor]
    protected MOCMAEvolutionStrategy(bool deserializing) : base(deserializing) { }

    protected MOCMAEvolutionStrategy(MOCMAEvolutionStrategy original, Cloner cloner) : base(original, cloner) {
      random = cloner.Clone(original.random);
      gauss = cloner.Clone(original.gauss);
      solutions = original.solutions != null ? original.solutions.Select(cloner.Clone).ToArray() : null;
      stepSizeLearningRate = original.stepSizeLearningRate;
      stepSizeDampeningFactor = original.stepSizeDampeningFactor;
      targetSuccessProbability = original.targetSuccessProbability;
      evolutionPathLearningRate = original.evolutionPathLearningRate;
      covarianceMatrixLearningRate = original.covarianceMatrixLearningRate;
      covarianceMatrixUnlearningRate = original.covarianceMatrixUnlearningRate;
      successThreshold = original.successThreshold;
    }

    public override IDeepCloneable Clone(Cloner cloner) { return new MOCMAEvolutionStrategy(this, cloner); }
    #endregion

    #region Initialization
    protected override void Initialize(CancellationToken cancellationToken) {
      if (SetSeedRandomly) Seed = RandomSeedGenerator.GetSeed();
      random.Reset(Seed);
      gauss = new NormalDistributedRandom(random, 0, 1);

      InitResults();
      InitStrategy();
      InitSolutions();
      Analyze();

      ResultsIterations = 1;
    }
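    // Creates an individual with a zero evolution path and a diagonal initial covariance
    // matrix: the global step size is the largest InitialSigma component and the diagonal
    // entries rescale each dimension relative to it.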
    private Individual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var c = new double[x.Length, x.Length];
      var sigma = InitialSigma.Max();
      for (var i = 0; i < x.Length; i++) {
        var d = InitialSigma[i % InitialSigma.Length] / sigma;
        c[i, i] = d * d;
      }
      return new Individual(x, targetSuccessProbability, sigma, zeros, c, this);
    }
    private void InitSolutions() {
      solutions = new Individual[PopulationSize];
      for (var i = 0; i < PopulationSize; i++) {
        var x = new RealVector(Encoding.Length); // Uniform distribution in all dimensions assumed.
        var bounds = Encoding.Bounds;
        for (var j = 0; j < Encoding.Length; j++) {
          var dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
        PenalizeEvaluate(solutions[i]);
      }
      ResultsEvaluations += solutions.Length;
    }
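    // Default strategy parameters of the MO-CMA-ES (cf. the paper by Igel, Hansen and Roth
    // referenced in the Item description), instantiated for lambda = 1 offspring per parent:
    // target success probability, step-size damping and the learning rates for step size,
    // evolution path and covariance matrix depend only on the problem dimension n.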
    private void InitStrategy() {
      const int lambda = 1;
      double n = Encoding.Length;
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }
    private void InitResults() {
      Results.Add(new Result(IterationsResultName, "The number of generations evaluated", new IntValue(0)));
      Results.Add(new Result(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0)));
      Results.Add(new Result(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(double.NaN)));
      Results.Add(new Result(GenerationalDistanceResultName, "The generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0)));
      Results.Add(new Result(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0)));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      Results.Add(new Result(TimetableResultName, "Different quality measures in a time series", table));
      Results.Add(new Result(CurrentFrontResultName, "The current front", new DoubleMatrix()));
      Results.Add(new Result(ScatterPlotResultName, "A scatterplot displaying the evaluated solutions and (if available) the analytically optimal front", new ParetoFrontScatterPlot()));

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;
      if (problem.BestKnownFront != null) {
        ResultsBestKnownHypervolume = HypervolumeCalculator.CalculateHypervolume(problem.BestKnownFront.ToJaggedArray(), problem.TestFunction.ReferencePoint(problem.Objectives), Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      ResultsScatterPlot = new ParetoFrontScatterPlot(new double[0][], new double[0][], problem.BestKnownFront.ToJaggedArray(), problem.Objectives, problem.ProblemSize);
    }
    #endregion

    #region Mainloop
    protected override void Run(CancellationToken cancellationToken) {
      while (ResultsIterations < MaximumGenerations && ResultsEvaluations < MaximumEvaluatedSolutions) {
        try {
          Iterate();
          ResultsIterations++;
          cancellationToken.ThrowIfCancellationRequested();
        } finally {
          Analyze();
        }
      }
    }
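    // One generation: every current solution produces a single mutated offspring that is
    // evaluated (with boundary penalty if necessary); environmental selection then reduces
    // the combined parent and offspring population back to PopulationSize individuals.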
    private void Iterate() {
      var offspring = solutions.Select(i => {
        var o = new Individual(i);
        o.Mutate(gauss);
        PenalizeEvaluate(o);
        return o;
      });
      ResultsEvaluations += solutions.Length;
      var parents = solutions.Concat(offspring).ToArray();
      SelectParents(parents, solutions.Length);
      UpdatePopulation(parents);
    }
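    // Cancels the run once the maximum runtime (in seconds) is exceeded; -1 disables the limit.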
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
    #endregion

    #region Evaluation
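    // Box-constraint handling: infeasible candidates are evaluated at the closest feasible
    // point and receive a penalized fitness that adds a small term proportional to the
    // squared distance to the feasible region.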
    private void PenalizeEvaluate(Individual individual) {
      if (IsFeasable(individual.Mean)) {
        individual.Fitness = Evaluate(individual.Mean);
        individual.PenalizedFitness = individual.Fitness;
      } else {
        var t = ClosestFeasible(individual.Mean);
        individual.Fitness = Evaluate(t);
        individual.PenalizedFitness = Penalize(individual.Mean, t, individual.Fitness);
      }
    }
    private double[] Evaluate(RealVector x) {
      var res = Problem.Evaluate(new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x) } }), random);
      return res;
    }
    private double[] Penalize(RealVector x, RealVector t, IEnumerable<double> fitness) {
      var penalty = x.Zip(t, (a, b) => (a - b) * (a - b)).Sum() * 1E-6;
      return fitness.Select((v, i) => Problem.Maximization[i] ? v - penalty : v + penalty).ToArray();
    }
    private RealVector ClosestFeasible(RealVector x) {
      var bounds = Encoding.Bounds;
      var r = new RealVector(x.Length);
      for (var i = 0; i < x.Length; i++) {
        var dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }
    private bool IsFeasable(RealVector offspring) {
      var bounds = Encoding.Bounds;
      for (var i = 0; i < offspring.Length; i++) {
        var dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

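    // Environmental selection on the combined parent and offspring population:
    // non-dominated sorting assigns ranks, whole worst-ranked fronts are deselected while
    // at least 'length' individuals would remain, and the configured indicator then removes
    // the least-contributing members of the last affected front one by one.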
    private void SelectParents(IReadOnlyList<Individual> parents, int length) {
      // perform a non-dominated sort to assign a rank to every element
      int[] ranks;
      var fronts = DominationCalculator.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);

      // deselect whole worst-ranked fronts as long as at least 'length' elements remain selected
      var rank = fronts.Count - 1;
      var popSize = parents.Count;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.Item1.Selected = false;
        popSize -= front.Count;
        rank--;
      }

      // now use the indicator to deselect the approximately worst elements of the last selected front
      var front1 = fronts[rank].OrderBy(x => x.Item1.PenalizedFitness[0]).ToList();
      for (; popSize > length; popSize--) {
        var lc = Indicator.LeastContributer(front1.Select(i => i.Item1).ToArray(), Problem);
        front1[lc].Item1.Selected = false;
        front1.Swap(lc, front1.Count - 1);
        front1.RemoveAt(front1.Count - 1);
      }
    }

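    // Strategy-parameter adaptation: offspring that survived selection update themselves
    // (UpdateAsOffspring), while surviving parents are updated depending on whether their
    // offspring made it into the next population (UpdateAsParent).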
    private void UpdatePopulation(IReadOnlyList<Individual> parents) {
      foreach (var p in parents.Skip(solutions.Length).Where(i => i.Selected))
        p.UpdateAsOffspring();
      for (var i = 0; i < solutions.Length; i++)
        if (parents[i].Selected)
          parents[i].UpdateAsParent(parents[i + solutions.Length].Selected);
      solutions = parents.Where(p => p.Selected).ToArray();
    }

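    // Records the current front and scatter plot; for MultiObjectiveTestFunctionProblems it
    // additionally computes quality indicators (hypervolume, generational and inverted
    // generational distance, crowding, spacing), appends them to the time series and calls
    // the problem's own Analyze.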
    private void Analyze() {
      var qualities = solutions.Select(x => x.Fitness).ToArray();

      // TODO: check for side effects
      ResultsScatterPlot = new ParetoFrontScatterPlot(qualities, solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives, ResultsScatterPlot.ProblemSize);
      ResultsSolutions = solutions.Select(x => x.Mean.ToArray()).ToMatrix();

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;

      if (qualities.Length == 0) return;
      ResultsCrowding = CrowdingCalculator.CalculateCrowding(qualities);
      ResultsSpacing = SpacingCalculator.CalculateSpacing(qualities);
      ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistanceCalculator.CalculateGenerationalDistance(qualities, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistanceCalculator.CalculateInverseGenerationalDistance(qualities, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsHypervolume = HypervolumeCalculator.CalculateHypervolume(qualities, problem.ReferencePoint.CloneAsArray(), Problem.Maximization);
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume;

      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);

      Problem.Analyze(
        solutions.Select(x => (Optimization.Individual)new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x.Mean) } })).ToArray(),
        solutions.Select(x => x.Fitness).ToArray(),
        Results,
        random);
    }
  }
}