
source: branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/MOCMAEvolutionStrategy.cs @ 15176

Last change on this file since 15176 was 15176, checked in by bwerth, 7 years ago

#2592 formatting & removed unreferenced enum

File size: 25.5 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.TestFunctions.MultiObjective;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.MOCMAEvolutionStrategy {
  [Item("MOCMA Evolution Strategy (MOCMAES)", "A multi-objective evolution strategy based on covariance matrix adaptation. Code is based on 'Covariance Matrix Adaptation for Multi-objective Optimization' by Igel, Hansen and Roth.")]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 210)]
  [StorableClass]
  [System.Runtime.InteropServices.Guid("5AC20A69-BBBF-4153-B57D-3EAF92DC505E")]
  public class MOCMAEvolutionStrategy : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(MultiObjectiveBasicProblem<RealVectorEncoding>); }
    }
    public new MultiObjectiveBasicProblem<RealVectorEncoding> Problem {
      get { return (MultiObjectiveBasicProblem<RealVectorEncoding>)base.Problem; }
      set { base.Problem = value; }
    }
    public override bool SupportsPause {
      get { return true; }
    }

    #region Storable fields
    [Storable]
    private IRandom random = new MersenneTwister();
    [Storable]
    private NormalDistributedRandom gauss;
    [Storable]
    private Individual[] solutions;
    [Storable]
    private double stepSizeLearningRate; // cp - learning rate in [0,1]
    [Storable]
    private double stepSizeDampeningFactor; // d
    [Storable]
    private double targetSuccessProbability; // p^target_succ
    [Storable]
    private double evolutionPathLearningRate; // cc
    [Storable]
    private double covarianceMatrixLearningRate; // ccov
    [Storable]
    private double covarianceMatrixUnlearningRate;
    [Storable]
    private double successThreshold; // p_thresh

    #endregion

    #region ParameterNames
    private const string MaximumRuntimeName = "Maximum Runtime";
    private const string SeedName = "Seed";
    private const string SetSeedRandomlyName = "SetSeedRandomly";
    private const string PopulationSizeName = "PopulationSize";
    private const string MaximumGenerationsName = "MaximumGenerations";
    private const string MaximumEvaluatedSolutionsName = "MaximumEvaluatedSolutions";
    private const string InitialSigmaName = "InitialSigma";
    private const string IndicatorName = "Indicator";

    private const string EvaluationsResultName = "Evaluations";
    private const string IterationsResultName = "Generations";
    private const string TimetableResultName = "Timetable";
    private const string HypervolumeResultName = "Hypervolume";
    private const string GenerationalDistanceResultName = "Generational Distance";
    private const string InvertedGenerationalDistanceResultName = "Inverted Generational Distance";
    private const string CrowdingResultName = "Crowding";
    private const string SpacingResultName = "Spacing";
    private const string CurrentFrontResultName = "Pareto Front";
    private const string BestHypervolumeResultName = "Best Hypervolume";
    private const string BestKnownHypervolumeResultName = "Best known hypervolume";
    private const string DifferenceToBestKnownHypervolumeResultName = "Absolute Distance to BestKnownHypervolume";
    private const string ScatterPlotResultName = "ScatterPlot";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumRuntimeName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyName]; }
    }
    public IFixedValueParameter<IntValue> PopulationSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[PopulationSizeName]; }
    }
    public IFixedValueParameter<IntValue> MaximumGenerationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumGenerationsName]; }
    }
    public IFixedValueParameter<IntValue> MaximumEvaluatedSolutionsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluatedSolutionsName]; }
    }
    public IValueParameter<DoubleArray> InitialSigmaParameter {
      get { return (IValueParameter<DoubleArray>)Parameters[InitialSigmaName]; }
    }
    public IConstrainedValueParameter<IIndicator> IndicatorParameter {
      get { return (IConstrainedValueParameter<IIndicator>)Parameters[IndicatorName]; }
    }
    #endregion

    #region Properties
    public int MaximumRuntime {
      get { return MaximumRuntimeParameter.Value.Value; }
      set { MaximumRuntimeParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int PopulationSize {
      get { return PopulationSizeParameter.Value.Value; }
      set { PopulationSizeParameter.Value.Value = value; }
    }
    public int MaximumGenerations {
      get { return MaximumGenerationsParameter.Value.Value; }
      set { MaximumGenerationsParameter.Value.Value = value; }
    }
    public int MaximumEvaluatedSolutions {
      get { return MaximumEvaluatedSolutionsParameter.Value.Value; }
      set { MaximumEvaluatedSolutionsParameter.Value.Value = value; }
    }
    public DoubleArray InitialSigma {
      get { return InitialSigmaParameter.Value; }
      set { InitialSigmaParameter.Value = value; }
    }
    public IIndicator Indicator {
      get { return IndicatorParameter.Value; }
      set { IndicatorParameter.Value = value; }
    }

    public double StepSizeLearningRate { get { return stepSizeLearningRate; } }
    public double StepSizeDampeningFactor { get { return stepSizeDampeningFactor; } }
    public double TargetSuccessProbability { get { return targetSuccessProbability; } }
    public double EvolutionPathLearningRate { get { return evolutionPathLearningRate; } }
    public double CovarianceMatrixLearningRate { get { return covarianceMatrixLearningRate; } }
    public double CovarianceMatrixUnlearningRate { get { return covarianceMatrixUnlearningRate; } }
    public double SuccessThreshold { get { return successThreshold; } }
    #endregion

    #region ResultsProperties
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluationsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluationsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    #region Datatable
    private DataTable ResultsQualities {
      get { return (DataTable)Results[TimetableResultName].Value; }
    }
    private DataRow ResultsBestHypervolumeDataLine {
      get { return ResultsQualities.Rows[BestHypervolumeResultName]; }
    }
    private DataRow ResultsHypervolumeDataLine {
      get { return ResultsQualities.Rows[HypervolumeResultName]; }
    }
    private DataRow ResultsGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[GenerationalDistanceResultName]; }
    }
    private DataRow ResultsInvertedGenerationalDistanceDataLine {
      get { return ResultsQualities.Rows[InvertedGenerationalDistanceResultName]; }
    }
    private DataRow ResultsCrowdingDataLine {
      get { return ResultsQualities.Rows[CrowdingResultName]; }
    }
    private DataRow ResultsSpacingDataLine {
      get { return ResultsQualities.Rows[SpacingResultName]; }
    }
    private DataRow ResultsHypervolumeDifferenceDataLine {
      get { return ResultsQualities.Rows[DifferenceToBestKnownHypervolumeResultName]; }
    }
    #endregion
    //QualityIndicators
    private double ResultsHypervolume {
      get { return ((DoubleValue)Results[HypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[HypervolumeResultName].Value).Value = value; }
    }
    private double ResultsGenerationalDistance {
      get { return ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[GenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsInvertedGenerationalDistance {
      get { return ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value; }
      set { ((DoubleValue)Results[InvertedGenerationalDistanceResultName].Value).Value = value; }
    }
    private double ResultsCrowding {
      get { return ((DoubleValue)Results[CrowdingResultName].Value).Value; }
      set { ((DoubleValue)Results[CrowdingResultName].Value).Value = value; }
    }
    private double ResultsSpacing {
      get { return ((DoubleValue)Results[SpacingResultName].Value).Value; }
      set { ((DoubleValue)Results[SpacingResultName].Value).Value = value; }
    }
    private double ResultsBestHypervolume {
      get { return ((DoubleValue)Results[BestHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsBestKnownHypervolume {
      get { return ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[BestKnownHypervolumeResultName].Value).Value = value; }
    }
    private double ResultsDifferenceBestKnownHypervolume {
      get { return ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value; }
      set { ((DoubleValue)Results[DifferenceToBestKnownHypervolumeResultName].Value).Value = value; }
    }
    //Solutions
    private DoubleMatrix ResultsSolutions {
      get { return (DoubleMatrix)Results[CurrentFrontResultName].Value; }
      set { Results[CurrentFrontResultName].Value = value; }
    }
    private ScatterPlotContent ResultsScatterPlot {
      get { return (ScatterPlotContent)Results[ScatterPlotResultName].Value; }
      set { Results[ScatterPlotResultName].Value = value; }
    }
    #endregion

    #region Constructors
    public MOCMAEvolutionStrategy() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime.", new IntValue(3600)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(PopulationSizeName, "λ (lambda) - the size of the offspring population.", new IntValue(20)));
      Parameters.Add(new ValueParameter<DoubleArray>(InitialSigmaName, "The initial sigma can be a single value or a value for each dimension. All values need to be > 0.", new DoubleArray(new[] { 0.5 })));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumGenerationsName, "The maximum number of generations which should be processed.", new IntValue(1000)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluatedSolutionsName, "The maximum number of evaluated solutions that should be computed.", new IntValue(int.MaxValue)));
      var set = new ItemSet<IIndicator> { new HypervolumeIndicator(), new CrowdingIndicator(), new MinimalDistanceIndicator() };
      Parameters.Add(new ConstrainedValueParameter<IIndicator>(IndicatorName, "The selection mechanism applied to non-dominated solutions.", set, set.First()));
    }

    [StorableConstructor]
    protected MOCMAEvolutionStrategy(bool deserializing) : base(deserializing) { }

    protected MOCMAEvolutionStrategy(MOCMAEvolutionStrategy original, Cloner cloner) : base(original, cloner) {
      random = cloner.Clone(original.random);
      gauss = cloner.Clone(original.gauss);
      solutions = original.solutions != null ? original.solutions.Select(cloner.Clone).ToArray() : null;
      stepSizeLearningRate = original.stepSizeLearningRate;
      stepSizeDampeningFactor = original.stepSizeDampeningFactor;
      targetSuccessProbability = original.targetSuccessProbability;
      evolutionPathLearningRate = original.evolutionPathLearningRate;
      covarianceMatrixLearningRate = original.covarianceMatrixLearningRate;
      covarianceMatrixUnlearningRate = original.covarianceMatrixUnlearningRate;
      successThreshold = original.successThreshold;
    }

    public override IDeepCloneable Clone(Cloner cloner) { return new MOCMAEvolutionStrategy(this, cloner); }
    #endregion

    #region Initialization
    protected override void Initialize(CancellationToken cancellationToken) {
      if (SetSeedRandomly) Seed = new System.Random().Next();
      random.Reset(Seed);
      gauss = new NormalDistributedRandom(random, 0, 1);

      InitResults();
      InitStrategy();
      InitSolutions();
      Analyze();

      ResultsIterations = 1;
      cancellationToken.ThrowIfCancellationRequested();
    }
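    // Each Individual carries its own strategy state: success probability, step size (sigma),
    // evolution path, and covariance matrix. The initial covariance is diagonal: the global step
    // size is set to the largest InitialSigma entry, and each diagonal entry holds the squared
    // ratio of that dimension's InitialSigma to this maximum, so anisotropic initial sigmas are
    // preserved.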
    private Individual InitializeIndividual(RealVector x) {
      var zeros = new RealVector(x.Length);
      var c = new double[x.Length, x.Length];
      var sigma = InitialSigma.Max();
      for (var i = 0; i < x.Length; i++) {
        var d = InitialSigma[i % InitialSigma.Length] / sigma;
        c[i, i] = d * d;
      }
      return new Individual(x, targetSuccessProbability, sigma, zeros, c, this);
    }
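    // The initial population is drawn uniformly at random within the encoding bounds; when the
    // bounds matrix has fewer rows than the encoding has dimensions, its rows are reused
    // cyclically, matching ClosestFeasible and IsFeasable below.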
    private void InitSolutions() {
      solutions = new Individual[PopulationSize];
      for (var i = 0; i < PopulationSize; i++) {
        var x = new RealVector(Problem.Encoding.Length); // Uniform distribution in all dimensions assumed.
        var bounds = Problem.Encoding.Bounds;
        for (var j = 0; j < Problem.Encoding.Length; j++) {
          var dim = j % bounds.Rows;
          x[j] = random.NextDouble() * (bounds[dim, 1] - bounds[dim, 0]) + bounds[dim, 0];
        }
        solutions[i] = InitializeIndividual(x);
        PenalizeEvaluate(solutions[i]);
      }
    }
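    // Strategy constants for the success-rule-based step-size adaptation and the rank-one
    // covariance-matrix update. Each individual effectively acts as a (1+1)-strategy (lambda = 1);
    // n is the problem dimension. The formulas appear to follow the defaults suggested for the
    // MO-CMA-ES by Igel, Hansen and Roth (see the reference in the Item description).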
    private void InitStrategy() {
      const int lambda = 1;
      double n = Problem.Encoding.Length;
      targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0);
      stepSizeDampeningFactor = 1.0 + n / (2.0 * lambda);
      stepSizeLearningRate = targetSuccessProbability * lambda / (2.0 + targetSuccessProbability * lambda);
      evolutionPathLearningRate = 2.0 / (n + 2.0);
      covarianceMatrixLearningRate = 2.0 / (n * n + 6.0);
      covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1);
      successThreshold = 0.44;
    }
    private void InitResults() {
      Results.Add(new Result(IterationsResultName, "The number of generations evaluated", new IntValue(0)));
      Results.Add(new Result(EvaluationsResultName, "The number of function evaluations performed", new IntValue(0)));
      Results.Add(new Result(HypervolumeResultName, "The hypervolume of the current front considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestHypervolumeResultName, "The best hypervolume of the current run considering the reference point defined in the Problem", new DoubleValue(0.0)));
      Results.Add(new Result(BestKnownHypervolumeResultName, "The best known hypervolume considering the reference point defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(DifferenceToBestKnownHypervolumeResultName, "The difference between the current and the best known hypervolume", new DoubleValue(double.NaN)));
      Results.Add(new Result(GenerationalDistanceResultName, "The generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(InvertedGenerationalDistanceResultName, "The inverted generational distance to an optimal Pareto front defined in the Problem", new DoubleValue(double.NaN)));
      Results.Add(new Result(CrowdingResultName, "The average crowding value for the current front (excluding infinities)", new DoubleValue(0.0)));
      Results.Add(new Result(SpacingResultName, "The spacing for the current front (excluding infinities)", new DoubleValue(0.0)));

      var table = new DataTable("QualityIndicators");
      table.Rows.Add(new DataRow(BestHypervolumeResultName));
      table.Rows.Add(new DataRow(HypervolumeResultName));
      table.Rows.Add(new DataRow(CrowdingResultName));
      table.Rows.Add(new DataRow(GenerationalDistanceResultName));
      table.Rows.Add(new DataRow(InvertedGenerationalDistanceResultName));
      table.Rows.Add(new DataRow(DifferenceToBestKnownHypervolumeResultName));
      table.Rows.Add(new DataRow(SpacingResultName));
      Results.Add(new Result(TimetableResultName, "Different quality measures in a time series", table));
      Results.Add(new Result(CurrentFrontResultName, "The current front", new DoubleMatrix()));
      Results.Add(new Result(ScatterPlotResultName, "A scatter plot displaying the evaluated solutions and (if available) the analytically optimal front", new ScatterPlotContent(null, null, null, 2)));

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;
      if (problem.BestKnownFront != null) {
        ResultsBestKnownHypervolume = Hypervolume.Calculate(problem.BestKnownFront.ToJaggedArray(), problem.TestFunction.ReferencePoint(problem.Objectives), Problem.Maximization);
        ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume;
      }
      ResultsScatterPlot = new ScatterPlotContent(new double[0][], new double[0][], problem.BestKnownFront.ToJaggedArray(), problem.Objectives);
    }
    #endregion

    #region Mainloop
    protected override void Run(CancellationToken cancellationToken) {
      while (ResultsIterations < MaximumGenerations) {
        try {
          Iterate();
          ResultsIterations++;
          cancellationToken.ThrowIfCancellationRequested();
        } finally {
          Analyze();
        }
      }
    }
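    // One generation: every current solution produces a single offspring by Gaussian mutation of
    // its mean, the combined parent+offspring population is reduced back to the original size in
    // SelectParents, and the survivors adapt their strategy parameters in UpdatePopulation.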
    private void Iterate() {
      var offspring = solutions.Select(i => {
        var o = new Individual(i);
        o.Mutate(gauss);
        PenalizeEvaluate(o);
        return o;
      });
      var parents = solutions.Concat(offspring).ToArray();
      SelectParents(parents, solutions.Length);
      UpdatePopulation(parents);
    }
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
    #endregion

    #region Evaluation
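    // Constraint handling: infeasible candidates are evaluated at their closest feasible point
    // (box-constraint clamping) and their penalized fitness adds a penalty proportional to the
    // squared distance to that point (subtracted instead for maximization objectives).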
    private void PenalizeEvaluate(Individual individual) {
      if (IsFeasable(individual.Mean)) {
        individual.Fitness = Evaluate(individual.Mean);
        individual.PenalizedFitness = individual.Fitness;
      } else {
        var t = ClosestFeasible(individual.Mean);
        individual.Fitness = Evaluate(t);
        individual.PenalizedFitness = Penalize(individual.Mean, t, individual.Fitness);
      }
    }
    private double[] Evaluate(RealVector x) {
      var res = Problem.Evaluate(new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x) } }), random);
      ResultsEvaluations++;
      return res;
    }
    private double[] Penalize(RealVector x, RealVector t, IEnumerable<double> fitness) {
      var penalty = x.Zip(t, (a, b) => (a - b) * (a - b)).Sum() * 1E-6;
      return fitness.Select((v, i) => Problem.Maximization[i] ? v - penalty : v + penalty).ToArray();
    }
    private RealVector ClosestFeasible(RealVector x) {
      var bounds = Problem.Encoding.Bounds;
      var r = new RealVector(x.Length);
      for (var i = 0; i < x.Length; i++) {
        var dim = i % bounds.Rows;
        r[i] = Math.Min(Math.Max(bounds[dim, 0], x[i]), bounds[dim, 1]);
      }
      return r;
    }
    private bool IsFeasable(RealVector offspring) {
      var bounds = Problem.Encoding.Bounds;
      for (var i = 0; i < offspring.Length; i++) {
        var dim = i % bounds.Rows;
        if (bounds[dim, 0] > offspring[i] || offspring[i] > bounds[dim, 1]) return false;
      }
      return true;
    }
    #endregion

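    // Environmental selection in two stages: a non-dominated sort first deselects entire
    // worst-ranked fronts while at least mu individuals remain; the front that only partially fits
    // is then thinned one element at a time by removing the least contributor according to the
    // configured indicator (hypervolume, crowding or minimal distance).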
    private void SelectParents(IReadOnlyList<Individual> parents, int length) {
      //perform a non-dominated sort to assign a rank to every element
      int[] ranks;
      var fronts = DominationCalculator<Individual>.CalculateAllParetoFronts(parents.ToArray(), parents.Select(i => i.PenalizedFitness).ToArray(), Problem.Maximization, out ranks);

      //deselect whole fronts, worst rank first, as long as at least mu elements remain selected
      var rank = fronts.Count - 1;
      var popSize = parents.Count;
      while (popSize - fronts[rank].Count >= length) {
        var front = fronts[rank];
        foreach (var i in front) i.Item1.Selected = false;
        popSize -= front.Count;
        rank--;
      }

      //use the indicator to deselect the (approximately) worst elements of the last selected front
      var front1 = fronts[rank].OrderBy(x => x.Item1.PenalizedFitness[0]).ToList();
      for (; popSize > length; popSize--) {
        var lc = Indicator.LeastContributer(front1.Select(i => i.Item1).ToArray(), Problem);
        front1[lc].Item1.Selected = false;
        front1.Swap(lc, front1.Count - 1);
        front1.RemoveAt(front1.Count - 1);
      }
    }

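    // Strategy-parameter adaptation: every selected offspring invokes UpdateAsOffspring, and every
    // surviving original parent invokes UpdateAsParent with a flag telling it whether its own
    // offspring was selected (i.e. whether the mutation was successful).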
    private void UpdatePopulation(IReadOnlyList<Individual> parents) {
      foreach (var p in parents.Skip(solutions.Length).Where(i => i.Selected))
        p.UpdateAsOffspring();
      for (var i = 0; i < solutions.Length; i++)
        if (parents[i].Selected)
          parents[i].UpdateAsParent(parents[i + solutions.Length].Selected);
      solutions = parents.Where(p => p.Selected).ToArray();
    }

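    // Per-generation analysis: the scatter plot and the current-front matrix are always updated;
    // the quality indicators (crowding, spacing, generational distance, inverted generational
    // distance, hypervolume) are computed only for MultiObjectiveTestFunctionProblem instances and
    // only over the solutions that dominate the problem's reference point.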
    private void Analyze() {
      ResultsScatterPlot = new ScatterPlotContent(solutions.Select(x => x.Fitness).ToArray(), solutions.Select(x => x.Mean.ToArray()).ToArray(), ResultsScatterPlot.ParetoFront, ResultsScatterPlot.Objectives);
      ResultsSolutions = solutions.Select(x => x.Mean.ToArray()).ToMatrix();

      var problem = Problem as MultiObjectiveTestFunctionProblem;
      if (problem == null) return;

      var front = NonDominatedSelect.GetDominatingVectors(solutions.Select(x => x.Fitness), problem.ReferencePoint.CloneAsArray(), Problem.Maximization, true).ToArray();
      if (front.Length == 0) return;
      var bounds = problem.Bounds.CloneAsMatrix();
      ResultsCrowding = Crowding.Calculate(front, bounds);
      ResultsSpacing = Spacing.Calculate(front);
      ResultsGenerationalDistance = problem.BestKnownFront != null ? GenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsInvertedGenerationalDistance = problem.BestKnownFront != null ? InvertedGenerationalDistance.Calculate(front, problem.BestKnownFront.ToJaggedArray(), 1) : double.NaN;
      ResultsHypervolume = Hypervolume.Calculate(front, problem.ReferencePoint.CloneAsArray(), Problem.Maximization);
      ResultsBestHypervolume = Math.Max(ResultsHypervolume, ResultsBestHypervolume);
      ResultsDifferenceBestKnownHypervolume = ResultsBestKnownHypervolume - ResultsBestHypervolume;

      ResultsBestHypervolumeDataLine.Values.Add(ResultsBestHypervolume);
      ResultsHypervolumeDataLine.Values.Add(ResultsHypervolume);
      ResultsCrowdingDataLine.Values.Add(ResultsCrowding);
      ResultsGenerationalDistanceDataLine.Values.Add(ResultsGenerationalDistance);
      ResultsInvertedGenerationalDistanceDataLine.Values.Add(ResultsInvertedGenerationalDistance);
      ResultsSpacingDataLine.Values.Add(ResultsSpacing);
      ResultsHypervolumeDifferenceDataLine.Values.Add(ResultsDifferenceBestKnownHypervolume);

      Problem.Analyze(
        solutions.Select(x => (Optimization.Individual)new SingleEncodingIndividual(Problem.Encoding, new Scope { Variables = { new Variable(Problem.Encoding.Name, x.Mean) } })).ToArray(),
        solutions.Select(x => x.Fitness).ToArray(),
        Results,
        random);
    }
  }
}