
source: branches/ichiriac/HeuristicLab.Algorithms.Shade/Shade.cs @ 15344

Last change on this file since 15344 was 14699, checked in by gkronber, 8 years ago

#2646: fixed compile errors in SHADE implementation (and set svn:ignore properties)

File size: 24.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * The implementation is inspired by the Java implementation of the SHADE algorithm: https://sites.google.com/site/tanaberyoji/software/SHADE1.0.1_CEC2013.zip?attredirects=0&d=1
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */

#endregion
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.TestFunctions;
using HeuristicLab.Random;
using System;
using System.Collections.Generic;
using System.Threading;

namespace HeuristicLab.Algorithms.Shade {

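  // Success-History based Adaptive Differential Evolution (SHADE) after Tanabe & Fukunaga (CEC 2013):
  // F and CR are sampled per individual from historical memories M_F and M_CR, which are updated each
  // generation with weighted means of the parameter values that produced improved offspring; trial
  // vectors use the current-to-pbest/1/bin strategy, and replaced parents are kept in an external
  // archive that is also sampled during mutation.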
  [Item("Success-History Based Parameter Adaptation for DE (SHADE)", "A self-adaptive version of differential evolution")]
  [StorableClass]
  [Creatable(CreatableAttribute.Categories.PopulationBasedAlgorithms, Priority = 400)]
  public class Shade : BasicAlgorithm {
    public Func<IEnumerable<double>, double> Evaluation;

    public override Type ProblemType {
      get { return typeof(SingleObjectiveTestFunctionProblem); }
    }
    public new SingleObjectiveTestFunctionProblem Problem {
      get { return (SingleObjectiveTestFunctionProblem)base.Problem; }
      set { base.Problem = value; }
    }

    private readonly IRandom _random = new MersenneTwister();
    private int evals;
    private int pop_size;
    private double arc_rate;
    private int arc_size;
    private double p_best_rate;
    private int memory_size;

    private double[][] pop;
    private double[] fitness;
    private double[][] children;
    private double[] children_fitness;

    private double[] bsf_solution;
    private double bsf_fitness = 1e+30;
    private double[,] archive;
    private int num_arc_inds = 0;

    #region ParameterNames
    private const string MaximumEvaluationsParameterName = "Maximum Evaluations";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string CrossoverProbabilityParameterName = "CrossoverProbability";
    private const string PopulationSizeParameterName = "PopulationSize";
    private const string ScalingFactorParameterName = "ScalingFactor";
    private const string ValueToReachParameterName = "ValueToReach";
    private const string ArchiveRateParameterName = "ArchiveRate";
    private const string MemorySizeParameterName = "MemorySize";
    private const string BestRateParameterName = "BestRate";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> MaximumEvaluationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaximumEvaluationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
    private ValueParameter<IntValue> PopulationSizeParameter {
      get { return (ValueParameter<IntValue>)Parameters[PopulationSizeParameterName]; }
    }
    public ValueParameter<DoubleValue> CrossoverProbabilityParameter {
      get { return (ValueParameter<DoubleValue>)Parameters[CrossoverProbabilityParameterName]; }
    }
    public ValueParameter<DoubleValue> ScalingFactorParameter {
      get { return (ValueParameter<DoubleValue>)Parameters[ScalingFactorParameterName]; }
    }
    public ValueParameter<DoubleValue> ValueToReachParameter {
      get { return (ValueParameter<DoubleValue>)Parameters[ValueToReachParameterName]; }
    }
    public ValueParameter<DoubleValue> ArchiveRateParameter {
      get { return (ValueParameter<DoubleValue>)Parameters[ArchiveRateParameterName]; }
    }
    public ValueParameter<IntValue> MemorySizeParameter {
      get { return (ValueParameter<IntValue>)Parameters[MemorySizeParameterName]; }
    }
    public ValueParameter<DoubleValue> BestRateParameter {
      get { return (ValueParameter<DoubleValue>)Parameters[BestRateParameterName]; }
    }
    #endregion

    #region Properties
    public int MaximumEvaluations {
      get { return MaximumEvaluationsParameter.Value.Value; }
      set { MaximumEvaluationsParameter.Value.Value = value; }
    }

    public double CrossoverProbability {
      get { return CrossoverProbabilityParameter.Value.Value; }
      set { CrossoverProbabilityParameter.Value.Value = value; }
    }
    public double ScalingFactor {
      get { return ScalingFactorParameter.Value.Value; }
      set { ScalingFactorParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public IntValue PopulationSize {
      get { return PopulationSizeParameter.Value; }
      set { PopulationSizeParameter.Value = value; }
    }
    public double ValueToReach {
      get { return ValueToReachParameter.Value.Value; }
      set { ValueToReachParameter.Value.Value = value; }
    }
    public double ArchiveRate {
      get { return ArchiveRateParameter.Value.Value; }
      set { ArchiveRateParameter.Value.Value = value; }
    }
    public IntValue MemorySize {
      get { return MemorySizeParameter.Value; }
      set { MemorySizeParameter.Value = value; }
    }
    public double BestRate {
      get { return BestRateParameter.Value.Value; }
      set { BestRateParameter.Value.Value = value; }
    }
    #endregion

    #region ResultsProperties
    private double ResultsBestQuality {
      get { return ((DoubleValue)Results["Best Quality"].Value).Value; }
      set { ((DoubleValue)Results["Best Quality"].Value).Value = value; }
    }

    private double VTRBestQuality {
      get { return ((DoubleValue)Results["VTR"].Value).Value; }
      set { ((DoubleValue)Results["VTR"].Value).Value = value; }
    }

    private RealVector ResultsBestSolution {
      get { return (RealVector)Results["Best Solution"].Value; }
      set { Results["Best Solution"].Value = value; }
    }

    private int ResultsEvaluations {
      get { return ((IntValue)Results["Evaluations"].Value).Value; }
      set { ((IntValue)Results["Evaluations"].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results["Iterations"].Value).Value; }
      set { ((IntValue)Results["Iterations"].Value).Value = value; }
    }

    private DataTable ResultsQualities {
      get { return ((DataTable)Results["Qualities"].Value); }
    }
    private DataRow ResultsQualitiesBest {
      get { return ResultsQualities.Rows["Best Quality"]; }
    }
    #endregion

    [StorableConstructor]
    protected Shade(bool deserializing) : base(deserializing) { }

    protected Shade(Shade original, Cloner cloner)
      : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new Shade(this, cloner);
    }

    public Shade() {
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluationsParameterName, "", new IntValue(Int32.MaxValue)));
      Parameters.Add(new ValueParameter<IntValue>(PopulationSizeParameterName, "The size of the population of solutions.", new IntValue(75)));
      Parameters.Add(new ValueParameter<DoubleValue>(ValueToReachParameterName, "Value to reach (VTR) parameter", new DoubleValue(0.00000001)));
      Parameters.Add(new ValueParameter<DoubleValue>(ArchiveRateParameterName, "Archive rate parameter", new DoubleValue(2.0)));
      // Note: the history must hold at least one entry; a memory size of 0 makes the F/CR sampling in Run fail.
      Parameters.Add(new ValueParameter<IntValue>(MemorySizeParameterName, "Memory size parameter", new IntValue(0)));
      Parameters.Add(new ValueParameter<DoubleValue>(BestRateParameterName, "Best rate parameter", new DoubleValue(0.1)));
    }

    protected override void Run(CancellationToken cancellationToken) {

      // Set up the results display
      Results.Add(new Result("Iterations", new IntValue(0)));
      Results.Add(new Result("Evaluations", new IntValue(0)));
      Results.Add(new Result("Best Solution", new RealVector()));
      Results.Add(new Result("Best Quality", new DoubleValue(double.NaN)));
      Results.Add(new Result("VTR", new DoubleValue(double.NaN)));
      var table = new DataTable("Qualities");
      table.Rows.Add(new DataRow("Best Quality"));
      Results.Add(new Result("Qualities", table));

      this.evals = 0;
      int archive_size = (int)Math.Round(ArchiveRateParameter.Value.Value * PopulationSize.Value);
      int problem_size = Problem.ProblemSize.Value;

      int pop_size = PopulationSizeParameter.Value.Value;
      this.arc_rate = ArchiveRateParameter.Value.Value;
      this.arc_size = (int)Math.Round(this.arc_rate * pop_size);
      this.p_best_rate = BestRateParameter.Value.Value;
      this.memory_size = MemorySizeParameter.Value.Value;

      this.pop = new double[pop_size][];
      this.fitness = new double[pop_size];
      this.children = new double[pop_size][];
      this.children_fitness = new double[pop_size];

      this.bsf_solution = new double[problem_size];
      this.bsf_fitness = 1e+30;
      this.archive = new double[arc_size, Problem.ProblemSize.Value];
      this.num_arc_inds = 0;

      double[,] populationOld = new double[PopulationSizeParameter.Value.Value, Problem.ProblemSize.Value];
      double[,] mutationPopulation = new double[PopulationSizeParameter.Value.Value, Problem.ProblemSize.Value];
      double[,] trialPopulation = new double[PopulationSizeParameter.Value.Value, Problem.ProblemSize.Value];
      double[] bestPopulation = new double[Problem.ProblemSize.Value];
      double[] bestPopulationIteration = new double[Problem.ProblemSize.Value];
      double[,] archive = new double[archive_size, Problem.ProblemSize.Value];

      //for the external archive
      int rand_arc_ind;

      int num_success_params;

      double[] success_sf = new double[PopulationSizeParameter.Value.Value];
      double[] success_cr = new double[PopulationSizeParameter.Value.Value];
      double[] dif_fitness = new double[PopulationSizeParameter.Value.Value];
      double[] fitness = new double[PopulationSizeParameter.Value.Value];

      //the contents of M_f and M_cr are all initialized to 0.5
      double[] memory_sf = new double[MemorySizeParameter.Value.Value];
      double[] memory_cr = new double[MemorySizeParameter.Value.Value];

      for(int i = 0; i < MemorySizeParameter.Value.Value; i++) {
        memory_sf[i] = 0.5;
        memory_cr[i] = 0.5;
      }
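      // One slot of these memories (at memory_pos, advancing circularly) is overwritten per generation
      // with a weighted mean of the F and CR values that produced improved offspring.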

      //memory index counter
      int memory_pos = 0;
      double temp_sum_sf1, temp_sum_sf2, temp_sum_cr1, temp_sum_cr2, temp_sum, temp_weight;

      //for new parameters sampling
      double mu_sf, mu_cr;
      int rand_mem_index;

      double[] pop_sf = new double[PopulationSizeParameter.Value.Value];
      double[] pop_cr = new double[PopulationSizeParameter.Value.Value];

      //for current-to-pbest/1
      int p_best_ind;
      double m = PopulationSizeParameter.Value.Value * BestRateParameter.Value.Value;
      int p_num = (int)Math.Round(m);
      int[] sorted_array = new int[PopulationSizeParameter.Value.Value];
      double[] sorted_fitness = new double[PopulationSizeParameter.Value.Value];

      //initialize the population
      populationOld = makeNewIndividuals();

      //evaluate the best member after the initialization
      //the idea is to evaluate the first member and then compare the remaining members of the population against it

      int best_index = 0;
      double[] populationRow = new double[Problem.ProblemSize.Value];
      bestPopulation = getMatrixRow(populationOld, best_index);
      RealVector bestPopulationVector = new RealVector(bestPopulation);
      double bestPopulationValue = Obj(bestPopulationVector);
      fitness[best_index] = bestPopulationValue;
      RealVector selectionVector;
      RealVector trialVector;
      double qtrial;

      for(var i = 0; i < PopulationSizeParameter.Value.Value; i++) {
        populationRow = getMatrixRow(populationOld, i);
        trialVector = new RealVector(populationRow);

        qtrial = Obj(trialVector);
        fitness[i] = qtrial;

        if(qtrial < bestPopulationValue) { // minimization: smaller objective values are better
          bestPopulationVector = new RealVector(populationRow);
          bestPopulationValue = qtrial;
          best_index = i;
        }
      }

      int iterations = 1;

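      // Each generation: sort the population by fitness, sample F and CR for every target vector from a
      // randomly chosen memory slot, create trial vectors with current-to-pbest/1/bin (drawing r2 from
      // the population or the archive), evaluate them, keep the better of parent and trial, and update
      // the parameter memories from the successful F and CR values.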
      // Loop until the evaluation limit is reached, the run is canceled, or the value-to-reach is hit.
      // todo replace with a function
      while(ResultsEvaluations < MaximumEvaluations
          && !cancellationToken.IsCancellationRequested
          && bestPopulationValue > Problem.BestKnownQuality.Value + ValueToReachParameter.Value.Value) {
        for(int i = 0; i < PopulationSizeParameter.Value.Value; i++) sorted_array[i] = i;
        for(int i = 0; i < PopulationSizeParameter.Value.Value; i++) sorted_fitness[i] = fitness[i];

        Quicksort(sorted_fitness, 0, PopulationSizeParameter.Value.Value - 1, sorted_array);

        for(int target = 0; target < PopulationSizeParameter.Value.Value; target++) {
          rand_mem_index = (int)(_random.NextDouble() * MemorySizeParameter.Value.Value);
          mu_sf = memory_sf[rand_mem_index];
          mu_cr = memory_cr[rand_mem_index];

          //generate CR_i and repair its value
          if(mu_cr == -1) {
            pop_cr[target] = 0;
          } else {
            pop_cr[target] = gauss(mu_cr, 0.1);
            if(pop_cr[target] > 1) pop_cr[target] = 1;
            else if(pop_cr[target] < 0) pop_cr[target] = 0;
          }

          //generate F_i and repair its value
          do {
            pop_sf[target] = cauchy_g(mu_sf, 0.1);
          } while(pop_sf[target] <= 0);

          if(pop_sf[target] > 1) pop_sf[target] = 1;

          //p-best individual is randomly selected from the top pop_size * p_i members
          p_best_ind = sorted_array[(int)(_random.NextDouble() * p_num)];

          trialPopulation = operateCurrentToPBest1BinWithArchive(populationOld, trialPopulation, target, p_best_ind, pop_sf[target], pop_cr[target]);
        }

        for(int i = 0; i < pop_size; i++) {
          trialVector = new RealVector(getMatrixRow(trialPopulation, i));
          children_fitness[i] = Obj(trialVector);
        }

        //update the best-so-far solution
        for(var i = 0; i < PopulationSizeParameter.Value.Value; i++) {
          populationRow = getMatrixRow(populationOld, i);
          qtrial = fitness[i];

          if(qtrial < bestPopulationValue) { // minimization: smaller objective values are better
            bestPopulationVector = new RealVector(populationRow);
            bestPopulationValue = qtrial;
            best_index = i;
          }
        }

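        // Selection: a trial vector replaces its parent when it is at least as good; strictly better
        // trials additionally move the replaced parent into the archive and record F_i, CR_i and the
        // fitness improvement for the memory update below.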
        num_success_params = 0;

        //generation alternation
        for(int i = 0; i < pop_size; i++) {
          if(children_fitness[i] == fitness[i]) {
            fitness[i] = children_fitness[i];
            for(int j = 0; j < problem_size; j++) populationOld[i, j] = trialPopulation[i, j];
          } else if(children_fitness[i] < fitness[i]) {
            //parent vectors x_i which were worse than the trial vectors u_i are preserved in the archive
            if(arc_size > 1) {
              if(num_arc_inds < arc_size) {
                for(int j = 0; j < problem_size; j++) this.archive[num_arc_inds, j] = populationOld[i, j];
                num_arc_inds++;
              }
              //whenever the archive is full, a randomly selected entry is overwritten by the new one
              else {
                rand_arc_ind = (int)(_random.NextDouble() * arc_size);
                for(int j = 0; j < problem_size; j++) this.archive[rand_arc_ind, j] = populationOld[i, j];
              }
            }

            dif_fitness[num_success_params] = Math.Abs(fitness[i] - children_fitness[i]);

            fitness[i] = children_fitness[i];
            for(int j = 0; j < problem_size; j++) populationOld[i, j] = trialPopulation[i, j];

            //successful parameters are preserved in S_F and S_CR
            success_sf[num_success_params] = pop_sf[i];
            success_cr[num_success_params] = pop_cr[i];
            num_success_params++;
          }
        }

        if(num_success_params > 0) {
          temp_sum_sf1 = 0;
          temp_sum_sf2 = 0;
          temp_sum_cr1 = 0;
          temp_sum_cr2 = 0;
          temp_sum = 0;
          temp_weight = 0;

          for(int i = 0; i < num_success_params; i++) temp_sum += dif_fitness[i];

          //weighted Lehmer mean
          for(int i = 0; i < num_success_params; i++) {
            temp_weight = dif_fitness[i] / temp_sum;

            temp_sum_sf1 += temp_weight * success_sf[i] * success_sf[i];
            temp_sum_sf2 += temp_weight * success_sf[i];

            temp_sum_cr1 += temp_weight * success_cr[i] * success_cr[i];
            temp_sum_cr2 += temp_weight * success_cr[i];
          }

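          // New memory entries are weighted Lehmer means, e.g. M_F[k] = sum(w_i * F_i^2) / sum(w_i * F_i),
          // with weights w_i proportional to the fitness improvement achieved by each successful F_i/CR_i.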
          memory_sf[memory_pos] = temp_sum_sf1 / temp_sum_sf2;

          if(temp_sum_cr2 == 0 || memory_cr[memory_pos] == -1) {
            memory_cr[memory_pos] = -1;
          } else {
            memory_cr[memory_pos] = temp_sum_cr1 / temp_sum_cr2;
          }

          //increment the counter
          memory_pos++;
          if(memory_pos >= memory_size) memory_pos = 0;
        }

        //update the best candidate
        for(int i = 0; i < PopulationSizeParameter.Value.Value; i++) {
          selectionVector = new RealVector(getMatrixRow(populationOld, i));
          var quality = fitness[i];
          if(quality < bestPopulationValue) {
            bestPopulationVector = (RealVector)selectionVector.Clone();
            bestPopulationValue = quality;
          }
        }

        iterations++;

        //update the results
        ResultsEvaluations = evals;
        ResultsIterations = iterations;
        ResultsBestSolution = bestPopulationVector;
        ResultsBestQuality = bestPopulationValue;

        //update the results in the view
        if(iterations % 10 == 0) ResultsQualitiesBest.Values.Add(bestPopulationValue);
        if(bestPopulationValue < Problem.BestKnownQuality.Value + ValueToReachParameter.Value.Value) {
          VTRBestQuality = bestPopulationValue;
        }
      }
    }

    public override bool SupportsPause { get { return false; } } // TODO (can we pause?)

    //evaluate the vector; maximization problems are negated so that smaller values are always better
    public double Obj(RealVector x) {
      evals = evals + 1;
      if(Problem.Maximization.Value)
        return -Problem.Evaluator.Evaluate(x);

      return Problem.Evaluator.Evaluate(x);
    }

    // Get ith row from the matrix
    public double[] getMatrixRow(double[,] Mat, int i) {
      double[] tmp = new double[Mat.GetUpperBound(1) + 1];

      for(int j = 0; j <= Mat.GetUpperBound(1); j++) {
        tmp[j] = Mat[i, j];
      }

      return tmp;
    }

    /*
        Return a random value from a Cauchy distribution with location "mu" and scale "gamma"
        http://www.sat.t.u-tokyo.ac.jp/~omi/random_variables_generation.html#Cauchy
    */
    private double cauchy_g(double mu, double gamma) {
      return mu + gamma * Math.Tan(Math.PI * (_random.NextDouble() - 0.5));
    }

    /*
         Return a random value from a normal distribution with mean "mu" and standard deviation "sigma"
         (Box-Muller transform), http://www.sat.t.u-tokyo.ac.jp/~omi/random_variables_generation.html#Gauss
    */
    private double gauss(double mu, double sigma) {
      return mu + sigma * Math.Sqrt(-2.0 * Math.Log(_random.NextDouble())) * Math.Sin(2.0 * Math.PI * _random.NextDouble());
    }

    private double[,] makeNewIndividuals() {
      //problem variables
      var dim = Problem.ProblemSize.Value;
      var lb = Problem.Bounds[0, 0];
      var ub = Problem.Bounds[0, 1];
      var range = ub - lb;
      double[,] population = new double[PopulationSizeParameter.Value.Value, Problem.ProblemSize.Value];

      //create the initial population uniformly at random within the bounds
      //population is a matrix of size PopulationSize x ProblemSize
      for(int i = 0; i < PopulationSizeParameter.Value.Value; i++) {
        for(int j = 0; j < Problem.ProblemSize.Value; j++) {
          population[i, j] = _random.NextDouble() * range + lb;
        }
      }
      return population;
    }

    // Sorts 'elements' in ascending order and applies the same swaps to 'index',
    // so that 'index' afterwards lists the original positions in sorted order.
    private static void Quicksort(double[] elements, int left, int right, int[] index) {
      int i = left, j = right;
      double pivot = elements[(left + right) / 2];
      double tmp_var = 0;
      int tmp_index = 0;

      while(i <= j) {
        while(elements[i].CompareTo(pivot) < 0) {
          i++;
        }

        while(elements[j].CompareTo(pivot) > 0) {
          j--;
        }

        if(i <= j) {
          // Swap
          tmp_var = elements[i];
          elements[i] = elements[j];
          elements[j] = tmp_var;

          tmp_index = index[i];
          index[i] = index[j];
          index[j] = tmp_index;

          i++;
          j--;
        }
      }

      // Recursive calls
      if(left < j) {
        Quicksort(elements, left, j, index);
      }

      if(i < right) {
        Quicksort(elements, i, right, index);
      }
    }

    // current-to-pbest/1/bin mutation and crossover with archive
    // todo: analyze how the archive is implemented
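    // For each crossed-over dimension the trial value is
    //   u = x_target + F * (x_pbest - x_target) + F * (x_r1 - x_r2),
    // where r2 may be drawn from either the current population or the archive; all other dimensions are
    // copied from the target, and out-of-bound values are repaired towards the midpoint between the
    // violated bound and the target value.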
    private double[,] operateCurrentToPBest1BinWithArchive(double[,] pop, double[,] children, int target, int p_best_individual, double scaling_factor, double cross_rate) {
      int r1, r2;
      var lb = Problem.Bounds[0, 0];
      var ub = Problem.Bounds[0, 1];

      do {
        r1 = (int)(_random.NextDouble() * PopulationSizeParameter.Value.Value);
      } while(r1 == target);
      do {
        // r2 may also point into the archive (indices >= population size)
        r2 = (int)(_random.NextDouble() * (PopulationSizeParameter.Value.Value + num_arc_inds));
      } while((r2 == target) || (r2 == r1));

      int random_variable = (int)(_random.NextDouble() * Problem.ProblemSize.Value);

      if(r2 >= PopulationSizeParameter.Value.Value) {
        r2 -= PopulationSizeParameter.Value.Value;
        for(int i = 0; i < Problem.ProblemSize.Value; i++) {
          if((_random.NextDouble() < cross_rate) || (i == random_variable)) children[target, i] = pop[target, i] + scaling_factor * (pop[p_best_individual, i] - pop[target, i]) + scaling_factor * (pop[r1, i] - archive[r2, i]);
          else children[target, i] = pop[target, i];
        }
      } else {
        for(int i = 0; i < Problem.ProblemSize.Value; i++) {
          if((_random.NextDouble() < cross_rate) || (i == random_variable)) children[target, i] = pop[target, i] + scaling_factor * (pop[p_best_individual, i] - pop[target, i]) + scaling_factor * (pop[r1, i] - pop[r2, i]);
          else children[target, i] = pop[target, i];
        }
      }

      for(int i = 0; i < Problem.ProblemSize.Value; i++) {
        if(children[target, i] < lb) children[target, i] = (lb + pop[target, i]) / 2.0;
        else if(children[target, i] > ub) children[target, i] = (ub + pop[target, i]) / 2.0;
      }

      return children;
    }
  }
}