Changeset 14699
- Timestamp: 02/23/17 19:48:22 (8 years ago)
- Location: branches/ichiriac/HeuristicLab.Algorithms.Shade
- Files: 3 edited
branches/ichiriac/HeuristicLab.Algorithms.Shade
- Property svn:ignore set to obj
branches/ichiriac/HeuristicLab.Algorithms.Shade/Plugin.cs
(r14088 to r14699): the unused System using directives were removed and the opening braces were moved onto the declaration lines. The file now reads:

    using HeuristicLab.PluginInfrastructure;

    namespace HeuristicLab.Algorithms.Shade {
      [Plugin("HeuristicLab.Algorithms.Shade", "Provides an implementation of SHADE algorithm", "3.3.9.0")]
      [PluginFile("HeuristicLab.Algorithms.Shade.dll", PluginFileType.Assembly)]
      public class Plugin : PluginBase {
      }
    }
branches/ichiriac/HeuristicLab.Algorithms.Shade/Shade.cs
(r14091 to r14699): a brace-style and whitespace cleanup of the SHADE implementation with two small additions; the algorithm itself is unchanged.

- An #endregion closing the license region was added directly after the GPL license comment.
- Opening braces were moved onto the declaration lines throughout the file and the space after the if/for/while keywords was dropped; no statements were altered by this reformatting.
- A SupportsPause override was added after Run():

    public override bool SupportsPause { get { return false; } } // TODO (can we pause?)

For context, Shade.cs implements "Success-History Based Parameter Adaptation for DE (SHADE)", a self-adaptive variant of differential evolution, as a BasicAlgorithm for SingleObjectiveTestFunctionProblem instances. The constructor registers the parameters Maximum Evaluations (default Int32.MaxValue), PopulationSize (75), ValueToReach (1e-8), ArchiveRate (2.0), MemorySize (0) and BestRate (0.1).

Run() first registers the result entries (Iterations, Evaluations, Best Solution, Best Quality, VTR and a Qualities data table) and allocates the population, children and fitness arrays, the external archive (ArchiveRate * PopulationSize rows) and the success-history memories M_F and M_CR, whose entries are all initialized to 0.5. The population is then initialized uniformly at random within the problem bounds and evaluated once; Obj negates the evaluator's result for maximization problems, so the algorithm always minimizes.

In each generation the population indices are sorted by fitness. For every target vector a randomly chosen memory slot provides mu_CR and mu_F: CR_i is drawn from a normal distribution N(mu_CR, 0.1) and clamped to [0, 1] (it is set to 0 when the slot holds the terminal value -1), and F_i is drawn from a Cauchy distribution C(mu_F, 0.1), resampled while non-positive and truncated to 1. A standalone sketch of this sampling step follows.
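The sketch below is illustrative only and not part of the changeset; it reproduces the sampling and repair rules of the gauss and cauchy_g helpers and the generation loop, with a plain System.Random standing in for HeuristicLab's MersenneTwister and hypothetical class and method names:

    using System;

    // Minimal sketch of SHADE's per-individual parameter sampling.
    static class ParameterSampling {
        // CR_i ~ N(mu_cr, 0.1), clamped to [0, 1]; a terminal memory value of -1 forces CR_i = 0.
        public static double SampleCr(double muCr, Random rng) {
            if (muCr == -1) return 0.0;
            double cr = Gauss(muCr, 0.1, rng);
            return Math.Max(0.0, Math.Min(1.0, cr));
        }

        // F_i ~ Cauchy(mu_sf, 0.1), resampled while non-positive and truncated to 1.
        public static double SampleF(double muSf, Random rng) {
            double f;
            do {
                f = muSf + 0.1 * Math.Tan(Math.PI * (rng.NextDouble() - 0.5));
            } while (f <= 0.0);
            return Math.Min(1.0, f);
        }

        // Box-Muller normal sample, as in Shade.gauss(); 1 - u avoids Log(0).
        private static double Gauss(double mu, double sigma, Random rng) {
            return mu + sigma * Math.Sqrt(-2.0 * Math.Log(1.0 - rng.NextDouble()))
                      * Math.Sin(2.0 * Math.PI * rng.NextDouble());
        }
    }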
Each trial vector is then generated with the current-to-pbest/1/bin strategy: a p-best individual is picked at random from the best PopulationSize * BestRate members of the sorted population, the first difference partner r1 is drawn from the population and the second partner r2 from the union of the population and the external archive; components are taken from the mutant with probability CR_i (one randomly chosen component always is), and out-of-bound components are repaired to the midpoint between the violated bound and the parent value. Note that in r14699 operateCurrentToPBest1BinWithArchive re-declares num_arc_inds as a local variable fixed at 0, so the archive branch of the operator is never actually reached. A sketch of the operator appears below.
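Again as an illustrative, self-contained sketch (System.Random, hypothetical names, no HeuristicLab types), this is the operator the file implements when the archive is populated:

    using System;

    // Sketch of the current-to-pbest/1/bin operator with an external archive.
    // pBest indexes one of the best PopulationSize * BestRate individuals,
    // archiveCount is the number of filled archive rows.
    static class CurrentToPBest {
        public static double[] CreateTrial(double[][] pop, double[][] archive, int archiveCount,
                                           int target, int pBest, double f, double cr,
                                           double lb, double ub, Random rng) {
            int n = pop[target].Length;
            int r1, r2;
            do { r1 = rng.Next(pop.Length); } while (r1 == target);
            do { r2 = rng.Next(pop.Length + archiveCount); } while (r2 == target || r2 == r1);
            // an index beyond the population selects an archived parent as the second partner
            double[] second = r2 < pop.Length ? pop[r2] : archive[r2 - pop.Length];

            int jRand = rng.Next(n);  // one component is always taken from the mutant
            double[] trial = new double[n];
            for (int j = 0; j < n; j++) {
                if (rng.NextDouble() < cr || j == jRand)
                    trial[j] = pop[target][j]
                             + f * (pop[pBest][j] - pop[target][j])
                             + f * (pop[r1][j] - second[j]);
                else
                    trial[j] = pop[target][j];

                // bound repair: move halfway back towards the parent value
                if (trial[j] < lb) trial[j] = (lb + pop[target][j]) / 2.0;
                else if (trial[j] > ub) trial[j] = (ub + pop[target][j]) / 2.0;
            }
            return trial;
        }
    }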
After all trial vectors have been evaluated, the generation alternation compares each child with its parent: equal fitness lets the child replace the parent directly, while a strict improvement additionally copies the replaced parent into the archive (overwriting a randomly chosen entry once the archive is full) and records the successful F_i, CR_i and the absolute fitness improvement. If any trial succeeded, one memory slot is overwritten with the improvement-weighted Lehmer means of the successful F and CR values (the CR slot is set to the terminal value -1 when the weighted CR sum is zero or the slot already holds -1), and the slot index advances cyclically through the MemorySize entries. A sketch of this success-history update follows.
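A self-contained sketch of that update, with hypothetical names mirroring the locals in Run():

    using System;

    // Success-history update performed at the end of each generation: one memory slot
    // receives the improvement-weighted Lehmer mean of the successful F and CR values.
    static class SuccessHistory {
        public static void Update(double[] memorySf, double[] memoryCr, ref int memoryPos,
                                  double[] successSf, double[] successCr, double[] difFitness, int count) {
            if (count == 0) return;  // no trial vector improved on its parent this generation

            double sum = 0.0;
            for (int i = 0; i < count; i++) sum += difFitness[i];

            double sf1 = 0, sf2 = 0, cr1 = 0, cr2 = 0;
            for (int i = 0; i < count; i++) {
                double w = difFitness[i] / sum;   // weight by the achieved fitness improvement
                sf1 += w * successSf[i] * successSf[i];
                sf2 += w * successSf[i];
                cr1 += w * successCr[i] * successCr[i];
                cr2 += w * successCr[i];
            }

            memorySf[memoryPos] = sf1 / sf2;      // weighted Lehmer mean of the successful F values
            // a slot becomes terminal (-1) when no successful CR was positive or it was terminal already
            memoryCr[memoryPos] = (cr2 == 0 || memoryCr[memoryPos] == -1) ? -1 : cr1 / cr2;

            memoryPos = (memoryPos + 1) % memorySf.Length;  // cyclic slot index
        }
    }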
Finally the best candidate, the evaluation and iteration counters and the Best Solution/Best Quality results are updated, the quality chart receives a new point every tenth iteration, and the loop repeats until the evaluation budget is exhausted, the run is cancelled, or the best quality falls below the best known quality plus ValueToReach, in which case the VTR result records the achieved quality. The remaining members of the file are unchanged helpers: Obj (the evaluation wrapper), getMatrixRow, cauchy_g, gauss, makeNewIndividuals (uniform initialization within the bounds) and the index-tracking Quicksort.
Note: See TracChangeset for help on using the changeset viewer.