Changeset 14404 for branches/MOCMAEvolutionStrategy
- Timestamp: 11/21/16 15:52:53 (8 years ago)
- Location: branches
- Files:
  - 7 added
  - 6 deleted
  - 8 edited
  - 2 copied
  - 1 moved
branches
- Property svn:global-ignores set to bin
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/CrowdingIndicator.cs
Changes (r14269 to r14404):
- The using directives are trimmed and reordered: HeuristicLab.Data is dropped; System.Collections.Generic, HeuristicLab.Common and HeuristicLab.Core are added alongside System, System.Linq, the Storable serializers and HeuristicLab.Problems.TestFunctions.MultiObjective.
- The class is now declared internal: internal class CrowdingIndicator : Item, IIndicator.
- LeastContributer is generalized from LeastContributer<R>(R[] front, Func<R, double[]> extractor, MultiObjectiveTestFunctionProblem problem) to LeastContributer<TR>(IEnumerable<TR> front, Func<TR, double[]> extractor, MultiObjectiveTestFunctionProblem problem). The objective vectors are now extracted once via front.Select(extractor.Invoke).ToArray() instead of invoking the extractor repeatedly. Per objective dimension the values are sorted and each point accumulates (arr[pos + 1] - arr[pos - 1]) / (fmax - fmin) into pointsums, with boundary points receiving double.PositiveInfinity, as before; fmax and fmin still come from the problem bounds via dim % bounds.Rows.
- The explicit loop that searched for the smallest finite sum is replaced by return pointsums.Where(x => !double.IsInfinity(x)).ArgMin();.
- The storable constructor, the cloning constructor and the Clone override are condensed to single-line members, with Clone now placed below the constructors.
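For readers who want the selection rule outside the HeuristicLab type system, the following is a minimal standalone sketch of the crowding-distance computation the indicator performs. The class, method and parameter names are illustrative only and not part of this changeset; the front is assumed to be a plain double[][] with one objective vector per point, and bounds holds one [min, max] row per dimension.

using System;
using System.Linq;

internal static class CrowdingSketch {
  // Index of the point with the smallest finite crowding sum; boundary points
  // (first or last in any objective) receive an infinite sum and are never picked.
  public static int LeastContributor(double[][] front, double[,] bounds) {
    var objectives = front[0].Length;
    var sums = new double[front.Length];
    for (var dim = 0; dim < objectives; dim++) {
      var sorted = front.Select(p => p[dim]).OrderBy(v => v).ToArray();
      var row = dim % bounds.GetLength(0);
      var range = bounds[row, 1] - bounds[row, 0];
      for (var i = 0; i < front.Length; i++) {
        var pos = Array.BinarySearch(sorted, front[i][dim]);
        sums[i] += pos > 0 && pos < sorted.Length - 1
          ? (sorted[pos + 1] - sorted[pos - 1]) / range
          : double.PositiveInfinity;
      }
    }
    var best = 0;
    var bestValue = double.PositiveInfinity;
    for (var i = 0; i < sums.Length; i++)
      if (!double.IsInfinity(sums[i]) && sums[i] < bestValue) { best = i; bestValue = sums[i]; }
    return best;
  }
}

If every sum is infinite the sketch simply falls back to index 0; how that corner case should be handled is not specified by the changeset.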
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/HypervolumeIndicator.cs
Changes (r14269 to r14404):
- The unused using HeuristicLab.Data directive is removed and the class is now declared internal.
- LeastContributer is generalized from LeastContributer<R>(R[] front, ...) to LeastContributer<TR>(IEnumerable<TR> front, Func<TR, double[]> extractor, MultiObjectiveTestFunctionProblem problem). The reference point is now built in a single pass over the front concatenated with the problem's reference point, BuildReference(frontArr.Concat(new[] { problem.TestFunction.ReferencePoint(problem.Objectives) }), problem.Maximization), instead of two chained buildReference calls; the total hypervolume is computed once via Hypervolume.Calculate, and the manual minimum search is replaced by Enumerable.Range(0, frontArr.Count).Select(i => Contribution(frontArr, i, problem.Maximization, refPoint, hv)).ArgMin().
- Contribution becomes private static and takes an IList<double[]>; it still removes the point at the given index, evaluates the remaining front against the reference point and returns that point's contribution.
- buildReference is renamed to BuildReference, becomes static, takes an IReadOnlyList<bool> for the maximization flags and keeps the per-dimension Math.Min/Math.Max aggregation; the second overload that merged two points is removed, since the Concat above makes it unnecessary.
- The storable constructor, the cloning constructor and the Clone override are condensed to single-line members.
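Hypervolume.Calculate and the reference-point handling come from HeuristicLab.Problems.TestFunctions.MultiObjective. As a self-contained illustration of the "least hypervolume contributor" idea, here is a sketch restricted to two minimized objectives; Hypervolume2D, LeastContributor and every name in it are assumptions made for the example, not the library API.

using System.Collections.Generic;
using System.Linq;

internal static class HypervolumeSketch {
  // Hypervolume dominated by a 2-objective front (both objectives minimized)
  // with respect to a reference point that is worse than every front member.
  public static double Hypervolume2D(IEnumerable<double[]> front, double[] reference) {
    var sorted = front.Where(p => p[0] <= reference[0] && p[1] <= reference[1])
                      .OrderBy(p => p[0]).ToList();
    double volume = 0, lastY = reference[1];
    foreach (var p in sorted) {
      if (p[1] >= lastY) continue;                       // dominated by an earlier point
      volume += (reference[0] - p[0]) * (lastY - p[1]);  // new slab of dominated area
      lastY = p[1];
    }
    return volume;
  }

  // Index of the point whose removal costs the least hypervolume.
  public static int LeastContributor(IList<double[]> front, double[] reference) {
    var total = Hypervolume2D(front, reference);
    var best = 0;
    var bestLoss = double.MaxValue;
    for (var i = 0; i < front.Count; i++) {
      var loss = total - Hypervolume2D(front.Where((_, j) => j != i), reference);
      if (loss < bestLoss) { best = i; bestLoss = loss; }
    }
    return best;
  }
}

The production code follows the same remove-and-recompute pattern but delegates the volume computation to Hypervolume.Calculate and is not restricted to two objectives or to minimization.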
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/IIndicator.cs
Changes (r14269 to r14404):
- using System.Collections.Generic is added.
- The documented type parameter is renamed from R to TR.
- The interface method changes from int LeastContributer<R>(R[] front, Func<R, double[]> extractor, MultiObjectiveTestFunctionProblem problem) to int LeastContributer<TR>(IEnumerable<TR> front, Func<TR, double[]> extractor, MultiObjectiveTestFunctionProblem problem). The documentation is otherwise unchanged: the front is the set to be evaluated, the extractor maps any TR to a double[] of objective values, the problem supplies only metadata such as bounds and reference points (the test function itself is not evaluated), and the return value is the index of the least contributing point; a particularly good point in one front may not be as beneficial in another.
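The extractor parameter is what lets one indicator serve both raw double[] fronts and full individuals. Below is a toy example of that generic shape; the Individual type, the objective values and the "largest first objective" rule are made up for illustration, and the MultiObjectiveTestFunctionProblem argument is deliberately left out.

using System;
using System.Collections.Generic;
using System.Linq;

internal static class ExtractorDemo {
  private class Individual { public double[] PenalizedFitness; }

  // Toy indicator: drop the member with the largest first objective.
  private static int LeastContributer<TR>(IEnumerable<TR> front, Func<TR, double[]> extractor) {
    return front.Select((item, i) => new { Value = extractor(item)[0], Index = i })
                .OrderByDescending(x => x.Value)
                .First().Index;
  }

  private static void Main() {
    var front = new List<Individual> {
      new Individual { PenalizedFitness = new[] { 0.3, 1.0 } },
      new Individual { PenalizedFitness = new[] { 0.9, 0.2 } }
    };
    // The same call shape the algorithm uses: map each individual to its objective vector.
    Console.WriteLine(LeastContributer(front, x => x.PenalizedFitness)); // prints 1
  }
}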
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/MOCMASEvolutionStrategy.cs
Changes (r14269 to r14404):
- The using directives are sorted: System.Linq and HeuristicLab.Problems.TestFunctions.MultiObjective move into alphabetical order, using HeuristicLab.Algorithms.CMAEvolutionStrategy is added, and the self-referential using HeuristicLab.Algorithms.MOCMAEvolutionStrategy is removed.
- All property accessors (ProblemType, Problem, the parameter properties, the Properties block and the ResultsProperties block) are reformatted with the opening brace on its own line; Double.NaN literals become double.NaN.
- The strategy constants that used to be fields of the algorithm (penalizeFactor, stepSizeLearningRate, stepSizeDampeningFactor, targetSuccessProbability, evolutionPathLearningRate, covarianceMatrixLearningRate, covarianceMatrixUnlearningRate, successThreshold, plus the "TODO OptionalValueParameter" note) are removed. InitStrategy still computes the same values (for example targetSuccessProbability = 1.0 / (5.0 + Math.Sqrt(lambda) / 2.0) with const int lambda = 1, covarianceMatrixUnlearningRate = 0.4 / (Math.Pow(n, 1.6) + 1), successThreshold = 0.44) but packs them into a new MOCMAESParameters object kept in the internals field, which now sits in the internal-variables region next to random, gauss and solutions.
- In the constructor the indicator parameter is built from an ItemSet<IIndicator> collection initializer { new HypervolumeIndicator(), new CrowdingIndicator() } with set.First() as the default; the storable constructor, the cloning constructor and the Clone override are condensed to single-line members.
- Run, Iterate and OnExecutionTimeChanged move to a Mainloop region near the top. Run now creates gauss = new NormalDistributedRandom(random, 0, 1) itself (this used to happen in InitStrategy) and calls AnalyzeSolutions once after initialization; Iterate uses solutions.Concat(offspring).ToArray() instead of the removed Merge helper and calls the renamed MutateOffspring (was GenerateOffspring) and SelectParents (was Selection).
- InitializeIndividual passes internals.TargetSuccessProbability and the internals object to the MOCMAESIndividual constructor. InitSolutions now loops over Problem.ProblemSize instead of Problem.Objectives when sampling the uniformly distributed start point, fixes the "dimesions" typo and notes that the UniformSolutionCreater associated with the encoding was considered not usable here.
- PenalizeEvaluate, Evaluate, Penalize, ClosestFeasible and IsFeasable are grouped in an Evaluation region. The individual's public fields x/fitness/penalizedFitness are accessed through Mean/Fitness/PenalizedFitness properties, and Penalize is now static, computing the squared distance to the closest feasible point with LINQ (x.Select((t1, i) => t1 - t[i]).Sum(d => d * d)).
- SelectParents takes an IReadOnlyList<MOCMAESIndividual>; as before it non-dominated-sorts the merged population, deselects whole worst-ranked fronts while more than the target number of individuals would remain, and then repeatedly asks the configured Indicator for the least contributor of the last affected front (Indicator.LeastContributer(front1.ToArray(), x => x.PenalizedFitness, Problem)); the selected flag becomes a Selected property.
- UpdatePopulation no longer adapts the strategy parameters itself: selected offspring call UpdateAsOffspring() and selected parents call UpdateAsParent(successCount) on the individual. The per-individual machinery (UpdateAsParent, UpdateAsOffspring, RankOneUpdate, RoundUpdate) and the whole nested MOCMAESIndividual class (chromosome fields, the alglib Cholesky decomposition, CholeskyUpdate, UpdateEvolutionPath, GetSetpNormSqr, Mutate) are removed from this file; the algorithm now only references MOCMAESIndividual and MOCMAESParameters as external types.
- AnalyzeSolutions and AnalyzeQualityIndicators use x.Fitness and x.Mean.ToArray(), obtain the bounds via Problem.Bounds.CloneAsMatrix() instead of a local conversion helper, and drop the commented-out alternative front selection as well as the "Take best of this run or current HV???" remark.
- The FastNonDominatedSort block (adapted from HeuristicLab.Optimization.Operators.FastNonDominatedSort, which does not return ranked fronts) keeps its logic but is cleaned up: var locals, const dominateOnEqualQualities, IReadOnlyList parameters for the individuals, qualities and maximization flags, early continue instead of nested conditionals, assignment to the new Rank property, and removal of an unused result array.
- The conversions region (Merge, ToArray(RealVector), ToArray(DoubleMatrix)) is removed; ToMatrix(IEnumerable<double[]>) and the AddResult helper move to a Helpers region.
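Most of the strategy-parameter handling now lives in MOCMAESIndividual and the new MOCMAESParameters container. The core of what UpdateAsOffspring and UpdateAsParent do to the step size is the success-based rule sketched below; the class and its members are illustrative stand-ins rather than the moved code, but the update formula matches the one in the changeset.

using System;

// Sketch of the smoothed-success-rate step-size adaptation:
//   p_succ <- (1 - c_p) * p_succ + c_p * success
//   sigma  <- sigma * exp((p_succ - p_target) / (d * (1 - p_target)))
internal class StepSizeAdaptationSketch {
  public double SuccessProbability { get; private set; }
  public double Sigma { get; private set; }

  private readonly double learningRate;   // c_p, in [0, 1]
  private readonly double damping;        // d
  private readonly double targetSuccess;  // p_target

  public StepSizeAdaptationSketch(double initialSigma, double learningRate, double damping, double targetSuccess) {
    Sigma = initialSigma;
    SuccessProbability = targetSuccess;
    this.learningRate = learningRate;
    this.damping = damping;
    this.targetSuccess = targetSuccess;
  }

  // success is 1 if this individual's offspring was selected into the next population, 0 otherwise.
  public void Update(int success) {
    SuccessProbability = (1 - learningRate) * SuccessProbability + learningRate * success;
    Sigma *= Math.Exp((SuccessProbability - targetSuccess) / (damping * (1 - targetSuccess)));
  }
}

With the defaults computed in InitStrategy (for lambda = 1: p_target = 1 / 5.5, d = 1 + n / 2, c_p = p_target / (2 + p_target)), repeated successes inflate sigma and repeated failures shrink it.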
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/Plugin.cs.frame
Changes (r14269 to r14404):
- The plugin version is changed from "3.3.23.$WCREV$" to "3.3.14.$WCREV$".
- The plugin dependency "HeuristicLab.Problems.MultiObjectiveTestFunctions" (3.3) is replaced by "HeuristicLab.Problems.TestFunctions.MultiObjective" (3.3); the remaining dependencies (among them HeuristicLab.ALGLIB 3.7, HeuristicLab.Persistence 3.3, HeuristicLab.Problems.Instances 3.3 and HeuristicLab.Random 3.3) are unchanged.
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/Properties/AssemblyInfo.cs.frame
Changes (r14269 to r14404):
- The AssemblyFileVersion attribute is changed from "3.3.23.$WCREV$" to "3.3.14.$WCREV$"; the AssemblyVersion stays at "3.3.0.0".
branches/MOCMAEvolutionStrategy/HeuristicLab.Algorithms.MOCMAEvolutionStrategy/3.3/Utilities.cs
Changes (r14269 to r14404):
- The unused using directives (System, System.Linq, System.Text, System.Threading.Tasks) are removed; only System.Collections.Generic and HeuristicLab.Data remain.
- The Utilities class is now static; the existing ToArray(DoubleMatrix) helper is unchanged apart from switching to var.
- New index-returning helpers are added: generic ArgMin<T> and ArgMax<T> over an IEnumerable<T> with an IComparer<T>, a private DoubleComparer, and ArgMin/ArgMax extension methods for IEnumerable<double> that return the index of the smallest respectively largest element. These back the ArgMin calls introduced in CrowdingIndicator and HypervolumeIndicator.
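A short usage sketch of the new index-returning helpers. The ArgMin below is a standalone re-implementation written for illustration (returning -1 for an empty sequence is a choice made here, not something the changeset specifies), not the Utilities extension itself.

using System;
using System.Collections.Generic;

internal static class ArgMinDemo {
  // Index of the smallest element, -1 if the sequence is empty.
  public static int ArgMin(this IEnumerable<double> values) {
    var bestIndex = -1;
    var bestValue = double.PositiveInfinity;
    var i = 0;
    foreach (var v in values) {
      if (bestIndex < 0 || v < bestValue) { bestIndex = i; bestValue = v; }
      i++;
    }
    return bestIndex;
  }

  private static void Main() {
    var crowdingSums = new[] { 3.2, 0.7, double.PositiveInfinity, 1.5 };
    Console.WriteLine(crowdingSums.ArgMin()); // prints 1
  }
}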