#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Encodings.BinaryVectorEncoding;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.ParameterlessPopulationPyramid {
  // This code is based on the publication
  // B. W. Goldman and W. F. Punch, "Parameter-less Population Pyramid," GECCO, pp. 785–792, 2014
  // and the original C++11 source code available at: https://github.com/brianwgoldman/Parameter-less_Population_Pyramid
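  // Minimal usage sketch (illustration only, not part of the original source; "MersenneTwister",
  // "solution" and "Crossover" stand in for any IRandom instance, BinaryVector and consumer):
  //   var tree = new LinkageTree(length, new MersenneTwister());
  //   tree.Add(solution);                     // record pairwise bit statistics of one individual
  //   foreach (var cluster in tree.Clusters)  // lists of gene indices, smallest clusters first
  //     Crossover(cluster);                   // hypothetical consumer of the linkage information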
  [StorableClass]
  public class LinkageTree : DeepCloneable {
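    // Internal data layout (derived from the code below):
    //  - occurances[i][j] (only used for i > j) is a 4-entry histogram of the joint bit patterns
    //    observed at positions i and j over all solutions passed to Add().
    //  - clusters has 2 * length - 1 entries: the first "length" are single-gene leaf clusters,
    //    the remaining length - 1 are filled by the merges performed in Rebuild().
    //  - clusterOrdering holds the indices of the clusters exposed by the Clusters property.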
    [Storable]
    private readonly int[][][] occurances;
    [Storable]
    private readonly List<int>[] clusters;
    [Storable]
    private List<int> clusterOrdering;
    [Storable]
    private readonly int length;
    [Storable]
    private readonly IRandom rand;
    [Storable]
    private bool rebuildRequired = false;

    [StorableConstructor]
    protected LinkageTree(bool deserializing) : base() { }

    protected LinkageTree(LinkageTree original, Cloner cloner) : base(original, cloner) {
      occurances = new int[original.occurances.Length][][];
      //mkommend: the first entry is not used, cf. the loop in the public ctor below
      for (int i = 1; i < original.occurances.Length; i++) {
        occurances[i] = new int[original.occurances[i].Length][];
        for (int j = 0; j < original.occurances[i].Length; j++)
          occurances[i][j] = original.occurances[i][j].ToArray();
      }

      clusters = original.clusters.Select(c => c.ToList()).ToArray();
      clusterOrdering = new List<int>(original.clusterOrdering);
      length = original.length;
      rand = cloner.Clone(original.rand);
      rebuildRequired = original.rebuildRequired;
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new LinkageTree(this, cloner);
    }
    public LinkageTree(int length, IRandom rand) {
      this.length = length;
      this.rand = rand;
      occurances = new int[length][][];

      // Create a lower triangular matrix without the diagonal
      for (int i = 1; i < length; i++) {
        occurances[i] = new int[i][];
        for (int j = 0; j < i; j++) {
          occurances[i][j] = new int[4];
        }
      }
      clusters = new List<int>[2 * length - 1];
      for (int i = 0; i < clusters.Length; i++) {
        clusters[i] = new List<int>();
      }
      clusterOrdering = new List<int>();

      // first "length" clusters just contain a single gene
      for (int i = 0; i < length; i++) {
        clusters[i].Add(i);
      }
    }
    public void Add(BinaryVector solution) {
      if (solution.Length != length) throw new ArgumentException("The individual does not have the correct length.");
      for (int i = 1; i < solution.Length; i++) {
        for (int j = 0; j < i; j++) {
          // Updates the entry of the 4-long array based on the two bits
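          // The pattern index is 2 * bit(j) + bit(i): 0 = both bits are 0, 1 = only bit i is 1,
          // 2 = only bit j is 1, 3 = both bits are 1.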
          var pattern = (Convert.ToByte(solution[j]) << 1) + Convert.ToByte(solution[i]);
          occurances[i][j][pattern]++;
        }
      }
      rebuildRequired = true;
    }
    // While "total" always has an integer value, it is a double to reduce
    // how often type casts are needed to prevent integer division.
    // In the GECCO paper, this calculates Equation 2.
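    // I.e., it returns sum_k (counts[k] / total) * ln(counts[k] / total). For a frequency table
    // summing to "total" this is the negative Shannon entropy in nats; EntropyDistance also passes
    // the concatenated marginal counts of two bits, which yields -(H(i) + H(j)).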
    private static double NegativeEntropy(int[] counts, double total) {
      double sum = 0;
      for (int i = 0; i < counts.Length; i++) {
        if (counts[i] != 0) {
          sum += ((counts[i] / total) * Math.Log(counts[i] / total));
        }
      }
      return sum;
    }
    // Uses the frequency table to calculate the entropy distance between two indices.
    // In the GECCO paper, this calculates Equation 1.
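    // With H denoting Shannon entropy, the result is 2 - (H(i) + H(j)) / H(i, j) (the negative
    // signs from NegativeEntropy cancel in the ratio). It lies in [0, 1]: 0 when either bit
    // fully determines the other, 1 when the two bits are statistically independent.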
    private double EntropyDistance(int i, int j) {
      int[] bits = new int[4];
      // This ensures you are using the lower triangular part of "occurances"
      if (i < j) {
        int temp = i;
        i = j;
        j = temp;
      }
      var entry = occurances[i][j];
      // extracts the occurrences of the individual bits
      bits[0] = entry[0] + entry[2]; // i zero
      bits[1] = entry[1] + entry[3]; // i one
      bits[2] = entry[0] + entry[1]; // j zero
      bits[3] = entry[2] + entry[3]; // j one
      double total = bits[0] + bits[1];
      // entropy of the two bits on their own
      double separate = NegativeEntropy(bits, total);
      // entropy of the two bits as a single unit
      double together = NegativeEntropy(entry, total);
      // If together there is 0 entropy, the distance is zero
      if (together.IsAlmost(0)) {
        return 0.0;
      }
      return 2 - (separate / together);
    }
    // Performs O(N^2) clustering based on the method described in:
    // "Optimal implementations of UPGMA and other common clustering algorithms"
    // by I. Gronau and S. Moran
    // In the GECCO paper, Figure 2 is a simplified version of this algorithm.
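    // Rough idea of the implementation below: grow a "path" of clusters in which each element is
    // the nearest not-yet-merged neighbour of its predecessor; once the last two path elements are
    // mutually nearest neighbours they are merged, and distances to the new cluster are computed
    // as size-weighted averages of the old distances (average linkage).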
    private void Rebuild() {
      double[][] distances = new double[clusters.Length * 2 - 1][];
      for (int i = 0; i < distances.Length; i++)
        distances[i] = new double[clusters.Length * 2 - 1];

      // Keep track of which clusters have not been merged
      var topLevel = new List<int>(length);
      for (int i = 0; i < length; i++)
        topLevel.Add(i);

      bool[] useful = new bool[clusters.Length];
      for (int i = 0; i < useful.Length; i++)
        useful[i] = true;

      // Store the distances between all clusters
      for (int i = 1; i < length; i++) {
        for (int j = 0; j < i; j++) {
          distances[i][j] = EntropyDistance(clusters[i][0], clusters[j][0]);
          // make it symmetric
          distances[j][i] = distances[i][j];
        }
      }
      // Each iteration adds some clusters to the path and removes the last two.
      // "end_of_path" keeps track of how many entries of topLevel are currently in the path.
      int end_of_path = 0;

      // build all clusters of size greater than 1
      for (int index = length; index < clusters.Length; index++) {
        // Shuffle everything not yet in the path
        topLevel.ShuffleInPlace(rand, end_of_path, topLevel.Count - 1);

        // if nothing is in the path, just add a random usable node
        if (end_of_path == 0) {
          end_of_path = 1;
        }
        while (end_of_path < topLevel.Count) {
          // last node in the path
          int final = topLevel[end_of_path - 1];

          // best_index stores the location of the best candidate in the top level
          int best_index = end_of_path;
          double min_dist = distances[final][topLevel[best_index]];
          // check all options which might be closer to "final" than "topLevel[best_index]"
          for (int option = end_of_path + 1; option < topLevel.Count; option++) {
            if (distances[final][topLevel[option]] < min_dist) {
              min_dist = distances[final][topLevel[option]];
              best_index = option;
            }
          }
          // If the current last two in the path are minimally distant
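          // (i.e. the last two path elements are mutually nearest neighbours and can be merged)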
          if (end_of_path > 1 && min_dist >= distances[final][topLevel[end_of_path - 2]]) {
            break;
          }

          // move the best to the end of the path
          topLevel.Swap(end_of_path, best_index);
          end_of_path++;
        }
        // Last two elements in the path are the clusters to join
        int first = topLevel[end_of_path - 2];
        int second = topLevel[end_of_path - 1];

        // Only keep a cluster if the distance between the joining clusters is > zero
        bool keep = !distances[first][second].IsAlmost(0.0);
        useful[first] = keep;
        useful[second] = keep;

        // create the new cluster
        clusters[index] = clusters[first].Concat(clusters[second]).ToList();
        // Calculate distances from all clusters to the newly created cluster
        int i = 0;
        int end = topLevel.Count - 1;
        while (i <= end) {
          int x = topLevel[i];
          // Moves 'first' and 'second' to after "end" in topLevel
          if (x == first || x == second) {
            topLevel.Swap(i, end);
            end--;
            continue;
          }
          // Use the previous distances to calculate the joined distance
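          // d(new, x) = (|first| * d(first, x) + |second| * d(second, x)) / (|first| + |second|)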
          double first_distance = distances[first][x];
          first_distance *= clusters[first].Count;
          double second_distance = distances[second][x];
          second_distance *= clusters[second].Count;
          distances[x][index] = ((first_distance + second_distance)
            / (clusters[first].Count + clusters[second].Count));
          // make it symmetric
          distances[index][x] = distances[x][index];
          i++;
        }

        // Remove first and second from the path
        end_of_path -= 2;
        topLevel.RemoveAt(topLevel.Count - 1);
        topLevel[topLevel.Count - 1] = index;
      }
      // Extract the useful clusters
      clusterOrdering.Clear();
      // Add all useful clusters. The last one is never useful.
      for (int i = 0; i < useful.Length - 1; i++) {
        if (useful[i]) clusterOrdering.Add(i);
      }

      // Shuffle before sort to ensure ties are broken randomly
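      // (OrderBy is a stable sort, so shuffling first randomizes the order among clusters of equal size)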
      clusterOrdering.ShuffleInPlace(rand);
      clusterOrdering = clusterOrdering.OrderBy(i => clusters[i].Count).ToList();
    }

    public IEnumerable<List<int>> Clusters {
      get {
        // Just in time rebuilding
        if (rebuildRequired) Rebuild();
        foreach (var index in clusterOrdering) {
          // Send out the clusters in the desired order
          yield return clusters[index];
        }
      }
    }
  }
}