Changeset 7105
- Timestamp: 11/30/11 17:21:30
- Files: 1 edited
branches/gp-crossover/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Crossovers/SymbolicDataAnalysisExpressionProbabilisticFunctionalCrossover.cs
(r7089 → r7105)

      public static ISymbolicExpressionTree Cross(IRandom random, ISymbolicExpressionTree parent0, ISymbolicExpressionTree parent1,
        ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, T problemData, IEnumerable<int> rows, int maxDepth, int maxLength) {
-       List<CutPoint> crossoverPoints0 = new List<CutPoint>();
+       var crossoverPoints0 = new List<CutPoint>();
        parent0.Root.ForEachNodePostfix((n) => {
          if (n.Subtrees.Any() && n != parent0.Root)
…
            crossoverPoints0.Add(new CutPoint(n, child));
        });
-       CutPoint crossoverPoint0 = crossoverPoints0[random.Next(crossoverPoints0.Count)];
+       var crossoverPoint0 = crossoverPoints0[random.Next(crossoverPoints0.Count)];
        int level = parent0.Root.GetBranchLevel(crossoverPoint0.Child);
        int length = parent0.Root.GetLength() - crossoverPoint0.Child.GetLength();

-       List<ISymbolicExpressionTreeNode> allowedBranches = new List<ISymbolicExpressionTreeNode>();
+       var allowedBranches = new List<ISymbolicExpressionTreeNode>();
        parent1.Root.ForEachNodePostfix((n) => {
          if (n.Subtrees.Any() && n != parent1.Root)
…
        var rootSymbol = new ProgramRootSymbol();
        var startSymbol = new StartSymbol();
-       var tree0 = CreateTreeFromNode(random, crossoverPoint0.Child, rootSymbol, startSymbol);
+       var tree0 = CreateTreeFromNode(random, crossoverPoint0.Child, rootSymbol, startSymbol); // this will change crossoverPoint0.Child.Parent
        IEnumerable<double> estimatedValues0 = interpreter.GetSymbolicExpressionTreeValues(tree0, dataset, rows);
        double min0 = estimatedValues0.Min();
        double max0 = estimatedValues0.Max();
+       crossoverPoint0.Child.Parent = crossoverPoint0.Parent; // restore correct parent

-       List<double> weights = new List<double>();
+       var weights = new List<double>();
        foreach (var node in allowedBranches) {
+         var parent = node.Parent;
          var tree1 = CreateTreeFromNode(random, node, rootSymbol, startSymbol);
          IEnumerable<double> estimatedValues1 = interpreter.GetSymbolicExpressionTreeValues(tree1, dataset, rows);
          double min1 = estimatedValues1.Min();
          double max1 = estimatedValues1.Max();
-
-         double behavioralDistance = (Math.Abs(min0 - min1) + Math.Abs(max0 - max1)) / 2;
+         double behavioralDistance = (Math.Abs(min0 - min1) + Math.Abs(max0 - max1)) / 2; // this can be NaN or Infinity because some trees are crazy like exp(exp(exp(...))), we correct that below
          weights.Add(behavioralDistance);
+         node.Parent = parent; // restore correct node parent
        }

-       ISymbolicExpressionTreeNode selectedBranch = SelectRandomBranch(random, allowedBranches, weights);
+       // remove branches with an infinite or NaN behavioral distance
+       for (int i = 0; i != weights.Count; ++i)
+         if (Double.IsNaN(weights[i]) || Double.IsInfinity(weights[i])) {
+           weights.RemoveAt(i);
+           allowedBranches.RemoveAt(i);
+         }
+
+       // check if there are any allowed branches left
+       if (allowedBranches.Count == 0)
+         return parent0;
+
+       ISymbolicExpressionTreeNode selectedBranch;
+       double sum = weights.Sum();
+       if (sum == 0.0)
+         selectedBranch = allowedBranches[0]; // just return the first, since we don't care (all weights are zero)
+       else {
+         // transform similarity distances into probabilities by normalizing and inverting the values
+         for (int i = 0; i != weights.Count; ++i)
+           weights[i] = (1 - weights[i] / sum);
+
+         selectedBranch = SelectRandomBranch(random, allowedBranches, weights);
+       }
        swap(crossoverPoint0, selectedBranch);
        return parent0;
      }
…
      private static ISymbolicExpressionTreeNode SelectRandomBranch(IRandom random, List<ISymbolicExpressionTreeNode> nodes, List<double> weights) {
-       // transform similarity distances into probabilities by normalizing and inverting the values
-       double sum = weights.Sum();
-       for (int i = 0; i != weights.Count; ++i)
-         weights[i] = (1 - weights[i] / sum);
        double r = weights.Sum() * random.NextDouble();
        for (int i = 0; i != nodes.Count; ++i) {
…
          r -= weights[i];
        }
-       return null;
+       return nodes.Last();
      }
    }
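For context, the selection scheme in this changeset can be read as a roulette-wheel draw over inverted, normalized behavioral distances: after NaN/Infinity distances are filtered out, each distance is mapped to a weight 1 - d/sum so that behaviorally closer branches receive larger weights. The sketch below is a minimal, self-contained illustration of that step only; it is not HeuristicLab code, and the names (SelectBranchIndex, ProbabilisticSelectionSketch) and the plain System.Random input are illustrative assumptions.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Illustrative sketch only (hypothetical names, not the HeuristicLab API):
    // turns behavioral distances into selection weights and draws one index
    // roulette-wheel style, mirroring the logic introduced in this changeset.
    static class ProbabilisticSelectionSketch {
      public static int SelectBranchIndex(Random random, IList<double> behavioralDistances) {
        // assumes finite, non-negative distances (NaN/Infinity already removed)
        double sum = behavioralDistances.Sum();
        if (sum == 0.0) return 0; // all distances zero: any branch is equally good

        // invert normalized distances: weight_i = 1 - d_i / sum
        var weights = behavioralDistances.Select(d => 1 - d / sum).ToList();

        // roulette-wheel draw over the weights
        double r = weights.Sum() * random.NextDouble();
        for (int i = 0; i < weights.Count; ++i) {
          if (r < weights[i]) return i;
          r -= weights[i];
        }
        return weights.Count - 1; // numerical fallback, mirrors returning the last node
      }

      static void Main() {
        var random = new Random(0);
        var distances = new List<double> { 0.1, 2.5, 0.7 };
        Console.WriteLine(SelectBranchIndex(random, distances)); // most likely 0 (smallest distance)
      }
    }

Returning the last element instead of null in the fallback case matches the change to SelectRandomBranch above, where rounding in the cumulative subtraction can otherwise leave the loop without a pick.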