- Timestamp: 02/05/15 07:03:15 (10 years ago)
- Location: branches/HeuristicLab.Problems.GrammaticalOptimization
- Files: 2 added, 10 edited
Legend: lines marked "+" were added in this changeset, lines marked "-" were removed, unmarked lines are unchanged context; "..." marks omitted unchanged lines.
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Algorithms.GeneticProgramming/GenericSymbExprEvaluator.cs
r11857 → r11895

      using HeuristicLab.Operators;
      using HeuristicLab.Parameters;
+     using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
      using HeuristicLab.PluginInfrastructure;

    namespace HeuristicLab.Problems.GrammaticalOptimization {
-     // this type is not storable because we use a func<ITree,double> for evaluation, which references back to the original grammatical optimization problem
      [NonDiscoverableType]
+     [StorableClass]
      [Item("GenericSymbExprEvaluator", "Evaluator for grammatical optimization problems (using a symbolic expression tree encoding).")]
      public class GenericSymbExprEvaluator : SingleSuccessorOperator, IGenericSymbExprEvaluator {
...
        }

+       // cannot save these (eval won't work when loaded from file
        private Func<string, double> evalFunc;
        private Func<ISymbolicExpressionTree, string> toStringFunc;

+       [StorableConstructor]
+       private GenericSymbExprEvaluator(bool deserializing) : base(deserializing) { }
        public GenericSymbExprEvaluator(GenericSymbExprEvaluator original, Cloner cloner)
          : base(original, cloner) {
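In short, the evaluator is now registered with HeuristicLab's Storable persistence, while its two delegate fields stay unsaved and must be re-attached by the owning problem after loading, which is what the new comment warns about. A minimal sketch of that pattern, assuming only HeuristicLab's Persistence assembly is referenced (the class and member names here are illustrative, not part of the changeset):

using System;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

[StorableClass]
public class DelegateBackedEvaluator {
  // deliberately not marked [Storable]: a delegate that closes over the problem
  // cannot be persisted, so it is simply dropped when the run is saved to a file
  private Func<string, double> evalFunc;

  [StorableConstructor]
  private DelegateBackedEvaluator(bool deserializing) { }

  public DelegateBackedEvaluator(Func<string, double> evalFunc) {
    this.evalFunc = evalFunc;
  }

  // after deserialization the delegate has to be re-attached before the first evaluation
  public void SetEvalFunc(Func<string, double> evalFunc) { this.evalFunc = evalFunc; }

  public double Evaluate(string sentence) {
    if (evalFunc == null)
      throw new InvalidOperationException("evalFunc is not set; it is lost when loading from a file.");
    return evalFunc(sentence);
  }
}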
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Algorithms.GeneticProgramming/HeuristicLab.Algorithms.GeneticProgramming.csproj
r11857 → r11895

      <Reference Include="HeuristicLab.Algorithms.OffspringSelectionGeneticAlgorithm-3.3">
        <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.OffspringSelectionGeneticAlgorithm-3.3.dll</HintPath>
+     </Reference>
+     <Reference Include="HeuristicLab.Analysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
+       <SpecificVersion>False</SpecificVersion>
+       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Analysis-3.3.dll</HintPath>
      </Reference>
      <Reference Include="HeuristicLab.Collections-3.3">
...
        <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Selection-3.3.dll</HintPath>
      </Reference>
+     <Reference Include="HeuristicLab.SequentialEngine-3.3">
+       <HintPath>..\..\..\..\..\Program Files\HeuristicLab 3.3\HeuristicLab.SequentialEngine-3.3.dll</HintPath>
+     </Reference>
      <Reference Include="System" />
      <Reference Include="System.Core" />
...
    </ItemGroup>
    <ItemGroup>
+     <Compile Include="IGPSolver.cs" />
      <Compile Include="GenericSymbExprEvaluator.cs" />
      <Compile Include="GenericSymbExprGrammar.cs" />
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Algorithms.GeneticProgramming/OffspringSelectionGP.cs
r11851 → r11895

    using System;
+   using System.IO;
+   using System.IO.Compression;
    using System.Linq;
    using System.Threading;
    using HeuristicLab.Algorithms.GrammaticalOptimization;
+   using HeuristicLab.Analysis;
+   using HeuristicLab.Common;
    using HeuristicLab.Data;
    using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
-   using HeuristicLab.Optimization;
+   using HeuristicLab.Persistence.Default.Xml;
    using HeuristicLab.Problems.GrammaticalOptimization;
-   using HeuristicLab.Selection;
-   using HeuristicLab.Algorithms.GeneticAlgorithm;

    namespace HeuristicLab.Algorithms.GeneticProgramming {
-     public class OffspringSelectionGP : SolverBase {
+     public class OffspringSelectionGP : SolverBase, IGPSolver {
        public int PopulationSize { get; set; }
        public double MutationRate { get; set; }
...
        private readonly ISymbolicExpressionTreeProblem problem;
        private readonly Random random;
+       private readonly bool saveAlg;

-       public OffspringSelectionGP(ISymbolicExpressionTreeProblem problem, Random random) {
+       public OffspringSelectionGP(ISymbolicExpressionTreeProblem problem, Random random, bool saveAlg = false) {
          this.problem = problem;
          this.random = random;
...
          MaxSolutionSize = 100;
          MaxSolutionDepth = 17;
+         this.saveAlg = saveAlg;
...
          var hlProblem = new GenericSymbExprProblem(problem);
          var onEvalLocker = new object();
-         hlProblem.Evaluator.SolutionEvaluated += (sentence, quality) => {
-           // raise solution evaluated event for each GP solution, don't scale quality to 0..1
-           // need to synchronize in case we are using a parallel engine
-           lock (onEvalLocker) {
-             OnSolutionEvaluated(sentence, quality);
-           }
-         };
          hlProblem.MaximumSymbolicExpressionTreeLength.Value = MaxSolutionSize;
          hlProblem.MaximumSymbolicExpressionTreeDepth.Value = MaxSolutionDepth;
...
          using (var wh = new AutoResetEvent(false)) {
            var osga = new OffspringSelectionGeneticAlgorithm.OffspringSelectionGeneticAlgorithm();
-           osga.Engine = new ParallelEngine.ParallelEngine();
+           // osga.Engine = new ParallelEngine.ParallelEngine();
+           osga.Engine = new SequentialEngine.SequentialEngine();
            osga.ExceptionOccurred += (sender, args) => { Console.WriteLine(args.Value.Message); wh.Set(); };
            osga.Stopped += (sender, args) => { wh.Set(); };

+           int numEvals = 0;
+           hlProblem.Evaluator.SolutionEvaluated += (sentence, quality) => {
+             // raise solution evaluated event for each GP solution, don't scale quality to 0..1
+             // need to synchronize in case we are using a parallel engine
+             lock (onEvalLocker) {
+               OnSolutionEvaluated(sentence, quality);
+
+               // stop when maxEvals has been reached
+               if (numEvals++ >= maxEvaluations) {
+                 osga.Stop();
+               }
+             }
+           };

            osga.Problem = hlProblem;
...
            osga.Crossover = osga.CrossoverParameter.ValidValues.Single(op => op.Name == "SubtreeSwappingCrossover");
            osga.Selector = osga.SelectorParameter.ValidValues.Single(op => op.Name == "GenderSpecificSelection");
+           var multiAnalzer = (MultiAnalyzer)osga.Analyzer;
+           multiAnalzer.Operators.Add(new BestSymbolicExpressionTreeAnalyzer());

            osga.PopulationSize.Value = PopulationSize;
-           osga.MaximumGenerations.Value = 100000; // some very large value (we stop based on evaluations)
-           osga.MaximumSelectionPressure.Value = 1000;
+           osga.MaximumGenerations.Value = 1000000; // some very large value (we stop based on evaluations)
+           osga.MaximumSelectionPressure.Value = 1000000;
            osga.MaximumEvaluatedSolutions.Value = maxEvaluations;
            osga.MutationProbability.Value = MutationRate;
+           osga.ComparisonFactorLowerBound.Value = 1.0;
+           osga.ComparisonFactorUpperBound.Value = 1.0;
+           osga.SuccessRatio.Value = 1.0;

            osga.SetSeedRandomly = new BoolValue(false);
...
            wh.WaitOne();

+           if (saveAlg) {
+             var path = @"C:\Users\P24581\Desktop";
+             var fileName = string.Format("osgp-{0}{1:D2}{2:D2}{3:D2}{4:D2}.hl", DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day, DateTime.Now.Hour, DateTime.Now.Minute);
+             var fullPath = Path.Combine(path, fileName);
+             HeuristicLab.Persistence.Core.ConfigurationService.Instance.LoadSettings();
+             XmlGenerator.Serialize(osga, fullPath, CompressionLevel.Fastest);
+           }
          }
        }
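The important behavioural change above is that the evaluation budget is now enforced inside the SolutionEvaluated handler: a counter guarded by a lock (so it also works with a parallel engine) calls Stop() once maxEvaluations is reached, instead of relying on a generation limit. A self-contained sketch of that pattern, with toy type names rather than the HeuristicLab API:

using System;

// toy stand-in for an algorithm that raises an event per evaluated solution
public class ToyAlgorithm {
  public event Action<string, double> SolutionEvaluated;
  private volatile bool stopped;
  public void Stop() { stopped = true; }
  public void Run(Random rand) {
    while (!stopped) {
      // pretend to evaluate a candidate and report it
      SolutionEvaluated?.Invoke("candidate" + rand.Next(1000), rand.NextDouble());
    }
  }
}

public static class StopAfterBudgetDemo {
  public static void Main() {
    const int maxEvaluations = 10000;
    var alg = new ToyAlgorithm();
    var locker = new object();
    int numEvals = 0;

    alg.SolutionEvaluated += (sentence, quality) => {
      lock (locker) {                       // handlers may run concurrently with a parallel engine
        if (numEvals++ >= maxEvaluations)   // same post-increment test as in the changeset
          alg.Stop();
      }
    };

    alg.Run(new Random(31415));
    Console.WriteLine("evaluations: " + numEvals);
  }
}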
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Algorithms.GeneticProgramming/StandardGP.cs
r11851 → r11895

    using System;
+   using System.IO;
+   using System.IO.Compression;
    using System.Linq;
    using System.Threading;
    using HeuristicLab.Algorithms.GrammaticalOptimization;
+   using HeuristicLab.Common;
    using HeuristicLab.Data;
    using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
-   using HeuristicLab.Optimization;
+   using HeuristicLab.Persistence.Default.Xml;
    using HeuristicLab.Problems.GrammaticalOptimization;
    using HeuristicLab.Selection;
-   using HeuristicLab.Algorithms.GeneticAlgorithm;

    namespace HeuristicLab.Algorithms.GeneticProgramming {
-     public class StandardGP : SolverBase {
+     public class StandardGP : SolverBase, IGPSolver {
        public int PopulationSize { get; set; }
        public double MutationRate { get; set; }
...
        private readonly ISymbolicExpressionTreeProblem problem;
        private readonly Random random;
+       private readonly bool saveAlg;

-       public StandardGP(ISymbolicExpressionTreeProblem problem, Random random) {
+       public StandardGP(ISymbolicExpressionTreeProblem problem, Random random, bool saveAlg = false) {
          this.problem = problem;
          this.random = random;
...
          MaxSolutionSize = 100;
          MaxSolutionDepth = 17;
+         this.saveAlg = saveAlg;
...
          var hlProblem = new GenericSymbExprProblem(problem);
          var onEvalLocker = new object();
-         hlProblem.Evaluator.SolutionEvaluated += (sentence, quality) => {
-           // raise solution evaluated event for each GP solution, don't scale quality to 0..1
-           // need to synchronize in case we are using a parallel engine
-           lock (onEvalLocker) {
-             OnSolutionEvaluated(sentence, quality);
-           }
-         };
          hlProblem.MaximumSymbolicExpressionTreeLength.Value = MaxSolutionSize;
          hlProblem.MaximumSymbolicExpressionTreeDepth.Value = MaxSolutionDepth;
...
            ga.ExceptionOccurred += (sender, args) => { Console.WriteLine(args.Value.Message); wh.Set(); };
            ga.Stopped += (sender, args) => { wh.Set(); };

+           int numEvals = 0;
+           hlProblem.Evaluator.SolutionEvaluated += (sentence, quality) => {
+             // raise solution evaluated event for each GP solution, don't scale quality to 0..1
+             // need to synchronize in case we are using a parallel engine
+             lock (onEvalLocker) {
+               OnSolutionEvaluated(sentence, quality);
+
+               // stop when maxEvals has been reached
+               if (numEvals++ >= maxEvaluations) {
+                 ga.Stop();
+               }
+             }
+           };

            ga.Problem = hlProblem;
...
            ga.PopulationSize.Value = PopulationSize;
-           ga.MaximumGenerations.Value = maxEvaluations / PopulationSize + 1; // one extra generation in case maxEvaluations is not a multiple of PopulationSize
+           ga.MaximumGenerations.Value = 1000000; // very large value (we stop in the evaluate handler)
            ga.MutationProbability.Value = MutationRate;
...
            wh.WaitOne();

+           if (saveAlg) {
+             var path = @"C:\Users\P24581\Desktop";
+             var fileName = string.Format("osgp-{0}{1:D2}{2:D2}{3:D2}{4:D2}.hl", DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day, DateTime.Now.Hour, DateTime.Now.Minute);
+             var fullPath = Path.Combine(path, fileName);
+             HeuristicLab.Persistence.Core.ConfigurationService.Instance.LoadSettings();
+             XmlGenerator.Serialize(ga, fullPath, CompressionLevel.Fastest);
+           }
          }
        }
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Problems.GrammaticalOptimization.SymbReg/HeuristicLab.Problems.GrammaticalOptimization.SymbReg.csproj
r11851 → r11895

    </PropertyGroup>
    <ItemGroup>
+     <Reference Include="ALGLIB-3.7.0, Version=3.7.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
+       <SpecificVersion>False</SpecificVersion>
+       <HintPath>..\..\..\trunk\sources\bin\ALGLIB-3.7.0.dll</HintPath>
+     </Reference>
+     <Reference Include="AutoDiff-1.0">
+       <HintPath>..\..\..\trunk\sources\bin\AutoDiff-1.0.dll</HintPath>
+     </Reference>
      <Reference Include="HeuristicLab.Common-3.3">
        <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Common-3.3.dll</HintPath>
...
      <Reference Include="HeuristicLab.Data-3.3">
        <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Data-3.3.dll</HintPath>
+     </Reference>
+     <Reference Include="HeuristicLab.Encodings.SymbolicExpressionTreeEncoding-3.4, Version=3.4.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
+       <SpecificVersion>False</SpecificVersion>
+       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Encodings.SymbolicExpressionTreeEncoding-3.4.dll</HintPath>
      </Reference>
      <Reference Include="HeuristicLab.Problems.DataAnalysis-3.4">
...
    </ItemGroup>
    <ItemGroup>
+     <Compile Include="ExpressionCompiler.cs" />
      <Compile Include="Properties\AssemblyInfo.cs" />
      <Compile Include="SymbolicRegressionProblem.cs" />
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Problems.GrammaticalOptimization.SymbReg/SymbolicRegressionProblem.cs
r11851 → r11895

    using System;
+   using System.CodeDom.Compiler;
    using System.Collections.Generic;
+   using System.Diagnostics;
    using System.Linq;
    using System.Security;
    using System.Security.AccessControl;
+   using System.Security.Authentication.ExtendedProtection.Configuration;
    using System.Text;
+   using AutoDiff;
    using HeuristicLab.Common;
+   using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    using HeuristicLab.Problems.DataAnalysis;
    using HeuristicLab.Problems.Instances;
...
    namespace HeuristicLab.Problems.GrammaticalOptimization.SymbReg {
      // provides bridge to HL regression problem instances
-     public class SymbolicRegressionProblem : IProblem {
+     public class SymbolicRegressionProblem : ISymbolicExpressionTreeProblem {

        private const string grammarString = @"
        G(E):
-       E -> V | V+E | V-E | V*E | V/E | (E) | C | C+E | C-E | C*E | C/E
+       E -> V | C | V+E | V-E | V*E | V%E | (E) | C+E | C-E | C*E | C%E
        C -> 0..9
        V -> <variables>
...
        // C represents Koza-style ERC (the symbol is the index of the ERC), the values are initialized below

+       // S .. Sum (+), N .. Neg. sum (-), P .. Product (*), D .. Division (%)
+       private const string treeGrammarString = @"
+       G(E):
+       E -> V | C | S | N | P | D
+       S -> EE | EEE
+       N -> EE | EEE
+       P -> EE | EEE
+       D -> EE
+       C -> 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9
+       V -> <variables>
+       ";
+
+       // when we use constants optimization we can completely ignore all constants by a simple strategy:
+       // introduce a constant factor for each complete term
+       // introduce a constant offset for each complete expression (including expressions in brackets)
+       // e.g. 1*(2*a + b - 3 + 4) is the same as c0*a + c1*b + c2
+
        private readonly IGrammar grammar;

        private readonly int N;
-       private readonly double[][] x;
+       private readonly double[,] x;
        private readonly double[] y;
        private readonly int d;
...
          this.d = problemData.AllowedInputVariables.Count();
          if (d > 26) throw new NotSupportedException(); // we only allow single-character terminal symbols so far
-         this.x = new double[N][];
+         this.x = new double[N, d];
          this.y = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).ToArray();

          int i = 0;
          foreach (var r in problemData.TrainingIndices) {
-           x[i] = new double[d];
            int j = 0;
            foreach (var inputVariable in problemData.AllowedInputVariables) {
-             x[i][j++] = problemData.Dataset.GetDoubleValue(inputVariable, r);
+             x[i, j++] = problemData.Dataset.GetDoubleValue(inputVariable, r);
            }
            i++;
...
          char lastVar = Convert.ToChar(Convert.ToByte('a') + d - 1);
          this.grammar = new Grammar(grammarString.Replace("<variables>", firstVar + " .. " + lastVar));
+         this.TreeBasedGPGrammar = new Grammar(treeGrammarString.Replace("<variables>", firstVar + " .. " + lastVar));
        }
...
        public double Evaluate(string sentence) {
+         return OptimizeConstantsAndEvaluate(sentence);
+       }
+
+       public double SimpleEvaluate(string sentence) {
          var interpreter = new ExpressionInterpreter();
-         return HeuristicLab.Common.Extensions.RSq(y, Enumerable.Range(0, N).Select(i => interpreter.Interpret(sentence, x[i], erc)));
+         var rowData = new double[d];
+         return HeuristicLab.Common.Extensions.RSq(y, Enumerable.Range(0, N).Select(i => {
+           for (int j = 0; j < d; j++) rowData[j] = x[i, j];
+           return interpreter.Interpret(sentence, rowData, erc);
+         }));
        }
...
-       public IEnumerable<Feature> GetFeatures(string phrase)
-       {
+       public IEnumerable<Feature> GetFeatures(string phrase) {
          throw new NotImplementedException();
        }

-       /*
-       public static double OptimizeConstants(string sentence) {
-
-       List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
-       List<AutoDiff.Variable> parameters = new List<AutoDiff.Variable>();
-       List<string> variableNames = new List<string>();
-
+       public double OptimizeConstantsAndEvaluate(string sentence) {
        AutoDiff.Term func;
-       if (!TryTransformToAutoDiff(sentence, 0, variables, parameters, out func))
-         throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
-       if (variableNames.Count == 0) return 0.0;
-
-       AutoDiff.IParametricCompiledTerm compiledFunc = AutoDiff.TermUtils.Compile(func, variables.ToArray(), parameters.ToArray());
-
-       List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
-       double[] c = new double[variables.Count];
-
-       {
-         c[0] = 0.0;
-         c[1] = 1.0;
-         //extract inital constants
-         int i = 2;
-         foreach (var node in terminalNodes) {
-           ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
-           VariableTreeNode variableTreeNode = node as VariableTreeNode;
-           if (constantTreeNode != null)
-             c[i++] = constantTreeNode.Value;
-           else if (variableTreeNode != null)
-             c[i++] = variableTreeNode.Weight;
-         }
-       }
-       double[] originalConstants = (double[])c.Clone();
-       double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
+       int pos = 0;
+
+       var compiler = new ExpressionCompiler();
+       Variable[] variables;
+       Variable[] constants;
+       compiler.Compile(sentence, out func, out variables, out constants);
+
+       // constants are manipulated
+
+       if (!constants.Any()) return SimpleEvaluate(sentence);
+
+       AutoDiff.IParametricCompiledTerm compiledFunc = func.Compile(constants, variables); // variate constants leave variables fixed to data
+
+       double[] c = constants.Select(_ => 1.0).ToArray(); // start with ones

        alglib.lsfitstate state;
...
        int info;
-       Dataset ds = problemData.Dataset;
-       double[,] x = new double[rows.Count(), variableNames.Count];
-       int row = 0;
-       foreach (var r in rows) {
-         for (int col = 0; col < variableNames.Count; col++) {
-           x[row, col] = ds.GetDoubleValue(variableNames[col], r);
-         }
-         row++;
-       }
-       double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
        int n = x.GetLength(0);
        int m = x.GetLength(1);
...
        alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(compiledFunc);

+       const int maxIterations = 10;
        try {
          alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
...
          alglib.lsfitresults(state, out info, out c, out rep);
        } catch (ArithmeticException) {
-         return originalQuality;
+         return 0.0;
        } catch (alglib.alglibexception) {
-         return originalQuality;
+         return 0.0;
        }

        //info == -7 => constant optimization failed due to wrong gradient
-       if (info != -7) throw new ArgumentException();
+       if (info == -7) throw new ArgumentException();
+       {
+         var rowData = new double[d];
+         return HeuristicLab.Common.Extensions.RSq(y, Enumerable.Range(0, N).Select(i => {
+           for (int j = 0; j < d; j++) rowData[j] = x[i, j];
+           return compiledFunc.Evaluate(c, rowData);
+         }));
+       }
        }
...
        }

-       private static bool TryTransformToAutoDiff(string phrase, int symbolPos, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, out AutoDiff.Term term)
-       {
-         var curSy = phrase[0];
-         if () {
-           var var = new AutoDiff.Variable();
-           variables.Add(var);
-           term = var;
-           return true;
-         }
-         if (node.Symbol is Variable) {
-           var varNode = node as VariableTreeNode;
-           var par = new AutoDiff.Variable();
-           parameters.Add(par);
-           variableNames.Add(varNode.VariableName);
-           var w = new AutoDiff.Variable();
-           variables.Add(w);
-           term = AutoDiff.TermBuilder.Product(w, par);
-           return true;
-         }
-         if (node.Symbol is Addition) {
-           List<AutoDiff.Term> terms = new List<Term>();
-           foreach (var subTree in node.Subtrees) {
-             AutoDiff.Term t;
-             if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out t)) {
-               term = null;
-               return false;
-             }
-             terms.Add(t);
-           }
-           term = AutoDiff.TermBuilder.Sum(terms);
-           return true;
-         }
-         if (node.Symbol is Subtraction) {
-           List<AutoDiff.Term> terms = new List<Term>();
-           for (int i = 0; i < node.SubtreeCount; i++) {
-             AutoDiff.Term t;
-             if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
-               term = null;
-               return false;
-             }
-             if (i > 0) t = -t;
-             terms.Add(t);
-           }
-           term = AutoDiff.TermBuilder.Sum(terms);
-           return true;
-         }
-         if (node.Symbol is Multiplication) {
-           AutoDiff.Term a, b;
-           if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
-               !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
-             term = null;
-             return false;
-           } else {
-             List<AutoDiff.Term> factors = new List<Term>();
-             foreach (var subTree in node.Subtrees.Skip(2)) {
-               AutoDiff.Term f;
-               if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
-                 term = null;
-                 return false;
-               }
-               factors.Add(f);
-             }
-             term = AutoDiff.TermBuilder.Product(a, b, factors.ToArray());
-             return true;
-           }
-         }
-         if (node.Symbol is Division) {
-           // only works for at least two subtrees
-           AutoDiff.Term a, b;
-           if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
-               !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
-             term = null;
-             return false;
-           } else {
-             List<AutoDiff.Term> factors = new List<Term>();
-             foreach (var subTree in node.Subtrees.Skip(2)) {
-               AutoDiff.Term f;
-               if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
-                 term = null;
-                 return false;
-               }
-               factors.Add(1.0 / f);
-             }
-             term = AutoDiff.TermBuilder.Product(a, 1.0 / b, factors.ToArray());
-             return true;
-           }
-         }
-
-         if (node.Symbol is StartSymbol) {
-           var alpha = new AutoDiff.Variable();
-           var beta = new AutoDiff.Variable();
-           variables.Add(beta);
-           variables.Add(alpha);
-           AutoDiff.Term branchTerm;
-           if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out branchTerm)) {
-             term = branchTerm * alpha + beta;
-             return true;
-           } else {
-             term = null;
-             return false;
-           }
-         }
-         term = null;
-         return false;
-       }
-       */
+       public IGrammar TreeBasedGPGrammar { get; private set; }
+       public string ConvertTreeToSentence(ISymbolicExpressionTree tree) {
+         var sb = new StringBuilder();
+
+         TreeToSentence(tree.Root.GetSubtree(0).GetSubtree(0), sb);
+
+         return sb.ToString();
+       }
+
+       private void TreeToSentence(ISymbolicExpressionTreeNode treeNode, StringBuilder sb) {
+         if (treeNode.SubtreeCount == 0) {
+           // terminal
+           sb.Append(treeNode.Symbol.Name);
+         } else {
+           string op = string.Empty;
+           switch (treeNode.Symbol.Name) {
+             case "S": op = "+"; break;
+             case "N": op = "-"; break;
+             case "P": op = "*"; break;
+             case "D": op = "%"; break;
+             default: {
+                 Debug.Assert(treeNode.SubtreeCount == 1);
+                 break;
+               }
+           }
+           // nonterminal
+           if (op == "+" || op == "-") sb.Append("(");
+           TreeToSentence(treeNode.Subtrees.First(), sb);
+           foreach (var subTree in treeNode.Subtrees.Skip(1)) {
+             sb.Append(op);
+             TreeToSentence(subTree, sb);
+           }
+           if (op == "+" || op == "-") sb.Append(")");
+         }
+       }
      }
    }
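Two ideas in this file are worth spelling out. First, the constants-optimization comment: once a multiplicative constant is fitted per term and an additive offset per expression, explicit constants in the grammar become redundant; for example 1*(2*a + b - 3 + 4) simplifies to 2a + b + 1, which is exactly c0*a + c1*b + c2 with fitted c0 = 2, c1 = 1, c2 = 1. Second, ConvertTreeToSentence maps the tree grammar's function symbols S, N, P, D back to the string grammar's operators +, -, * and protected division %. A self-contained sketch of that mapping with a toy node type (not the HeuristicLab ISymbolicExpressionTreeNode API):

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

// toy tree node; "S", "N", "P", "D" are function symbols, anything else is a terminal
public class ToyNode {
  public string Symbol;
  public List<ToyNode> Children;
  public ToyNode(string symbol, params ToyNode[] children) {
    Symbol = symbol;
    Children = children.ToList();
  }
}

public static class TreeToSentenceDemo {
  public static void Convert(ToyNode node, StringBuilder sb) {
    if (node.Children.Count == 0) { sb.Append(node.Symbol); return; }  // terminal: variable or ERC digit

    string op;
    switch (node.Symbol) {
      case "S": op = "+"; break;  // sum
      case "N": op = "-"; break;  // "negative sum", i.e. a subtraction chain
      case "P": op = "*"; break;  // product
      case "D": op = "%"; break;  // protected division
      default:
        // other nonterminals are passed through (the HL version asserts a single subtree here)
        Convert(node.Children.Single(), sb);
        return;
    }

    // parenthesize + and - chains so the string grammar's precedence is preserved
    if (op == "+" || op == "-") sb.Append("(");
    Convert(node.Children.First(), sb);
    foreach (var child in node.Children.Skip(1)) {
      sb.Append(op);
      Convert(child, sb);
    }
    if (op == "+" || op == "-") sb.Append(")");
  }

  public static void Main() {
    // S(P(a, b), c)  ->  "(a*b+c)"
    var tree = new ToyNode("S", new ToyNode("P", new ToyNode("a"), new ToyNode("b")), new ToyNode("c"));
    var sb = new StringBuilder();
    Convert(tree, sb);
    Console.WriteLine(sb.ToString());
  }
}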
branches/HeuristicLab.Problems.GrammaticalOptimization/HeuristicLab.Problems.GrammaticalOptimization/ExpressionInterpreter.cs
r11848 → r11895

    // interprets sentences from L(G(Expr)):
    // Expr -> Term { ('+' | '-' | '^' ) Term }
-   // Term -> Fact { ('*' | '/') Fact }
+   // Term -> Fact { ('*' | '%') Fact }
    // Fact -> '!' Expr | '(' Expr ')' | Var | const
    // Var -> 'a'..'z'
    // const -> '0' .. '9'

+   // uses protected division symbol %
    // constants are Koza-style ephemeral random constants (ERC). for now we only allow up to 10 constants.
    // The constant symbols '0' .. '9' are treated as ERC indices
...
      var d = new double[vars.Length];
      for (int i = 0; i < vars.Length; i++) d[i] = vars[i] ? 1.0 : 0.0;
-     return !
+     return !DoubleExtensions.IsAlmost(Expr(d, erc), 0);
    }
...
      if (curSy == '+') {
        NewSy();
-       r += Expr(d, erc);
+       r += Term(d, erc);
      } else if (curSy == '-') {
        NewSy();
-       r -= Expr(d, erc);
+       r -= Term(d, erc);
      } else {
        NewSy();
-       var e = Expr(d, erc);
-       r = Not(r) * e + r * Not(e); // xor = (!x AND y) OR (x AND !y)
+       throw new NotImplementedException();
+       // var e = Expr(d, erc);
+       // r = Not(r) * e + r * Not(e); // xor = (!x AND y) OR (x AND !y)
      }
      curSy = CurSy();
...
      r = Fact(d, erc);
      var curSy = CurSy();
-     while (curSy == '*' || curSy == '/') {
+     while (curSy == '*' || curSy == '%') {
        if (curSy == '*') {
          NewSy();
-         r *= Term(d, erc);
+         r *= Fact(d, erc);
        } else {
          NewSy();
-         r /= Term(d, erc);
+         var nom = Fact(d, erc);
+         if (HeuristicLab.Common.Extensions.IsAlmost(nom, 0.0)) nom = 1.0;
+         r /= nom;
        }
        curSy = CurSy();
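The interpreter changes replace the unprotected '/' with the protected division symbol '%': a denominator that is almost zero is swapped for 1.0, so sampled sentences can no longer blow up to infinity or NaN during evaluation, and the Expr/Term recursion now folds operator chains left to right instead of recursing into the whole remaining expression. A tiny self-contained sketch of the protection itself (the tolerance shown is illustrative; the changeset uses the IsAlmost helper):

using System;

public static class ProtectedDivisionDemo {
  // protected division: if the denominator is (almost) zero, divide by 1.0 instead
  public static double ProtectedDiv(double numerator, double denominator, double eps = 1e-12) {
    if (Math.Abs(denominator) < eps) denominator = 1.0;
    return numerator / denominator;
  }

  public static void Main() {
    Console.WriteLine(ProtectedDiv(3.0, 2.0));  // 1.5
    Console.WriteLine(ProtectedDiv(3.0, 0.0));  // 3 (denominator replaced by 1.0)
  }
}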
branches/HeuristicLab.Problems.GrammaticalOptimization/Main/Program.cs
r11865 → r11895

    using System.Diagnostics;
    using System.Globalization;
+   using System.Runtime.Remoting.Messaging;
    using System.Text;
    using System.Threading;
...
    using HeuristicLab.Algorithms.GrammaticalOptimization;
    using HeuristicLab.Problems.GrammaticalOptimization;
+   using HeuristicLab.Problems.GrammaticalOptimization.SymbReg;
    using BoltzmannExplorationPolicy = HeuristicLab.Algorithms.Bandits.BanditPolicies.BoltzmannExplorationPolicy;
    using EpsGreedyPolicy = HeuristicLab.Algorithms.Bandits.BanditPolicies.EpsGreedyPolicy;
...
      //RunDemo();
-     //RunGpDemo();
+     RunGpDemo();
      // RunGridTest();
-     RunGpGridTest();
+     //RunGpGridTest();
    }
...
      const int maxIterations = 100000;

+     //var prob = new SymbolicRegressionProblem(new Random(31415), "Tower");
+     var prob = new SymbolicRegressionPoly10Problem();
+     var sgp = new OffspringSelectionGP(prob, new Random(seed), true);
+     RunGP(sgp, prob, 200000, 500, 0.15, 50);
    }
...
      const int nReps = 20;
      const int seed = 31415;
-     const int maxIters = 100000;
+     const int maxIters = 200000;
      var rand = new Random(seed);
      var problemFactories = new Func<ISymbolicExpressionTreeProblem>[]
      {
+       () => new SymbolicRegressionPoly10Problem(),
        () => new SantaFeAntProblem(),
-       () => new SymbolicRegressionPoly10Problem(),
      };
-     foreach (var popSize in new int[] { 50, 100, 250, 500, 1000, 2500, 5000 }) {
-       foreach (var mutationRate in new double[] {/* 0.05, 0.10, */ 0.15, /* 0.25, 0.3 */ }) {
-         foreach (var maxSize in new int[] { 30, 50, 100 }) {
-           foreach (var problemFactory in problemFactories)
-             for (int i = 0; i < nReps; i++) {
-               var solverSeed = rand.Next();
-               {
-                 var prob = problemFactory();
-                 RunStandardGP(prob, solverSeed, maxIters, popSize, mutationRate, maxSize);
-               }
-               {
-                 var prob = problemFactory();
-                 RunOSGP(prob, solverSeed, maxIters, popSize, mutationRate, maxSize);
-               }
-             }
+     foreach (var popSize in new int[] { 50, 100, 250, 500, 1000, 2500, 5000, 10000 }) {
+       foreach (var mutationRate in new double[] { /* 0.05, /* 0.10, */ 0.15, /* 0.25, 0.3 */}) {
+         foreach (var maxSize in new int[] { 30, 50, 100, 150, 250 }) {
+           // skip experiments that are already done
+           if (popSize == 10000 || maxSize == 150 || maxSize == 250) {
+             foreach (var problemFactory in problemFactories)
+               for (int i = 0; i < nReps; i++) {
+                 var solverSeed = rand.Next();
+                 {
+                   var prob = problemFactory();
+                   var sgp = new StandardGP(prob, new Random(solverSeed));
+                   RunGP(sgp, prob, maxIters, popSize, mutationRate, maxSize);
+                 }
+                 // {
+                 //   var prob = problemFactory();
+                 //   var osgp = new OffspringSelectionGP(prob, new Random(solverSeed));
+                 //   RunGP(osgp, prob, maxIters, popSize, mutationRate, maxSize);
+                 // }
+               }
+           }
          }
        }
      }
    }

-   private static void RunStandardGP(ISymbolicExpressionTreeProblem prob, int solverSeed, int maxIters, int popSize, double mutationRate, int maxSize) {
+   private static void RunGP(IGPSolver gp, ISymbolicExpressionTreeProblem prob, int maxIters, int popSize, double mutationRate, int maxSize) {
      int iterations = 0;
      var globalStatistics = new SentenceSetStatistics(prob.BestKnownQuality(maxSize));
-
-     var gp = new StandardGP(prob, new Random(solverSeed));
+     var gpName = gp.GetType().Name;
+     var probName = prob.GetType().Name;
      gp.SolutionEvaluated += (sentence, quality) => {
        iterations++;
        globalStatistics.AddSentence(sentence, quality);

-       if (iterations % 10000 == 0) {
-         Console.WriteLine("\"{0,25}\" \"{1,25}\" {2}", gp, prob, globalStatistics);
+       if (iterations % 1000 == 0) {
+         Console.WriteLine("\"{0,25}\" {1} {2:N2} {3} \"{4,25}\" {5}", gpName, popSize, mutationRate, maxSize, probName, globalStatistics);
        }
      };
...
      gp.MaxSolutionDepth = maxSize + 2;

-     var sw = new Stopwatch();
-
-     sw.Start();
      gp.Run(maxIters);
-     sw.Stop();
-
-     Console.WriteLine("\"{0,25}\" \"{1,25}\" {2}", gp, prob, globalStatistics);
-
-     // Console.WriteLine("{0:F2} sec {1,10:F1} sols/sec {2,10:F1} ns/sol",
-     //   sw.Elapsed.TotalSeconds,
-     //   maxIters / (double)sw.Elapsed.TotalSeconds,
-     //   (double)sw.ElapsedMilliseconds * 1000 / maxIters);
-   }
-
-   private static void RunOSGP(ISymbolicExpressionTreeProblem prob, int solverSeed, int maxIters, int popSize, double mutationRate, int maxSize) {
-     int iterations = 0;
-     var globalStatistics = new SentenceSetStatistics(prob.BestKnownQuality(maxSize));
-
-     var gp = new OffspringSelectionGP(prob, new Random(solverSeed));
-     gp.SolutionEvaluated += (sentence, quality) => {
-       iterations++;
-       globalStatistics.AddSentence(sentence, quality);
-
-       if (iterations % 10000 == 0) {
-         Console.WriteLine("\"{0,25}\" \"{1,25}\" {2}", gp, prob, globalStatistics);
-       }
-     };
-
-     gp.PopulationSize = popSize;
-     gp.MutationRate = mutationRate;
-     gp.MaxSolutionSize = maxSize + 2;
-     gp.MaxSolutionDepth = maxSize + 2;
-
-     var sw = new Stopwatch();
-
-     sw.Start();
-     gp.Run(maxIters);
-     sw.Stop();
-
-     Console.WriteLine("\"{0,25}\" \"{1,25}\" {2}", gp, prob, globalStatistics);
-
-     // Console.WriteLine("{0:F2} sec {1,10:F1} sols/sec {2,10:F1} ns/sol",
-     //   sw.Elapsed.TotalSeconds,
-     //   maxIters / (double)sw.Elapsed.TotalSeconds,
-     //   (double)sw.ElapsedMilliseconds * 1000 / maxIters);
    }
  }
branches/HeuristicLab.Problems.GrammaticalOptimization/Test/TestSolvers.cs
r11851 → r11895

    using HeuristicLab.Algorithms.GeneticProgramming;
    using HeuristicLab.Algorithms.GrammaticalOptimization;
+   using HeuristicLab.Problems.GrammaticalOptimization.SymbReg;
    using Microsoft.VisualStudio.TestTools.UnitTesting;
...
    [TestMethod]
-   public void TestOSGP() {
-     var prob = new SymbolicRegressionPoly10Problem();
+   public void TestOSGP()
+   {
+     const int seed = 31415;
+     var prob = new SymbolicRegressionProblem(new Random(seed), "Tower");
      var rand = new Random(31415);
      var osgp = new OffspringSelectionGP(prob, rand);
-     osgp.PopulationSize = 1000;
-     osgp.MaxSolutionSize = 50;
+     osgp.PopulationSize = 500;
+     osgp.MaxSolutionSize = 100;
      osgp.MaxSolutionDepth = 20;
...
      osgp.FoundNewBestSolution += (sentence, quality) => {
-       Assert.Inconclusive(string.Format("{0:N3} {1}", quality, sentence));
+       // Assert.Inconclusive(string.Format("{0:N3} {1}", quality, sentence));
        bestSentence = sentence;
        bestQuality = quality;
branches/HeuristicLab.Problems.GrammaticalOptimization/Test/TestSymbRegInstances.cs
r11832 → r11895

    using System;
    using System.Linq;
+   using HeuristicLab.Algorithms.GeneticProgramming;
    using HeuristicLab.Algorithms.GrammaticalOptimization;
    using HeuristicLab.Problems.GrammaticalOptimization.SymbReg;
...
      var problem = new SymbolicRegressionProblem(new Random(), "Tower");
      double r2;
+     Assert.AreEqual(problem.Evaluate("a*b"), problem.OptimizeConstantsAndEvaluate("a*b"));
+     Assert.AreEqual(problem.OptimizeConstantsAndEvaluate("a*b"), problem.Evaluate("a*b"));
+     Assert.AreEqual(problem.OptimizeConstantsAndEvaluate("0*a*b"), problem.Evaluate("a*b"), 1E-6);
+     Assert.AreEqual(problem.OptimizeConstantsAndEvaluate("0*a*b+1"), problem.Evaluate("a*b"), 1E-6);
+     Assert.IsTrue(problem.OptimizeConstantsAndEvaluate("0*a+b") >= problem.Evaluate("a+b"));
+     Assert.AreEqual(problem.OptimizeConstantsAndEvaluate("0*a+0*b"), problem.Evaluate("a+b"), 1E-6);
+     Assert.IsTrue(problem.OptimizeConstantsAndEvaluate("0*a+1*b") >= problem.Evaluate("a+b"));
    }
  }