Changeset 8331 for branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis
- Timestamp: 07/26/12 09:51:13 (12 years ago)
- Location: branches/ScatterSearch (trunk integration)
- Files: 1 deleted, 54 edited, 14 copied
branches/ScatterSearch (trunk integration)
- Property svn:ignore changed: the Google.ProtocolBuffers-2.4.1.473.dll entry differs between the revisions (the protoc.exe and _ReSharper.HeuristicLab 3.3 Tests entries are unchanged context lines)
- Property svn:mergeinfo changed
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/CSV/ClassifiactionCSVInstanceProvider.cs
r8086 → r8331: using directives extended with System.IO, System.Linq and System.Text; Name changed from "CSV Problem Provider" to "CSV File". New members CanImportData (true) and ImportData(string path): the file is read with TableFileParser, the last double variable of the resulting Dataset becomes the target, the remaining double variables become the allowed inputs, the rows are split 2/3 training / 1/3 test, and the problem name is taken from the file name part of the path. New members CanExportData (true) and ExportData(instance, path): a semicolon-separated header of the input variables followed by all dataset rows is written through a StreamWriter.
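For orientation, a minimal usage sketch of the import/export members introduced in this revision follows. The member names (CanImportData, ImportData, CanExportData, ExportData, Name, TrainingPartition, TestPartition) are taken from the diff above; the file paths and class names of the example are hypothetical.

  using System;
  using HeuristicLab.Problems.DataAnalysis;

  namespace HeuristicLab.Problems.Instances.DataAnalysis.Examples {
    public static class CsvRoundTripSketch {
      public static void Run() {
        var provider = new ClassificationCSVInstanceProvider();
        if (provider.CanImportData) {
          // Last double column becomes the target; rows are split 2/3 training / 1/3 test.
          IClassificationProblemData data = provider.ImportData(@"C:\data\example.csv");
          Console.WriteLine("{0}: training [{1},{2}), test [{3},{4})",
            data.Name,
            data.TrainingPartition.Start, data.TrainingPartition.End,
            data.TestPartition.Start, data.TestPartition.End);
          if (provider.CanExportData)
            provider.ExportData(data, @"C:\data\example-export.csv");  // semicolon-separated output
        }
      }
    }
  }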
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/ClassificationInstanceProvider.cs
r8086 → r8331: the concrete LoadData(string path) and SaveData(instance, path) implementations, the abstract members (GetDataDescriptors, LoadData(IDataDescriptor), Name, Description, WebLink, ReferencePublication) and the now-unused using directives were removed; the class shrinks to an empty abstract class deriving from ProblemInstanceProvider&lt;IClassificationProblemData&gt; instead of implementing IProblemInstanceProvider&lt;IClassificationProblemData&gt; directly. The CSV import/export logic lives in ClassificationCSVInstanceProvider (see above).
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/RealWorld/ClassificationRealWorldInstanceProvider.cs
r8086 → r8331: GetDataDescriptors() no longer adds the Iris descriptor; the Mammography descriptor remains.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Clustering/CSV/ClusteringCSVInstanceProvider.cs
r8086 → r8331: analogous to the classification CSV provider: using System.IO and System.Text added; Name changed from "CSV Problem Provider" to "CSV File"; new CanImportData/ImportData build a ClusteringProblemData over all double variables of the parsed file (2/3 training / 1/3 test split, problem name taken from the file name); new CanExportData/ExportData write the semicolon-separated input-variable header and all dataset rows.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Clustering/ClusteringInstanceProvider.cs
r8086 → r8331: the LoadData(string path) and SaveData implementations, the abstract members and the unused using directives were removed; the class now simply derives from ProblemInstanceProvider&lt;IClusteringProblemData&gt;.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/HeuristicLab.Problems.Instances.DataAnalysis-3.3.csproj
r8086 → r8331: Compile items added for KeijzerFunctionOne.cs, KeijzerFunctionTwo.cs, KeijzerFunctionThree.cs and KeijzerFunctionTen.cs; KeijzerFunctionNine.cs moved within the item group; KeijzerFunctionSixteen.cs removed.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Plugin.cs.frame
r8086 → r8331: plugin version bumped from 3.3.6.$WCREV$ to 3.3.7.$WCREV$; the PluginDependency on HeuristicLab.Common 3.3 was removed.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Properties/AssemblyInfo.cs.frame
r8086 → r8331: AssemblyFileVersion bumped from 3.3.6.$WCREV$ to 3.3.7.$WCREV$ (AssemblyVersion stays 3.3.0.0).
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/CSV/RegressionCSVInstanceProvider.cs
r8086 → r8331: analogous to the classification CSV provider: using System.IO, System.Linq and System.Text added; Name changed from "CSV Problem Provider" to "CSV File"; new CanImportData/ImportData create a RegressionProblemData with the last double variable as target, the remaining double variables as allowed inputs, a 2/3 training / 1/3 test split and the file name as problem name; new CanExportData/ExportData write the semicolon-separated file.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Keijzer/KeijzerFunctionNine.cs
r8086 → r8331: the target function was corrected from f(x) = sqrt(x) to f(x) = arcsinh(x), i.e. ln(x + sqrt(x² + 1)); Name and Description were updated accordingly; the partition bounds changed (TrainingPartitionEnd 101 → 100, TestPartitionStart 101 → 100, TestPartitionEnd 1102 → 1100); the generated target values now use Math.Log(x + Math.Sqrt(x*x + 1)) instead of Math.Sqrt(x).
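A quick plain-C# check of the corrected target function may help; this is only an illustration and does not use HeuristicLab's ValueGenerator. The explicit ln(x + sqrt(x² + 1)) form is used here because Math.Asinh is not assumed to be available on the targeted .NET runtime (an assumption about the target framework).

  using System;

  class Keijzer9TargetCheck {
    static void Main() {
      // f(x) = arcsinh(x) = ln(x + sqrt(x^2 + 1)); e.g. f(0) = 0, f(1) ≈ 0.8814.
      for (double x = 0.0; x <= 5.0; x += 1.0) {
        double y = Math.Log(x + Math.Sqrt(x * x + 1));
        Console.WriteLine("f({0}) = {1}", x, y);
      }
    }
  }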
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Keijzer/KeijzerInstanceProvider.cs
r8086 → r8331: WebLink changed from http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/ to http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Keijzer; ReferencePublication set to McDermott et al., 2012, "Genetic Programming Needs Better Benchmarks", in Proc. of GECCO 2012; GetDataDescriptors() now also adds KeijzerFunctionOne, KeijzerFunctionTwo, KeijzerFunctionThree and KeijzerFunctionTen and no longer adds KeijzerFunctionSixteen.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionEight.cs
r8086 → r8331: Description revised: the "Real Numbers" and "Row Features" lines were dropped, the operator set now reads "+, -, *, % (protected division)" and "sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp", and a constants line (random finite 64-bit IEEE double) was added; partitions enlarged to 0–10000 training and 10000–20000 test (previously 0–5000 / 5000–10000); input generation changed from a single [0, 50] loop over all five variables to per-variable ranges: X0, X3 and X4 in [0, 50] (restricted to prevent NaN values, deviating from the GP benchmark paper) and X1, X2 in [-50, 50].
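The range restriction noted in the generator comments exists because the Korns 8 target takes a square root of 7.23 · X0 · X3 · X4, which is NaN for negative arguments. Below is a hedged sketch of that sampling scheme using System.Random in place of HeuristicLab's ValueGenerator (the use of System.Random, the seed and the sample count are assumptions for illustration only).

  using System;

  class Korns8SamplingSketch {
    static void Main() {
      var rand = new Random(0);
      // X0, X3, X4 are drawn from [0, 50] so the sqrt argument stays non-negative;
      // X1 and X2 keep the full [-50, 50] range from the benchmark definition.
      Func<double, double, double> uniform = (lo, hi) => lo + rand.NextDouble() * (hi - lo);
      for (int i = 0; i < 5; i++) {
        double x0 = uniform(0, 50), x3 = uniform(0, 50), x4 = uniform(0, 50);
        double y = 6.87 + 11.0 * Math.Sqrt(7.23 * x0 * x3 * x4);
        Console.WriteLine(y);
      }
    }
  }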
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionEleven.cs
r8086 → r8331: Description revised in the same way as for Korns 8 (protected division, protected log, constants line added; "Real Numbers"/"Row Features" lines removed).
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFive.cs
r8086 → r8331: Description revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test; input generation changed to per-variable ranges: X0–X3 in [-50, 50], X4 in [0, 50] (restricted to prevent NaN values, deviating from the GP benchmark paper).
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFiveteen.cs
r8086 → r8331: Name and Description now write the protected log as ln(X2) instead of log(X2); operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test; input generation: X2 in [0, 50] (to prevent NaN values, deviating from the GP benchmark paper), the remaining variables in [-50, 50].
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFour.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFourteen.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionNine.cs
r8086 → r8331: Name and Description now write the function as y = (sqrt(X0) / log(X1)) * (exp(X2) / X3²); operator/constants text revised; partitions enlarged to 0–10000 training and 10000–20000 test; input generation: X0 and X1 in [0, 50] (to prevent NaN values, deviating from the GP benchmark paper), X2–X4 in [-50, 50].
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionOne.cs
r8086 → r8331: Description: the "Real Numbers" line removed (the "Row Features" line is kept) and the operator/constants text revised; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionSeven.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionSix.cs
r8086 → r8331: Description revised; partitions enlarged to 0–10000 training and 10000–20000 test; input generation: X0 in [0, 50] (to prevent NaN values, deviating from the GP benchmark paper), X1–X4 in [-50, 50].
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTen.cs
r8086 → r8331: Name and Description now write the function as y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * X2²)) / ((4.0 * X3³) + (5.0 * X4^4)))); operator/constants text revised; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionThirteen.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionThree.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTwelve.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTwo.cs
r8086 → r8331: Description operator/constants text revised as in the other Korns instances; partitions enlarged to 0–10000 training and 10000–20000 test.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsInstanceProvider.cs
r8086 → r8331: WebLink changed from http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/ to http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Korns; ReferencePublication set to McDermott et al., 2012, "Genetic Programming Needs Better Benchmarks", in Proc. of GECCO 2012.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionEight.cs
r8086 → r8331: Description revised: fit cases now read "20 random points in [0, 4]", non-terminals "+, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)", terminals "only variables (no random constants)".
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionEleven.cs
r8086 → r8331: Description: fit cases changed from 100 random points to 20 random points in [0, 1]x[0, 1] and the non-terminal/terminal text revised as in the other Nguyen instances; TrainingPartitionEnd reduced from 100 to 20.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionFive.cs
r8086 → r8331: Name and Description now write the function as sin(x²)cos(x) - 1; fit-case and non-terminal/terminal text revised as in the other Nguyen instances.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionFour.cs
r8086 → r8331: Name and Description now write the function as x^6 + x^5 + x^4 + x³ + x² + x; fit-case and non-terminal/terminal text revised as in the other Nguyen instances.
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionNine.cs
r8086 r8331 27 27 public class NguyenFunctionNine : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F9 = sin(x) + sin(y ^2)"; } }29 public override string Name { get { return "Nguyen F9 = sin(x) + sin(y²)"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F9 = sin(x) + sin(y ^2)" + Environment.NewLine35 + "Fitcases: 100 random points ⊆[0, 1]x[0, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F9 = sin(x) + sin(y²)" + Environment.NewLine 35 + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } … … 42 42 protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } } 43 43 protected override int TrainingPartitionStart { get { return 0; } } 44 protected override int TrainingPartitionEnd { get { return 100; } }44 protected override int TrainingPartitionEnd { get { return 20; } } 45 45 protected override int TestPartitionStart { get { return 500; } } 46 46 protected override int TestPartitionEnd { get { return 1000; } } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionOne.cs
r8086 r8331 27 27 public class NguyenFunctionOne : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F1 = x ^3 + x^2+ x"; } }29 public override string Name { get { return "Nguyen F1 = x³ + x² + x"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F1 = x ^3 + x^2+ x" + Environment.NewLine35 + "Fitcases: 20 random points ⊆[-1, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F1 = x³ + x² + x" + Environment.NewLine 35 + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionSeven.cs
r8086 r8331 27 27 public class NguyenFunctionSeven : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F7 = log(x + 1) + log(x ^2+ 1)"; } }29 public override string Name { get { return "Nguyen F7 = log(x + 1) + log(x² + 1)"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F7 = l og(x + 1) + log(x^2+ 1)" + Environment.NewLine35 + "Fitcases: 20 random points ⊆[0, 2]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F7 = ln(x + 1) + ln(x² + 1)" + Environment.NewLine 35 + "Fitcases: 20 random points in [0, 2]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionSix.cs
r8086 r8331 27 27 public class NguyenFunctionSix : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F6 = sin(x) + sin(x + x ^2)"; } }29 public override string Name { get { return "Nguyen F6 = sin(x) + sin(x + x²)"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F6 = sin(x) + sin(x + x ^2)" + Environment.NewLine35 + "Fitcases: 20 random points ⊆[-1, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F6 = sin(x) + sin(x + x²)" + Environment.NewLine 35 + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTen.cs
r8086 r8331 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 34 + "Function: F10 = 2sin(x)cos(y)" + Environment.NewLine 35 + "Fitcases: 100 random points ⊆[0, 1]x[0, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";35 + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } … … 42 42 protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } } 43 43 protected override int TrainingPartitionStart { get { return 0; } } 44 protected override int TrainingPartitionEnd { get { return 100; } }44 protected override int TrainingPartitionEnd { get { return 20; } } 45 45 protected override int TestPartitionStart { get { return 500; } } 46 46 protected override int TestPartitionEnd { get { return 1000; } } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionThree.cs
r8086 r8331 27 27 public class NguyenFunctionThree : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F3 = x^5 + x^4 + x ^3 + x^2+ x"; } }29 public override string Name { get { return "Nguyen F3 = x^5 + x^4 + x³ + x² + x"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F3 = x^5 + x^4 + x ^3 + x^2+ x" + Environment.NewLine35 + "Fitcases: 20 random points ⊆[-1, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F3 = x^5 + x^4 + x³ + x² + x" + Environment.NewLine 35 + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTwelve.cs
r8086 r8331 27 27 public class NguyenFunctionTwelve : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F12 = x^4 - x ^3 + y^2/2 - y"; } }29 public override string Name { get { return "Nguyen F12 = x^4 - x³ + y²/2 - y"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F12 = x^4 - x ^3 + y^2/2 - y" + Environment.NewLine35 + "Fitcases: 100 random points ⊆[0, 1]x[0, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F12 = x^4 - x³ + y²/2 - y" + Environment.NewLine 35 + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } … … 42 42 protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } } 43 43 protected override int TrainingPartitionStart { get { return 0; } } 44 protected override int TrainingPartitionEnd { get { return 100; } }44 protected override int TrainingPartitionEnd { get { return 20; } } 45 45 protected override int TestPartitionStart { get { return 500; } } 46 46 protected override int TestPartitionEnd { get { return 1000; } } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTwo.cs
r8086 r8331 27 27 public class NguyenFunctionTwo : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Nguyen F2 = x^4 + x ^3 + x^2+ x"; } }29 public override string Name { get { return "Nguyen F2 = x^4 + x³ + x² + x"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine 33 33 + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine 34 + "Function: F2 = x^4 + x ^3 + x^2+ x" + Environment.NewLine35 + "Fitcases: 20 random points ⊆[-1, 1]" + Environment.NewLine36 + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine37 + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";34 + "Function: F2 = x^4 + x³ + x² + x" + Environment.NewLine 35 + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine 36 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 37 + "Terminals: only variables (no random constants)"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenInstanceProvider.cs
r8086 r8331 32 32 } 33 33 public override Uri WebLink { 34 get { return new Uri("http:// groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }34 get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Nguyen_et_al"); } 35 35 } 36 36 public override string ReferencePublication { 37 get { return " "; }37 get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; } 38 38 } 39 39 -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/RegressionInstanceProvider.cs
r8086 r8331 20 20 #endregion 21 21 22 using System;23 using System.Collections;24 using System.Collections.Generic;25 using System.IO;26 using System.Linq;27 using System.Text;28 22 using HeuristicLab.Problems.DataAnalysis; 29 23 30 24 namespace HeuristicLab.Problems.Instances.DataAnalysis { 31 public abstract class RegressionInstanceProvider : IProblemInstanceProvider<IRegressionProblemData> { 32 33 public IRegressionProblemData LoadData(string path) { 34 TableFileParser csvFileParser = new TableFileParser(); 35 csvFileParser.Parse(path); 36 37 Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values); 38 string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last(); 39 40 IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar)); 41 42 IRegressionProblemData regData = new RegressionProblemData(dataset, allowedInputVars, targetVar); 43 44 int trainingPartEnd = csvFileParser.Rows * 2 / 3; 45 regData.TrainingPartition.Start = 0; 46 regData.TrainingPartition.End = trainingPartEnd; 47 regData.TestPartition.Start = trainingPartEnd; 48 regData.TestPartition.End = csvFileParser.Rows; 49 50 int pos = path.LastIndexOf('\\'); 51 if (pos < 0) 52 regData.Name = path; 53 else { 54 pos++; 55 regData.Name = path.Substring(pos, path.Length - pos); 56 } 57 return regData; 58 } 59 60 public void SaveData(IRegressionProblemData instance, string path) { 61 StringBuilder strBuilder = new StringBuilder(); 62 63 foreach (var variable in instance.InputVariables) { 64 strBuilder.Append(variable + ";"); 65 } 66 strBuilder.Remove(strBuilder.Length - 1, 1); 67 strBuilder.AppendLine(); 68 69 Dataset dataset = instance.Dataset; 70 71 for (int i = 0; i < dataset.Rows; i++) { 72 for (int j = 0; j < dataset.Columns; j++) { 73 strBuilder.Append(dataset.GetValue(i, j) + ";"); 74 } 75 strBuilder.Remove(strBuilder.Length - 1, 1); 76 strBuilder.AppendLine(); 77 } 78 79 using (StreamWriter writer = new StreamWriter(path)) { 80 writer.Write(strBuilder); 81 } 82 } 83 84 public abstract IEnumerable<IDataDescriptor> GetDataDescriptors(); 85 public abstract IRegressionProblemData LoadData(IDataDescriptor descriptor); 86 87 public abstract string Name { get; } 88 public abstract string Description { get; } 89 public abstract Uri WebLink { get; } 90 public abstract string ReferencePublication { get; } 25 public abstract class RegressionInstanceProvider : ProblemInstanceProvider<IRegressionProblemData> { 91 26 } 92 27 } -
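The CSV import/export code shown as removed above (the class now only derives from the new ProblemInstanceProvider base) picked the last double column as target, split the rows two-thirds/one-third, and derived the problem name from the file path. A brief sketch of those two small pieces of logic, assuming only the standard library; the helper class and method names are made up for illustration:

    using System.IO;

    static class CsvImportDefaults {
      // Equivalent of the removed name handling: everything after the last path
      // separator, or the whole path when no separator is present
      // (Path.GetFileName also copes with '/' separators).
      public static string ProblemName(string path) {
        return Path.GetFileName(path);
      }

      // Default partitioning from the removed LoadData: the first two thirds of the
      // rows train, the rest test (integer division),
      // e.g. 100 rows -> rows [0, 66) for training and [66, 100) for testing.
      public static int TrainingPartitionEnd(int rows) {
        return rows * 2 / 3;
      }
    }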
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/ValueGenerator.cs
r8086 r8331 29 29 private static FastRandom rand = new FastRandom(); 30 30 31 /// <summary> 32 /// Generates a sequence of evenly spaced points between start and end (inclusive!). 33 /// </summary> 34 /// <param name="start">The smallest and first value of the sequence.</param> 35 /// <param name="end">The largest and last value of the sequence.</param> 36 /// <param name="stepWidth">The step size between subsequent values.</param> 37 /// <returns>An sequence of values from start to end (inclusive)</returns> 31 38 public static IEnumerable<double> GenerateSteps(double start, double end, double stepWidth) { 32 int steps = (int)Math.Round(((end - start) / stepWidth) + 1); 33 for (int i = 0; i < steps; i++) 34 yield return start + i * stepWidth; 39 if (start > end) throw new ArgumentException("start must be less than or equal end."); 40 if (stepWidth <= 0) throw new ArgumentException("stepwith must be larger than zero.", "stepWidth"); 41 double x = start; 42 while (x <= end) { 43 yield return x; 44 x += stepWidth; 45 } 35 46 } 36 47 37 public static IEnumerable<double> GenerateUniformDistributedValues(int amount, double start, double end) { 38 for (int i = 0; i < amount; i++) 39 yield return rand.NextDouble() * (end - start) + start; 48 /// <summary> 49 /// Generates uniformly distributed values between start and end (inclusive!) 50 /// </summary> 51 /// <param name="n">Number of values to generate.</param> 52 /// <param name="start">The lower value (inclusive)</param> 53 /// <param name="end">The upper value (inclusive)</param> 54 /// <returns>An enumerable including n values in [start, end]</returns> 55 public static IEnumerable<double> GenerateUniformDistributedValues(int n, double start, double end) { 56 for (int i = 0; i < n; i++) { 57 // we need to return a random value including end. 58 // so we cannot use rand.NextDouble() as it returns a value strictly smaller than 1.
59 double r = rand.NextUInt() / (double)uint.MaxValue; // r \in [0,1] 60 yield return r * (end - start) + start; 61 } 40 62 } 41 63 42 public static IEnumerable<double> GenerateNormalDistributedValues(int amount, double mu, double sigma) { 43 for (int i = 0; i < amount; i++) 64 /// <summary> 65 /// Generates normally distributed values sampling from N(mu, sigma) 66 /// </summary> 67 /// <param name="n">Number of values to generate.</param> 68 /// <param name="mu">The mu parameter of the normal distribution</param> 69 /// <param name="sigma">The sigma parameter of the normal distribution</param> 70 /// <returns>An enumerable including n values ~ N(mu, sigma)</returns> 71 public static IEnumerable<double> GenerateNormalDistributedValues(int n, double mu, double sigma) { 72 for (int i = 0; i < n; i++) 44 73 yield return NormalDistributedRandom.NextDouble(rand, mu, sigma); 45 74 } … … 82 111 } 83 112 } 84 85 //recursive approach86 /*public static IEnumerable<IEnumerable<double>> GenerateAllCombinationsOfValuesInLists(List<List<double>> lists) {87 int cur = 0;88 List<double> curCombination = new List<double>();89 List<List<double>> allCombinations = new List<List<double>>();90 for (int i = 0; i < lists.Count; i++) {91 allCombinations.Add(new List<double>());92 }93 if (lists.Count() > cur) {94 foreach (var item in lists[cur]) {95 curCombination.Clear();96 curCombination.Add(item);97 GetCombination(lists, cur + 1, curCombination, allCombinations);98 }99 }100 return allCombinations;101 }102 103 private static void GetCombination(List<List<double>> lists, int cur, List<double> curCombinations, List<List<double>> allCombinations) {104 if (lists.Count > cur) {105 foreach (var item in lists[cur]) {106 if (curCombinations.Count > cur) {107 curCombinations.RemoveAt(cur);108 }109 curCombinations.Add(item);110 GetCombination(lists, cur + 1, curCombinations, allCombinations);111 }112 } else {113 for (int i = 0; i < curCombinations.Count; i++) {114 allCombinations[i].Add(curCombinations[i]);115 }116 }117 } */118 119 //original120 /*public static IEnumerable<IEnumerable<double>> GenerateAllCombinationsOfValuesInLists(List<List<double>> sets) {121 122 var combinations = new List<List<double>>();123 124 foreach (var value in sets[0])125 combinations.Add(new List<double> { value });126 127 foreach (var set in sets.Skip(1))128 combinations = AddListToCombinations(combinations, set);129 130 IEnumerable<IEnumerable<double>> res = (from i in Enumerable.Range(0, sets.Count)131 select (from list in combinations132 select list.ElementAt(i)));133 134 return res;135 }136 137 private static List<List<double>> AddListToCombinations138 (List<List<double>> combinations, List<double> set) {139 var newCombinations = from value in set140 from combination in combinations141 select new List<double>(combination) { value };142 143 return newCombinations.ToList();144 } */145 113 } 146 114 } -
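The new comment in GenerateUniformDistributedValues explains the reason for the rand.NextUInt() / uint.MaxValue construction: NextDouble() is strictly smaller than 1, so the upper bound end would otherwise never be produced. A self-contained sketch of the same idea using System.Random (NextUInt is specific to HeuristicLab's FastRandom; the helper name here is illustrative):

    using System;

    static class InclusiveUniform {
      // Draw 32 random bits and scale by uint.MaxValue, so r lies in [0, 1] with both
      // endpoints reachable, unlike Random.NextDouble() which is always < 1.
      public static double Next(Random rng, double start, double end) {
        var buffer = new byte[4];
        rng.NextBytes(buffer);
        double r = BitConverter.ToUInt32(buffer, 0) / (double)uint.MaxValue;
        return r * (end - start) + start;
      }
    }

Note also that the rewritten GenerateSteps accumulates x += stepWidth in a loop rather than computing start + i * stepWidth, so whether the end value itself is emitted can depend on floating-point rounding of the step width.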
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Various/SpatialCoevolution.cs
r8086 r8331 27 27 public class SpatialCoevolution : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Spatial co-evolution F(x,y) = 1/(1 +power(x,-4)) + 1/(1+pow(y,-4))"; } }29 public override string Name { get { return "Spatial co-evolution F(x,y) = 1/(1 + x^(-4)) + 1/(1 + y^(-4))"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Evolutionary consequences of coevolving targets" + Environment.NewLine 33 33 + "Authors: Ludo Pagie and Paulien Hogeweg" + Environment.NewLine 34 + "Function: F(x,y) = 1/(1+power(x,-4)) + 1/(1+pow(y,-4))" + Environment.NewLine 35 + "Terminal set: x, y" + Environment.NewLine 34 + "Function: F(x,y) = 1/(1 + x^(-4)) + 1/(1 + y^(-4))" + Environment.NewLine 35 + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine 36 + "Terminals: only variables (no random constants)" + Environment.NewLine 36 37 + "The fitness of a solution is defined as the mean of the absolute differences between " 37 38 + "the target function and the solution over all problems on the basis of which it is evaluated. " 38 39 + "A solution is considered completely ’correct’ if, for all 676 problems in the ’complete’ " 39 40 + "problem set used in the static evaluation scheme, the absolute difference between " 40 + "solution and target function is less than 0 :01 (this is a so-called hit).";41 + "solution and target function is less than 0.01 (this is a so-called hit)."; 41 42 } 42 43 } … … 45 46 protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } } 46 47 protected override int TrainingPartitionStart { get { return 0; } } 47 protected override int TrainingPartitionEnd { get { return 1000; } }48 protected override int TestPartitionStart { get { return 1000; } }48 protected override int TrainingPartitionEnd { get { return 676; } } 49 protected override int TestPartitionStart { get { return 676; } } 49 50 protected override int TestPartitionEnd { get { return 1676; } } 50 51 … … 52 53 List<List<double>> data = new List<List<double>>(); 53 54 54 List<double> oneVariableTestData= ValueGenerator.GenerateSteps(-5, 5, 0.4).ToList();55 List<List<double>> t estData = new List<List<double>>() { oneVariableTestData, oneVariableTestData};56 var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(t estData).ToList<IEnumerable<double>>();55 List<double> evenlySpacedSequence = ValueGenerator.GenerateSteps(-5, 5, 0.4).ToList(); 56 List<List<double>> trainingData = new List<List<double>>() { evenlySpacedSequence, evenlySpacedSequence }; 57 var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(trainingData).ToList(); 57 58 58 59 for (int i = 0; i < AllowedInputVariables.Count(); i++) { 59 data.Add( ValueGenerator.GenerateUniformDistributedValues(1000, -5, 5).ToList());60 data[i].AddRange( combinations[i]);60 data.Add(combinations[i].ToList()); 61 data[i].AddRange(ValueGenerator.GenerateUniformDistributedValues(1000, -5, 5).ToList()); 61 62 } 62 63 -
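The Spatial co-evolution descriptor above defines success via per-point "hits". A minimal sketch of the target function and the hit test; the class and method names are illustrative and not part of the changeset:

    using System;

    static class SpatialCoevolutionTarget {
      // Target function from the description: F(x, y) = 1/(1 + x^-4) + 1/(1 + y^-4)
      public static double F(double x, double y) {
        return 1.0 / (1.0 + Math.Pow(x, -4)) + 1.0 / (1.0 + Math.Pow(y, -4));
      }

      // A prediction scores a hit when its absolute error is below 0.01; a solution is
      // considered fully correct when it hits all 676 training grid points.
      public static bool IsHit(double predicted, double x, double y) {
        return Math.Abs(predicted - F(x, y)) < 0.01;
      }
    }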
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/KotanchekFunction.cs
r8086 r8331 27 27 public class KotanchekFunction : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva Kotanchek"; } }29 public override string Name { get { return "Vladislavleva-1 F1(X1,X2) = exp(-(X1 - 1))² / (1.2 + (X2 -2.5)²"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine 33 33 + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine 34 + "Function: F1(X1, X2) = e ^-(X1 - 1)^2 / (1.2 + (X2 -2.5)^2" + Environment.NewLine34 + "Function: F1(X1, X2) = exp(-(X1 - 1))² / (1.2 + (X2 -2.5)²" + Environment.NewLine 35 35 + "Training Data: 100 points X1, X2 = Rand(0.3, 4)" + Environment.NewLine 36 36 + "Test Data: 2026 points (X1, X2) = (-0.2:0.1:4.2)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real, e^x, e^-x";37 + "Function Set: +, -, *, /, square, e^x, e^-x, x^eps, x + eps, x * eps"; 38 38 } 39 39 } … … 43 43 protected override int TrainingPartitionStart { get { return 0; } } 44 44 protected override int TrainingPartitionEnd { get { return 100; } } 45 protected override int TestPartitionStart { get { return 100 0; } }46 protected override int TestPartitionEnd { get { return 3025; } }45 protected override int TestPartitionStart { get { return 100; } } 46 protected override int TestPartitionEnd { get { return 2126; } } 47 47 48 48 protected override List<List<double>> GenerateValues() { … … 53 53 var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(testData).ToList<IEnumerable<double>>(); 54 54 for (int i = 0; i < AllowedInputVariables.Count(); i++) { 55 data.Add(ValueGenerator.GenerateUniformDistributedValues(100 0, 0.3, 4).ToList());55 data.Add(ValueGenerator.GenerateUniformDistributedValues(100, 0.3, 4).ToList()); 56 56 data[i].AddRange(combinations[i]); 57 57 } -
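For reference, the Kotanchek (Vladislavleva-1) function named above is usually written with the square inside the exponential, i.e. F1 = exp(-(X1 - 1)²) / (1.2 + (X2 - 2.5)²). A sketch of the evaluation; the class name is illustrative only:

    using System;

    static class Vladislavleva1 {
      // Kotanchek function, usual formulation:
      // F1(x1, x2) = exp(-(x1 - 1)^2) / (1.2 + (x2 - 2.5)^2)
      public static double Evaluate(double x1, double x2) {
        double a = x1 - 1.0;
        double b = x2 - 2.5;
        return Math.Exp(-a * a) / (1.2 + b * b);
      }
    }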
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RationalPolynomialThreeDimensional.cs
r8086 r8331 27 27 public class RationalPolynomialThreeDimensional : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva RatPol3D"; } }29 public override string Name { get { return "Vladislavleva-5 F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2² * (X1 - 10))"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine 33 33 + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine 34 + "Function: F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2 ^2* (X1 - 10))" + Environment.NewLine34 + "Function: F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2² * (X1 - 10))" + Environment.NewLine 35 35 + "Training Data: 300 points X1, X3 = Rand(0.05, 2), X2 = Rand(1, 2)" + Environment.NewLine 36 36 + "Test Data: 2701 points X1, X3 = (-0.05:0.15:2.1), X2 = (0.95:0.1:2.05)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real";37 + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps"; 38 38 } 39 39 } … … 49 49 List<List<double>> data = new List<List<double>>(); 50 50 51 int amountOfPoints= 1000;52 data.Add(ValueGenerator.GenerateUniformDistributedValues( amountOfPoints, 0.05, 2).ToList());53 data.Add(ValueGenerator.GenerateUniformDistributedValues( amountOfPoints, 1, 2).ToList());54 data.Add(ValueGenerator.GenerateUniformDistributedValues( amountOfPoints, 0.05, 2).ToList());51 int n = 1000; 52 data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 0.05, 2).ToList()); 53 data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 1, 2).ToList()); 54 data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 0.05, 2).ToList()); 55 55 56 56 List<List<double>> testData = new List<List<double>>() { -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RationalPolynomialTwoDimensional.cs
r8086 r8331 27 27 public class RationalPolynomialTwoDimensional : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva RatPol2D"; } }29 public override string Name { get { return "Vladislavleva-8 F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)³ - (X2 -3)) / ((X2 - 2)^4 + 10)"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine 33 33 + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine 34 + "Function: F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3) ^3- (X2 -3)) / ((X2 - 2)^4 + 10)" + Environment.NewLine34 + "Function: F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)³ - (X2 -3)) / ((X2 - 2)^4 + 10)" + Environment.NewLine 35 35 + "Training Data: 50 points X1, X2 = Rand(0.05, 6.05)" + Environment.NewLine 36 36 + "Test Data: 1157 points X1, X2 = (-0.25:0.2:6.35)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real";37 + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RippleFunction.cs
r8086 r8331 27 27 public class RippleFunction : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva Ripple"; } }29 public override string Name { get { return "Vladislavleva-7 F7(X1, X2) = (X1 - 3)(X2 - 3) + 2 * sin((X1 - 4)(X2 - 4))"; } } 30 30 public override string Description { 31 31 get { … … 35 35 + "Training Data: 300 points X1, X2 = Rand(0.05, 6.05)" + Environment.NewLine 36 36 + "Test Data: 1000 points X1, X2 = Rand(-0.25, 6.35)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)";37 + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x + eps"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SalutowiczFunctionOneDimensional.cs
r8086 r8331 27 27 public class SalutowiczFunctionOneDimensional : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva Salutowicz"; } }29 public override string Name { get { return "Vladislavleva-2 F2(X) = exp(-X) * X³ * cos(X) * sin(X) * (cos(X)sin(X)² - 1)"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine 33 33 + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine 34 + "Function: F2(X) = e ^-X * X^3 * cos(X) * sin(X) * (cos(X)sin(X)^2- 1)" + Environment.NewLine34 + "Function: F2(X) = exp(-X) * X³ * cos(X) * sin(X) * (cos(X)sin(X)² - 1)" + Environment.NewLine 35 35 + "Training Data: 100 points X = (0.05:0.1:10)" + Environment.NewLine 36 36 + "Test Data: 221 points X = (-0.5:0.05:10.5)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)";37 + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x + eps"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SalutowiczFunctionTwoDimensional.cs
r8086 r8331 27 27 public class SalutowiczFunctionTwoDimensional : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva Salutowicz2D"; } }29 public override string Name { get { return "Vladislavleva-3 F3(X1, X2) = exp(-X1) * X1³ * cos(X1) * sin(X1) * (cos(X1)sin(X1)² - 1)(X2 - 5)"; } } 30 30 public override string Description { 31 31 get { 32 32 return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine 33 33 + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine 34 + "Function: F3(X1, X2) = e ^-X1 * X1^3 * cos(X1) * sin(X1) * (cos(X1)sin(X1)^2- 1)(X2 - 5)" + Environment.NewLine34 + "Function: F3(X1, X2) = exp(-X1) * X1³ * cos(X1) * sin(X1) * (cos(X1)sin(X1)² - 1)(X2 - 5)" + Environment.NewLine 35 35 + "Training Data: 601 points X1 = (0.05:0.1:10), X2 = (0.05:2:10.05)" + Environment.NewLine 36 + "Test Data: 2554 points X1 = (-0.5:0.05:10.5), X2 = (-0.5:0.5:10.5)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)" + Environment.NewLine + Environment.NewLine 38 + "Important: The stepwidth of the variable X1 in the test partition has been set to 0.1, to fit the amount of data points."; 36 + "Test Data: 4840 points X1 = (-0.5:0.05:10.5), X2 = (-0.5:0.5:10.5)" + Environment.NewLine 37 + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x + eps"; 39 38 } 40 39 } … … 45 44 protected override int TrainingPartitionEnd { get { return 601; } } 46 45 protected override int TestPartitionStart { get { return 601; } } 47 protected override int TestPartitionEnd { get { return 3155; } }46 protected override int TestPartitionEnd { get { return 5441; } } 48 47 49 48 protected override List<List<double>> GenerateValues() { … … 55 54 56 55 List<List<double>> testData = new List<List<double>>() { 57 ValueGenerator.GenerateSteps(-0.5, 10.5, 0. 1).ToList(),56 ValueGenerator.GenerateSteps(-0.5, 10.5, 0.05).ToList(), 58 57 ValueGenerator.GenerateSteps(-0.5, 10.5, 0.5).ToList() 59 58 }; -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SineCosineFunction.cs
r8086 r8331 27 27 public class SineCosineFunction : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva SineCosine"; } }29 public override string Name { get { return "Vladislavleva-6 F6(X1, X2) = 6 * sin(X1) * cos(X2)"; } } 30 30 public override string Description { 31 31 get { … … 35 35 + "Training Data: 30 points X1, X2 = Rand(0.1, 5.9)" + Environment.NewLine 36 36 + "Test Data: 961 points X1, X2 = (-0.05:0.02:6.05)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real, e^x, e^-x";37 + "Function Set: +, -, *, /, square, e^x, e^-x, x^eps, x + eps, x * eps"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/UnwrappedBallFunctionFiveDimensional.cs
r8086 r8331 27 27 public class UnwrappedBallFunctionFiveDimensional : ArtificialRegressionDataDescriptor { 28 28 29 public override string Name { get { return "Vladislavleva UBall5D"; } }29 public override string Name { get { return "Vladislavleva-4 F4(X1, X2, X3, X4, X5) = 10 / (5 + Sum(Xi - 3)^2)"; } } 30 30 public override string Description { 31 31 get { … … 35 35 + "Training Data: 1024 points Xi = Rand(0.05, 6.05)" + Environment.NewLine 36 36 + "Test Data: 5000 points Xi = Rand(-0.25, 6.35)" + Environment.NewLine 37 + "Function Set: +, -, *, /, sq aure, x^real, x + real, x + real";37 + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps"; 38 38 } 39 39 } -
branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/VladislavlevaInstanceProvider.cs
r8086 r8331 32 32 } 33 33 public override Uri WebLink { 34 get { return new Uri("http:// groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }34 get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Vladislavleva_et_al"); } 35 35 } 36 36 public override string ReferencePublication { 37 get { return " "; }37 get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; } 38 38 } 39 39 … … 41 41 List<IDataDescriptor> descriptorList = new List<IDataDescriptor>(); 42 42 descriptorList.Add(new KotanchekFunction()); 43 descriptorList.Add(new RationalPolynomialTwoDimensional());44 descriptorList.Add(new RationalPolynomialThreeDimensional());45 descriptorList.Add(new RippleFunction());46 43 descriptorList.Add(new SalutowiczFunctionOneDimensional()); 47 44 descriptorList.Add(new SalutowiczFunctionTwoDimensional()); 48 45 descriptorList.Add(new UnwrappedBallFunctionFiveDimensional()); 46 descriptorList.Add(new RationalPolynomialThreeDimensional()); 47 descriptorList.Add(new SineCosineFunction()); 48 descriptorList.Add(new RippleFunction()); 49 descriptorList.Add(new RationalPolynomialTwoDimensional()); 49 50 return descriptorList; 50 51 }
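Typical consumption of these instance providers, for context: GetDataDescriptors and LoadData(IDataDescriptor) are the members visible in the diffs above, assuming the LoadData overload is still exposed through the new ProblemInstanceProvider base class; the surrounding loop is illustrative only.

    // Enumerate all Vladislavleva benchmark instances and materialize their problem data.
    var provider = new VladislavlevaInstanceProvider();
    foreach (IDataDescriptor descriptor in provider.GetDataDescriptors()) {
      IRegressionProblemData problemData = provider.LoadData(descriptor);
      // problemData carries the generated values plus the training/test partitions
      // defined by each descriptor.
    }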