Timestamp: 07/26/12 09:51:13
Author: jkarder
Message: #1331: merged r8086:8330 from trunk
Location: branches/ScatterSearch (trunk integration)
Files: 1 deleted, 54 edited, 14 copied

Legend:

Each file below is shown as a Trac diff between r8086 and r8331. Every diff line is prefixed with its line number in the old and/or new revision: a line carrying both numbers is unmodified, a line with only the old number was removed, and a line with only the new number was added.
  • branches/ScatterSearch (trunk integration)

  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/CSV/ClassifiactionCSVInstanceProvider.cs

    r8086 r8331  
    2020#endregion
    2121
    22 
    2322using System;
    2423using System.Collections.Generic;
     24using System.IO;
     25using System.Linq;
     26using System.Text;
    2527using HeuristicLab.Problems.DataAnalysis;
     28
    2629namespace HeuristicLab.Problems.Instances.DataAnalysis {
    2730  public class ClassificationCSVInstanceProvider : ClassificationInstanceProvider {
    2831    public override string Name {
    29       get { return "CSV Problem Provider"; }
     32      get { return "CSV File"; }
    3033    }
    3134    public override string Description {
     
    4851      throw new NotImplementedException();
    4952    }
     53
     54    public override bool CanImportData {
     55      get { return true; }
     56    }
     57    public override IClassificationProblemData ImportData(string path) {
     58      TableFileParser csvFileParser = new TableFileParser();
     59
     60      csvFileParser.Parse(path);
     61
     62      Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
     63      string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
     64      IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
     65
     66      ClassificationProblemData claData = new ClassificationProblemData(dataset, allowedInputVars, targetVar);
     67
     68      int trainingPartEnd = csvFileParser.Rows * 2 / 3;
     69      claData.TrainingPartition.Start = 0;
     70      claData.TrainingPartition.End = trainingPartEnd;
     71      claData.TestPartition.Start = trainingPartEnd;
     72      claData.TestPartition.End = csvFileParser.Rows;
     73      int pos = path.LastIndexOf('\\');
     74      if (pos < 0)
     75        claData.Name = path;
     76      else {
     77        pos++;
     78        claData.Name = path.Substring(pos, path.Length - pos);
     79      }
     80
     81      return claData;
     82    }
     83
     84    public override bool CanExportData {
     85      get { return true; }
     86    }
     87    public override void ExportData(IClassificationProblemData instance, string path) {
     88      StringBuilder strBuilder = new StringBuilder();
     89
     90      foreach (var variable in instance.InputVariables) {
     91        strBuilder.Append(variable + ";");
     92      }
     93      strBuilder.Remove(strBuilder.Length - 1, 1);
     94      strBuilder.AppendLine();
     95
     96      Dataset dataset = instance.Dataset;
     97
     98      for (int i = 0; i < dataset.Rows; i++) {
     99        for (int j = 0; j < dataset.Columns; j++) {
     100          strBuilder.Append(dataset.GetValue(i, j) + ";");
     101        }
     102        strBuilder.Remove(strBuilder.Length - 1, 1);
     103        strBuilder.AppendLine();
     104      }
     105
     106      using (StreamWriter writer = new StreamWriter(path)) {
     107        writer.Write(strBuilder);
     108      }
     109    }
    50110  }
    51111}
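
The diff above replaces the provider's old LoadData/SaveData path with an ImportData/ExportData pair guarded by CanImportData/CanExportData. A minimal round-trip sketch of that new surface (not part of the changeset; the file paths are hypothetical):

    // Sketch only: exercises the ImportData/ExportData members added in the diff above.
    using HeuristicLab.Problems.DataAnalysis;

    namespace HeuristicLab.Problems.Instances.DataAnalysis {
      public static class CsvRoundTripExample {
        public static void Run() {
          var provider = new ClassificationCSVInstanceProvider();
          if (provider.CanImportData && provider.CanExportData) {
            // ImportData parses the CSV, takes the last double column as target variable
            // and splits the rows 2:1 into training and test partition (see diff above).
            IClassificationProblemData data = provider.ImportData(@"C:\data\iris.csv");  // hypothetical path
            provider.ExportData(data, @"C:\data\iris-export.csv");                       // semicolon-separated output
          }
        }
      }
    }
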
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/ClassificationInstanceProvider.cs

    r8086 r8331  
    2020#endregion
    2121
    22 using System;
    23 using System.Collections;
    24 using System.Collections.Generic;
    25 using System.IO;
    26 using System.Linq;
    27 using System.Text;
    2822using HeuristicLab.Problems.DataAnalysis;
    2923
    3024namespace HeuristicLab.Problems.Instances.DataAnalysis {
    31   public abstract class ClassificationInstanceProvider : IProblemInstanceProvider<IClassificationProblemData> {
    32     public IClassificationProblemData LoadData(string path) {
    33       TableFileParser csvFileParser = new TableFileParser();
    34 
    35       csvFileParser.Parse(path);
    36 
    37       Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    38       string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
    39       IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
    40 
    41       ClassificationProblemData claData = new ClassificationProblemData(dataset, allowedInputVars, targetVar);
    42 
    43       int trainingPartEnd = csvFileParser.Rows * 2 / 3;
    44       claData.TrainingPartition.Start = 0;
    45       claData.TrainingPartition.End = trainingPartEnd;
    46       claData.TestPartition.Start = trainingPartEnd;
    47       claData.TestPartition.End = csvFileParser.Rows;
    48       int pos = path.LastIndexOf('\\');
    49       if (pos < 0)
    50         claData.Name = path;
    51       else {
    52         pos++;
    53         claData.Name = path.Substring(pos, path.Length - pos);
    54       }
    55 
    56       return claData;
    57     }
    58 
    59     public void SaveData(IClassificationProblemData instance, string path) {
    60       StringBuilder strBuilder = new StringBuilder();
    61 
    62       foreach (var variable in instance.InputVariables) {
    63         strBuilder.Append(variable + ";");
    64       }
    65       strBuilder.Remove(strBuilder.Length - 1, 1);
    66       strBuilder.AppendLine();
    67 
    68       Dataset dataset = instance.Dataset;
    69 
    70       for (int i = 0; i < dataset.Rows; i++) {
    71         for (int j = 0; j < dataset.Columns; j++) {
    72           strBuilder.Append(dataset.GetValue(i, j) + ";");
    73         }
    74         strBuilder.Remove(strBuilder.Length - 1, 1);
    75         strBuilder.AppendLine();
    76       }
    77 
    78       using (StreamWriter writer = new StreamWriter(path)) {
    79         writer.Write(strBuilder);
    80       }
    81     }
    82 
    83     public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
    84     public abstract IClassificationProblemData LoadData(IDataDescriptor descriptor);
    85 
    86     public abstract string Name { get; }
    87     public abstract string Description { get; }
    88     public abstract Uri WebLink { get; }
    89     public abstract string ReferencePublication { get; }
     25  public abstract class ClassificationInstanceProvider : ProblemInstanceProvider<IClassificationProblemData> {
    9026  }
    9127}
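
After this change the abstract class is reduced to a thin subclass of the new generic ProblemInstanceProvider<IClassificationProblemData> base; the CSV parsing and writing that used to live in LoadData/SaveData has moved into ClassificationCSVInstanceProvider above. A sketch of what a concrete provider still overrides after the refactoring, inferred from the overrides visible in this changeset (the new base class itself is not part of this diff, so the exact member set is an assumption):

    // Sketch only: member set inferred from the providers in this changeset.
    using System;
    using System.Collections.Generic;
    using HeuristicLab.Problems.DataAnalysis;

    namespace HeuristicLab.Problems.Instances.DataAnalysis {
      public class MyClassificationInstanceProvider : ClassificationInstanceProvider {
        public override string Name { get { return "My Provider"; } }
        public override string Description { get { return "Hypothetical example provider."; } }
        public override Uri WebLink { get { return null; } }
        public override string ReferencePublication { get { return ""; } }

        public override IEnumerable<IDataDescriptor> GetDataDescriptors() {
          return new List<IDataDescriptor>();
        }
        public override IClassificationProblemData LoadData(IDataDescriptor descriptor) {
          throw new NotImplementedException();
        }
      }
    }
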
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/RealWorld/ClassificationRealWorldInstanceProvider.cs

    r8086 r8331  
    4747    public override IEnumerable<IDataDescriptor> GetDataDescriptors() {
    4848      List<IDataDescriptor> descriptorList = new List<IDataDescriptor>();
    49       descriptorList.Add(new Iris());
    5049      descriptorList.Add(new Mammography());
    5150      var solutionsArchiveName = GetResourceName(FileName + @"\.zip");
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Clustering/CSV/ClusteringCSVInstanceProvider.cs

    r8086 r8331  
    2222using System;
    2323using System.Collections.Generic;
     24using System.IO;
     25using System.Text;
    2426using HeuristicLab.Problems.DataAnalysis;
    2527
     
    2729  public class ClusteringCSVInstanceProvider : ClusteringInstanceProvider {
    2830    public override string Name {
    29       get { return "CSV Problem Provider"; }
     31      get { return "CSV File"; }
    3032    }
    3133    public override string Description {
     
    4850      throw new NotImplementedException();
    4951    }
     52
     53    public override bool CanImportData {
     54      get { return true; }
     55    }
     56    public override IClusteringProblemData ImportData(string path) {
     57      var csvFileParser = new TableFileParser();
     58
     59      csvFileParser.Parse(path);
     60
     61      var dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
     62      var claData = new ClusteringProblemData(dataset, dataset.DoubleVariables);
     63
     64      int trainingPartEnd = csvFileParser.Rows * 2 / 3;
     65      claData.TrainingPartition.Start = 0;
     66      claData.TrainingPartition.End = trainingPartEnd;
     67      claData.TestPartition.Start = trainingPartEnd;
     68      claData.TestPartition.End = csvFileParser.Rows;
     69      int pos = path.LastIndexOf('\\');
     70      if (pos < 0)
     71        claData.Name = path;
     72      else {
     73        pos++;
     74        claData.Name = path.Substring(pos, path.Length - pos);
     75      }
     76
     77      return claData;
     78    }
     79
     80    public override bool CanExportData {
     81      get { return true; }
     82    }
     83    public override void ExportData(IClusteringProblemData instance, string path) {
     84      var strBuilder = new StringBuilder();
     85
     86      foreach (var variable in instance.InputVariables) {
     87        strBuilder.Append(variable + ";");
     88      }
     89      strBuilder.Remove(strBuilder.Length - 1, 1);
     90      strBuilder.AppendLine();
     91
     92      var dataset = instance.Dataset;
     93
     94      for (int i = 0; i < dataset.Rows; i++) {
     95        for (int j = 0; j < dataset.Columns; j++) {
     96          strBuilder.Append(dataset.GetValue(i, j) + ";");
     97        }
     98        strBuilder.Remove(strBuilder.Length - 1, 1);
     99        strBuilder.AppendLine();
     100      }
     101
     102      using (var writer = new StreamWriter(path)) {
     103        writer.Write(strBuilder);
     104      }
     105    }
    50106  }
    51107}
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Clustering/ClusteringInstanceProvider.cs

    r8086 r8331  
    2020#endregion
    2121
    22 using System;
    23 using System.Collections.Generic;
    24 using System.IO;
    25 using System.Text;
    2622using HeuristicLab.Problems.DataAnalysis;
    2723
    2824namespace HeuristicLab.Problems.Instances.DataAnalysis {
    29   public abstract class ClusteringInstanceProvider : IProblemInstanceProvider<IClusteringProblemData> {
    30     public IClusteringProblemData LoadData(string path) {
    31       var csvFileParser = new TableFileParser();
    32 
    33       csvFileParser.Parse(path);
    34 
    35       var dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    36       var claData = new ClusteringProblemData(dataset, dataset.DoubleVariables);
    37 
    38       int trainingPartEnd = csvFileParser.Rows * 2 / 3;
    39       claData.TrainingPartition.Start = 0;
    40       claData.TrainingPartition.End = trainingPartEnd;
    41       claData.TestPartition.Start = trainingPartEnd;
    42       claData.TestPartition.End = csvFileParser.Rows;
    43       int pos = path.LastIndexOf('\\');
    44       if (pos < 0)
    45         claData.Name = path;
    46       else {
    47         pos++;
    48         claData.Name = path.Substring(pos, path.Length - pos);
    49       }
    50 
    51       return claData;
    52     }
    53 
    54     public void SaveData(IClusteringProblemData instance, string path) {
    55       var strBuilder = new StringBuilder();
    56 
    57       foreach (var variable in instance.InputVariables) {
    58         strBuilder.Append(variable + ";");
    59       }
    60       strBuilder.Remove(strBuilder.Length - 1, 1);
    61       strBuilder.AppendLine();
    62 
    63       var dataset = instance.Dataset;
    64 
    65       for (int i = 0; i < dataset.Rows; i++) {
    66         for (int j = 0; j < dataset.Columns; j++) {
    67           strBuilder.Append(dataset.GetValue(i, j) + ";");
    68         }
    69         strBuilder.Remove(strBuilder.Length - 1, 1);
    70         strBuilder.AppendLine();
    71       }
    72 
    73       using (var writer = new StreamWriter(path)) {
    74         writer.Write(strBuilder);
    75       }
    76     }
    77 
    78     public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
    79     public abstract IClusteringProblemData LoadData(IDataDescriptor descriptor);
    80 
    81     public abstract string Name { get; }
    82     public abstract string Description { get; }
    83     public abstract Uri WebLink { get; }
    84     public abstract string ReferencePublication { get; }
     25  public abstract class ClusteringInstanceProvider : ProblemInstanceProvider<IClusteringProblemData> {
    8526  }
    8627}
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/HeuristicLab.Problems.Instances.DataAnalysis-3.3.csproj

    r8086 r8331  
    130130    <Compile Include="Regression\Keijzer\KeijzerFunctionFourteen.cs" />
    131131    <Compile Include="Regression\Keijzer\KeijzerFunctionEleven.cs" />
    132     <Compile Include="Regression\Keijzer\KeijzerFunctionNine.cs" />
    133132    <Compile Include="Regression\Keijzer\KeijzerFunctionFive.cs" />
    134133    <Compile Include="Regression\Keijzer\KeijzerFunctionEight.cs" />
    135134    <Compile Include="Regression\Keijzer\KeijzerFunctionFifteen.cs" />
    136135    <Compile Include="Regression\Keijzer\KeijzerFunctionFour.cs" />
     136    <Compile Include="Regression\Keijzer\KeijzerFunctionNine.cs" />
     137    <Compile Include="Regression\Keijzer\KeijzerFunctionOne.cs" />
    137138    <Compile Include="Regression\Keijzer\KeijzerFunctionSeven.cs" />
    138139    <Compile Include="Regression\Keijzer\KeijzerFunctionSix.cs" />
    139     <Compile Include="Regression\Keijzer\KeijzerFunctionSixteen.cs" />
     140    <Compile Include="Regression\Keijzer\KeijzerFunctionTen.cs" />
    140141    <Compile Include="Regression\Keijzer\KeijzerFunctionThirteen.cs" />
     142    <Compile Include="Regression\Keijzer\KeijzerFunctionThree.cs" />
    141143    <Compile Include="Regression\Keijzer\KeijzerFunctionTwelve.cs" />
     144    <Compile Include="Regression\Keijzer\KeijzerFunctionTwo.cs" />
    142145    <Compile Include="Regression\Keijzer\KeijzerInstanceProvider.cs" />
    143146    <Compile Include="Regression\Korns\KornsFunctionEight.cs" />
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Plugin.cs.frame

    r8086 r8331  
    2323
    2424namespace HeuristicLab.Problems.Instances.DataAnalysis {
    25   [Plugin("HeuristicLab.Problems.Instances.DataAnalysis", "3.3.6.$WCREV$")]
     25  [Plugin("HeuristicLab.Problems.Instances.DataAnalysis", "3.3.7.$WCREV$")]
    2626  [PluginFile("HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll", PluginFileType.Assembly)]
    27   [PluginDependency("HeuristicLab.Common", "3.3")]
    2827  [PluginDependency("HeuristicLab.Core", "3.3")]
    2928  [PluginDependency("HeuristicLab.Data", "3.3")]
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Properties/AssemblyInfo.cs.frame

    r8086 r8331  
    5555// [assembly: AssemblyVersion("1.0.*")]
    5656[assembly: AssemblyVersion("3.3.0.0")]
    57 [assembly: AssemblyFileVersion("3.3.6.$WCREV$")]
     57[assembly: AssemblyFileVersion("3.3.7.$WCREV$")]
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/CSV/RegressionCSVInstanceProvider.cs

    r8086 r8331  
    2020#endregion
    2121
    22 
    2322using System;
    2423using System.Collections.Generic;
     24using System.IO;
     25using System.Linq;
     26using System.Text;
    2527using HeuristicLab.Problems.DataAnalysis;
     28
    2629namespace HeuristicLab.Problems.Instances.DataAnalysis {
    2730  public class RegressionCSVInstanceProvider : RegressionInstanceProvider {
    2831    public override string Name {
    29       get { return "CSV Problem Provider"; }
     32      get { return "CSV File"; }
    3033    }
    3134    public override string Description {
     
    4447      return new List<IDataDescriptor>();
    4548    }
    46 
    4749    public override IRegressionProblemData LoadData(IDataDescriptor descriptor) {
    4850      throw new NotImplementedException();
    4951    }
     52
     53    public override bool CanImportData {
     54      get { return true; }
     55    }
     56    public override IRegressionProblemData ImportData(string path) {
     57      TableFileParser csvFileParser = new TableFileParser();
     58      csvFileParser.Parse(path);
     59
     60      Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
     61      string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
     62
     63      IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
     64
     65      IRegressionProblemData regData = new RegressionProblemData(dataset, allowedInputVars, targetVar);
     66
     67      int trainingPartEnd = csvFileParser.Rows * 2 / 3;
     68      regData.TrainingPartition.Start = 0;
     69      regData.TrainingPartition.End = trainingPartEnd;
     70      regData.TestPartition.Start = trainingPartEnd;
     71      regData.TestPartition.End = csvFileParser.Rows;
     72
     73      int pos = path.LastIndexOf('\\');
     74      if (pos < 0)
     75        regData.Name = path;
     76      else {
     77        pos++;
     78        regData.Name = path.Substring(pos, path.Length - pos);
     79      }
     80      return regData;
     81    }
     82
     83    public override bool CanExportData {
     84      get { return true; }
     85    }
     86    public override void ExportData(IRegressionProblemData instance, string path) {
     87      StringBuilder strBuilder = new StringBuilder();
     88
     89      foreach (var variable in instance.InputVariables) {
     90        strBuilder.Append(variable + ";");
     91      }
     92      strBuilder.Remove(strBuilder.Length - 1, 1);
     93      strBuilder.AppendLine();
     94
     95      Dataset dataset = instance.Dataset;
     96
     97      for (int i = 0; i < dataset.Rows; i++) {
     98        for (int j = 0; j < dataset.Columns; j++) {
     99          strBuilder.Append(dataset.GetValue(i, j) + ";");
     100        }
     101        strBuilder.Remove(strBuilder.Length - 1, 1);
     102        strBuilder.AppendLine();
     103      }
     104
     105      using (StreamWriter writer = new StreamWriter(path)) {
     106        writer.Write(strBuilder);
     107      }
     108    }
    50109  }
    51110}
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Keijzer/KeijzerFunctionNine.cs

    r8086 r8331  
    2727  public class KeijzerFunctionNine : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Keijzer 9 f(x) = sqrt(x)"; } }
     29    public override string Name { get { return "Keijzer 9 f(x) = arcsinh(x)  i.e. ln(x + sqrt(x² + 1))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Improving Symbolic Regression with Interval Arithmetic and Linear Scaling" + Environment.NewLine
    3333        + "Authors: Maarten Keijzer" + Environment.NewLine
    34         + "Function: f(x) = sqrt(x)" + Environment.NewLine
     34        + "Function: f(x) = arcsinh(x)  i.e. ln(x + sqrt(x² + 1))" + Environment.NewLine
    3535        + "range(train): x = [0:1:100]" + Environment.NewLine
    3636        + "range(test): x = [0:0.1:100]" + Environment.NewLine
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 101; } }
    45     protected override int TestPartitionStart { get { return 101; } }
    46     protected override int TestPartitionEnd { get { return 1102; } }
     44    protected override int TrainingPartitionEnd { get { return 100; } }
     45    protected override int TestPartitionStart { get { return 100; } }
     46    protected override int TestPartitionEnd { get { return 1100; } }
    4747
    4848    protected override List<List<double>> GenerateValues() {
     
    5555      for (int i = 0; i < data[0].Count; i++) {
    5656        x = data[0][i];
    57         results.Add(Math.Sqrt(x));
     57        results.Add(Math.Log(x + Math.Sqrt(x*x + 1)));
    5858      }
    5959      data.Add(results);
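
The Keijzer 9 target is corrected here from sqrt(x) to the inverse hyperbolic sine; the expression generated in the diff, ln(x + sqrt(x² + 1)), is exactly arcsinh(x). A small sanity check of that identity (not part of the changeset; Math.Asinh did not exist in the .NET Framework of that era, which is presumably why the explicit formula is used above):

    // Sketch only: numerically confirms ln(x + sqrt(x^2 + 1)) == arcsinh(x).
    using System;

    public static class Keijzer9IdentityCheck {
      public static void Run() {
        double x = 3.7;                                         // arbitrary sample point
        double viaFormula = Math.Log(x + Math.Sqrt(x * x + 1)); // formula used in the diff
        double viaAsinh = Math.Asinh(x);                        // available on .NET Core 2.1+ / .NET 5+
        Console.WriteLine(viaFormula - viaAsinh);               // ~0 up to rounding error
      }
    }
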
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Keijzer/KeijzerInstanceProvider.cs

    r8086 r8331  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Keijzer"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
    4040    public override IEnumerable<IDataDescriptor> GetDataDescriptors() {
    4141      List<IDataDescriptor> descriptorList = new List<IDataDescriptor>();
     42      descriptorList.Add(new KeijzerFunctionOne());
     43      descriptorList.Add(new KeijzerFunctionTwo());
     44      descriptorList.Add(new KeijzerFunctionThree());
    4245      descriptorList.Add(new KeijzerFunctionFour());
    4346      descriptorList.Add(new KeijzerFunctionFive());
     
    4649      descriptorList.Add(new KeijzerFunctionEight());
    4750      descriptorList.Add(new KeijzerFunctionNine());
     51      descriptorList.Add(new KeijzerFunctionTen());
    4852      descriptorList.Add(new KeijzerFunctionEleven());
    4953      descriptorList.Add(new KeijzerFunctionTwelve());
     
    5155      descriptorList.Add(new KeijzerFunctionFourteen());
    5256      descriptorList.Add(new KeijzerFunctionFifteen());
    53       descriptorList.Add(new KeijzerFunctionSixteen());
    5457      return descriptorList;
    5558    }
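
With Keijzer functions one, two, three and ten now registered (and sixteen dropped), the whole set can be enumerated through the provider. A short sketch, assuming the provider members visible in this changeset; that IDataDescriptor exposes a Name property is an assumption:

    // Sketch only: enumerate and load the Keijzer instances registered above.
    using System;
    using HeuristicLab.Problems.DataAnalysis;

    namespace HeuristicLab.Problems.Instances.DataAnalysis {
      public static class ListKeijzerInstances {
        public static void Run() {
          var provider = new KeijzerInstanceProvider();
          foreach (IDataDescriptor descriptor in provider.GetDataDescriptors()) {
            IRegressionProblemData data = provider.LoadData(descriptor);
            Console.WriteLine("{0}: {1} training rows",
              descriptor.Name,                                           // assumed member of IDataDescriptor
              data.TrainingPartition.End - data.TrainingPartition.Start);
          }
        }
      }
    }
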
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionEight.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 6.87 + (11 * sqrt(7.23 * X0 * X3 * X4))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
    5960
    6061      double x0, x3, x4;
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionEleven.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 6.87 + (11 * cos(7.23 * X0 * X0 * X0))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFive.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 3.0 + (2.13 * log(X4))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
    5960
    6061      double x4;
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFiveteen.cs

    r8086 r8331  
    2727  public class KornsFunctionFiveteen : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Korns 15 y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (log(X2) - tan(X3))))"; } }
     29    public override string Name { get { return "Korns 15 y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (ln(X2) - tan(X3))))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Accuracy in Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    34         + "Function: y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (log(X2) - tan(X3))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     34        + "Function: y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (ln(X2) - tan(X3))))" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
    5960
    6061      double x0, x1, x2, x3;
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFour.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = -2.3 + (0.13 * sin(X2))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
     51
    5252
    5353    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFourteen.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 22.0 + (4.2 * ((cos(X0) - tan(X1)) * (tanh(X2) / sin(X3))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionNine.cs

    r8086 r8331  
    2727  public class KornsFunctionNine : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Korns 9 y = ((sqrt(X0) / log(X1)) * (exp(X2) / square(X3)))"; } }
     29    public override string Name { get { return "Korns 9 y = ((sqrt(X0) / log(X1)) * (exp(X2) / X3²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Accuracy in Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    34         + "Function: y = ((sqrt(X0) / log(X1)) * (exp(X2) / square(X3)))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     34        + "Function: y = (sqrt(X0) / log(X1)) * (exp(X2) / X3²)" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
    5960
    6061      double x0, x1, x2, x3;
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionOne.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 1.57 + (24.3 * X3)" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    3635        + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     36        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     37        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     38        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3939        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4040        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4747    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4848    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5252
    5353    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionSeven.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 213.80940889 - (213.80940889 * exp(-0.54723748542 * X0))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionSix.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 1.3 + (0.13 * sqrt(X0))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
    5960
    6061      double x0;
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTen.cs

    r8086 r8331  
    2727  public class KornsFunctionTen : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Korns 10 y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * square(X2))) / ((4.0 * cube(X3)) + (5.0 * quart(X4)))))"; } }
     29    public override string Name { get { return "Korns 10 y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * X2²)) / ((4.0 * X3³) + (5.0 * X4^4))))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Accuracy in Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    34         + "Function: y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * square(X2))) / ((4.0 * cube(X3)) + (5.0 * quart(X4)))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     34        + "Function: y =  0.81 + (24.3 * (((2.0 * X1) + (3.0 * X2²)) / ((4.0 * X3³) + (5.0 * X4^4))))" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionThirteen.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 32.0 - (3.0 * ((tan(X0) / tan(X1)) * (tan(X2) / tan(X3))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionThree.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 1.57 + (24.3 * X3)" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTwelve.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 2.0 - (2.1 * (cos(9.8 * X0) * sin(1.3 * X4)))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
     37        + "Constants: random finit 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTwo.cs

    r8086 r8331  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 0.23 + (14.2 * ((X3 + X1) / (3.0 * X4)))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsInstanceProvider.cs

    r8086 r8331  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Korns"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionEight.cs

    r8086 r8331  
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    3434        + "Function: F8 = Sqrt(x)" + Environment.NewLine
    35         + "Fitcases: 20 random points [0, 4]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     35        + "Fitcases: 20 random points in [0, 4]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionEleven.cs

    r8086 r8331  
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    3434        + "Function: F11 = x^y" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionFive.cs

    r8086 r8331  
    2727  public class NguyenFunctionFive : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F5 = sin(x^2)cos(x) - 1"; } }
     29    public override string Name { get { return "Nguyen F5 = sin(x²)cos(x) - 1"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F5 = sin(x^2)cos(x) - 1" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F5 = sin(x²)cos(x) - 1" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionFour.cs

    r8086 r8331  
    2727  public class NguyenFunctionFour : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F4 = x^6 + x^5 + x^4 + x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F4 = x^6 + x^5 + x^4 + x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F4 = x^6 + x^5 + x^4 + x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F4 = x^6 + x^5 + x^4 + x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionNine.cs

    r8086 r8331  
    2727  public class NguyenFunctionNine : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F9 = sin(x) + sin(y^2)"; } }
     29    public override string Name { get { return "Nguyen F9 = sin(x) + sin(y²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F9 = sin(x) + sin(y^2)" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F9 = sin(x) + sin(y²)" + Environment.NewLine
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionOne.cs

    r8086 r8331  
    2727  public class NguyenFunctionOne : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F1 = x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F1 = x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F1 = x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F1 = x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
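
All Nguyen descriptors in this changeset follow the same recipe: draw the stated number of fitness cases uniformly from the stated interval and evaluate the target function on them. The following standalone sketch reproduces that recipe for F1 = x³ + x² + x; it is an illustration only (System.Random stands in for HeuristicLab's FastRandom, and the actual GenerateValues bodies are not part of this diff).

    // Standalone sketch: 20 uniform fitness cases in [-1, 1] for Nguyen F1 = x^3 + x^2 + x.
    // System.Random replaces HeuristicLab's FastRandom; NextDouble() covers [0, 1) only,
    // see the ValueGenerator change further below for an endpoint-inclusive variant.
    using System;
    using System.Linq;

    public static class NguyenF1Sketch {
      public static void Main() {
        var rand = new Random(0);
        double[] x = Enumerable.Range(0, 20)
                               .Select(_ => rand.NextDouble() * 2.0 - 1.0)   // 20 points in [-1, 1)
                               .ToArray();
        double[] y = x.Select(v => v * v * v + v * v + v).ToArray();         // F1 = x^3 + x^2 + x
        for (int i = 0; i < x.Length; i++)
          Console.WriteLine("{0:F4};{1:F4}", x[i], y[i]);
      }
    }
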
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionSeven.cs

    r8086 r8331  
    2727  public class NguyenFunctionSeven : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F7 = log(x + 1) + log(x^2 + 1)"; } }
     29    public override string Name { get { return "Nguyen F7 = log(x + 1) + log(x² + 1)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F7 = log(x + 1) + log(x^2 + 1)" + Environment.NewLine
    35         + "Fitcases: 20 random points [0, 2]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F7 = ln(x + 1) + ln(x² + 1)" + Environment.NewLine
     35        + "Fitcases: 20 random points in [0, 2]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionSix.cs

    r8086 r8331  
    2727  public class NguyenFunctionSix : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F6 = sin(x) + sin(x + x^2)"; } }
     29    public override string Name { get { return "Nguyen F6 = sin(x) + sin(x + x²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F6 = sin(x) + sin(x + x^2)" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F6 = sin(x) + sin(x + x²)" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTen.cs

    r8086 r8331  
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    3434        + "Function: F10 = 2sin(x)cos(y)" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionThree.cs

    r8086 r8331  
    2727  public class NguyenFunctionThree : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F3 = x^5 + x^4 + x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F3 = x^5 + x^4 + x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F3 = x^5 + x^4 + x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F3 = x^5 + x^4 + x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTwelve.cs

    r8086 r8331  
    2727  public class NguyenFunctionTwelve : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F12 = x^4 - x^3 + y^2/2 - y"; } }
     29    public override string Name { get { return "Nguyen F12 = x^4 - x³ + y²/2 - y"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F12 = x^4 - x^3 + y^2/2 - y" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F12 = x^4 - x³ + y²/2 - y" + Environment.NewLine
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTwo.cs

    r8086 r8331  
    2727  public class NguyenFunctionTwo : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F2 = x^4 + x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F2 = x^4 + x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F2 = x^4 + x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F2 = x^4 + x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenInstanceProvider.cs

    r8086 r8331  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Nguyen_et_al"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/RegressionInstanceProvider.cs

    r8086 r8331  
    2020#endregion
    2121
    22 using System;
    23 using System.Collections;
    24 using System.Collections.Generic;
    25 using System.IO;
    26 using System.Linq;
    27 using System.Text;
    2822using HeuristicLab.Problems.DataAnalysis;
    2923
    3024namespace HeuristicLab.Problems.Instances.DataAnalysis {
    31   public abstract class RegressionInstanceProvider : IProblemInstanceProvider<IRegressionProblemData> {
    32 
    33     public IRegressionProblemData LoadData(string path) {
    34       TableFileParser csvFileParser = new TableFileParser();
    35       csvFileParser.Parse(path);
    36 
    37       Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    38       string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
    39 
    40       IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
    41 
    42       IRegressionProblemData regData = new RegressionProblemData(dataset, allowedInputVars, targetVar);
    43 
    44       int trainingPartEnd = csvFileParser.Rows * 2 / 3;
    45       regData.TrainingPartition.Start = 0;
    46       regData.TrainingPartition.End = trainingPartEnd;
    47       regData.TestPartition.Start = trainingPartEnd;
    48       regData.TestPartition.End = csvFileParser.Rows;
    49 
    50       int pos = path.LastIndexOf('\\');
    51       if (pos < 0)
    52         regData.Name = path;
    53       else {
    54         pos++;
    55         regData.Name = path.Substring(pos, path.Length - pos);
    56       }
    57       return regData;
    58     }
    59 
    60     public void SaveData(IRegressionProblemData instance, string path) {
    61       StringBuilder strBuilder = new StringBuilder();
    62 
    63       foreach (var variable in instance.InputVariables) {
    64         strBuilder.Append(variable + ";");
    65       }
    66       strBuilder.Remove(strBuilder.Length - 1, 1);
    67       strBuilder.AppendLine();
    68 
    69       Dataset dataset = instance.Dataset;
    70 
    71       for (int i = 0; i < dataset.Rows; i++) {
    72         for (int j = 0; j < dataset.Columns; j++) {
    73           strBuilder.Append(dataset.GetValue(i, j) + ";");
    74         }
    75         strBuilder.Remove(strBuilder.Length - 1, 1);
    76         strBuilder.AppendLine();
    77       }
    78 
    79       using (StreamWriter writer = new StreamWriter(path)) {
    80         writer.Write(strBuilder);
    81       }
    82     }
    83 
    84     public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
    85     public abstract IRegressionProblemData LoadData(IDataDescriptor descriptor);
    86 
    87     public abstract string Name { get; }
    88     public abstract string Description { get; }
    89     public abstract Uri WebLink { get; }
    90     public abstract string ReferencePublication { get; }
     25  public abstract class RegressionInstanceProvider : ProblemInstanceProvider<IRegressionProblemData> {
    9126  }
    9227}
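
With this change the regression provider keeps no code of its own: the path-based LoadData/SaveData pair is dropped and the class becomes an empty subclass of the generic ProblemInstanceProvider&lt;IRegressionProblemData&gt;, whose definition is not part of this changeset. Purely for orientation, a rough sketch of the pattern implied by the removed abstract members is given below; all names here are illustrative assumptions, not the actual HeuristicLab API.

    // Hypothetical sketch of a generic instance-provider base class; names are illustrative only.
    using System;
    using System.Collections.Generic;

    public interface IDataDescriptor {            // stub standing in for the HeuristicLab interface
      string Name { get; }
      string Description { get; }
    }

    public abstract class ProblemInstanceProviderSketch<TData> {
      public abstract string Name { get; }
      public abstract string Description { get; }
      public abstract Uri WebLink { get; }
      public abstract string ReferencePublication { get; }

      // descriptor-based loading stays abstract, matching the members removed above
      public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
      public abstract TData LoadData(IDataDescriptor descriptor);
    }
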
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/ValueGenerator.cs

    r8086 r8331  
    2929    private static FastRandom rand = new FastRandom();
    3030
     31    /// <summary>
     32    /// Generates a sequence of evenly spaced points between start and end (inclusive!).
     33    /// </summary>
     34    /// <param name="start">The smallest and first value of the sequence.</param>
     35    /// <param name="end">The largest and last value of the sequence.</param>
     36    /// <param name="stepWidth">The step size between subsequent values.</param>
      37    /// <returns>A sequence of values from start to end (inclusive)</returns>
    3138    public static IEnumerable<double> GenerateSteps(double start, double end, double stepWidth) {
    32       int steps = (int)Math.Round(((end - start) / stepWidth) + 1);
    33       for (int i = 0; i < steps; i++)
    34         yield return start + i * stepWidth;
      39      if (start > end) throw new ArgumentException("start must be less than or equal to end.");
      40      if (stepWidth <= 0) throw new ArgumentException("stepWidth must be larger than zero.", "stepWidth");
     41      double x = start;
     42      while (x <= end) {
     43        yield return x;
     44        x += stepWidth;
     45      }
    3546    }
    3647
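
The rewritten GenerateSteps trades the rounded step count for a plain accumulate-while-not-past-end loop, so both start and end are returned as long as (end - start) is an exact multiple of stepWidth in double precision; for step widths such as 0.1 or 0.4 that are not exactly representable, accumulated rounding error can make the last value fall just short of or just past end. A minimal standalone check of the scheme with an exactly representable step width:

    // Minimal standalone check of the accumulate-until-end stepping scheme (not the HeuristicLab class).
    using System;
    using System.Collections.Generic;

    public static class StepsSketch {
      public static IEnumerable<double> Steps(double start, double end, double stepWidth) {
        for (double x = start; x <= end; x += stepWidth)
          yield return x;
      }

      public static void Main() {
        // 0.25 is exactly representable, so the end point 1.0 is reached exactly and included:
        Console.WriteLine(string.Join(", ", Steps(0.0, 1.0, 0.25)));   // 0, 0.25, 0.5, 0.75, 1
      }
    }
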
    37     public static IEnumerable<double> GenerateUniformDistributedValues(int amount, double start, double end) {
    38       for (int i = 0; i < amount; i++)
    39         yield return rand.NextDouble() * (end - start) + start;
     48    /// <summary>
     49    /// Generates uniformly distributed values between start and end (inclusive!)
     50    /// </summary>
     51    /// <param name="n">Number of values to generate.</param>
     52    /// <param name="start">The lower value (inclusive)</param>
     53    /// <param name="end">The upper value (inclusive)</param>
     54    /// <returns>An enumerable including n values in [start, end]</returns>
     55    public static IEnumerable<double> GenerateUniformDistributedValues(int n, double start, double end) {
     56      for (int i = 0; i < n; i++) {
     57        // we need to return a random value including end.
     58        // so we cannot use rand.NextDouble() as it returns a value strictly smaller than 1.
     59        double r = rand.NextUInt() / (double)uint.MaxValue;    // r \in [0,1]
     60        yield return r * (end - start) + start;
     61      }
    4062    }
    4163
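
The point of dividing NextUInt() by uint.MaxValue is that the scaled value can actually reach end, whereas NextDouble() only covers [0, 1). FastRandom.NextUInt is HeuristicLab-specific, so the sketch below imitates the idea on top of System.Random; it is a stand-in for illustration, not the framework code.

    // Inclusive-endpoint uniform sampling sketch (System.Random stands in for FastRandom.NextUInt).
    using System;

    public static class UniformSketch {
      static readonly Random rand = new Random();

      public static double NextInclusive(double start, double end) {
        // draw a uint uniformly, then map [0, uint.MaxValue] -> [0, 1] -> [start, end]
        byte[] buffer = new byte[4];
        rand.NextBytes(buffer);
        uint u = BitConverter.ToUInt32(buffer, 0);
        double r = u / (double)uint.MaxValue;      // r in [0, 1], both bounds reachable
        return r * (end - start) + start;
      }

      public static void Main() {
        for (int i = 0; i < 5; i++)
          Console.WriteLine(NextInclusive(-5.0, 5.0));
      }
    }
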
    42     public static IEnumerable<double> GenerateNormalDistributedValues(int amount, double mu, double sigma) {
    43       for (int i = 0; i < amount; i++)
     64    /// <summary>
     65    /// Generates normally distributed values sampling from N(mu, sigma)
     66    /// </summary>
     67    /// <param name="n">Number of values to generate.</param>
     68    /// <param name="mu">The mu parameter of the normal distribution</param>
     69    /// <param name="sigma">The sigma parameter of the normal distribution</param>
     70    /// <returns>An enumerable including n values ~ N(mu, sigma)</returns>
     71    public static IEnumerable<double> GenerateNormalDistributedValues(int n, double mu, double sigma) {
     72      for (int i = 0; i < n; i++)
    4473        yield return NormalDistributedRandom.NextDouble(rand, mu, sigma);
    4574    }
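
NormalDistributedRandom is part of HeuristicLab's random number infrastructure and is not shown in this diff. For reference, the classic Box-Muller transform below produces the same kind of N(mu, sigma) samples; it is a standalone sketch and makes no claim about how the framework class is actually implemented.

    // Box-Muller sketch for N(mu, sigma) samples (illustration only).
    using System;

    public static class NormalSketch {
      static readonly Random rand = new Random();

      public static double NextNormal(double mu, double sigma) {
        double u1 = 1.0 - rand.NextDouble();            // in (0, 1], avoids log(0)
        double u2 = rand.NextDouble();
        double z = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);   // standard normal
        return mu + sigma * z;
      }

      public static void Main() {
        for (int i = 0; i < 5; i++)
          Console.WriteLine(NextNormal(0.0, 1.0));
      }
    }
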
     
    82111      }
    83112    }
    84 
    85     //recursive approach
    86     /*public static IEnumerable<IEnumerable<double>> GenerateAllCombinationsOfValuesInLists(List<List<double>> lists) {
    87       int cur = 0;
    88       List<double> curCombination = new List<double>();
    89       List<List<double>> allCombinations = new List<List<double>>();
    90       for (int i = 0; i < lists.Count; i++) {
    91         allCombinations.Add(new List<double>());
    92       }
    93       if (lists.Count() > cur) {
    94         foreach (var item in lists[cur]) {
    95           curCombination.Clear();
    96           curCombination.Add(item);
    97           GetCombination(lists, cur + 1, curCombination, allCombinations);
    98         }
    99       }
    100       return allCombinations;
    101     }
    102 
    103     private static void GetCombination(List<List<double>> lists, int cur, List<double> curCombinations, List<List<double>> allCombinations) {
    104       if (lists.Count > cur) {
    105         foreach (var item in lists[cur]) {
    106           if (curCombinations.Count > cur) {
    107             curCombinations.RemoveAt(cur);
    108           }
    109           curCombinations.Add(item);
    110           GetCombination(lists, cur + 1, curCombinations, allCombinations);
    111         }
    112       } else {
    113         for (int i = 0; i < curCombinations.Count; i++) {
    114           allCombinations[i].Add(curCombinations[i]);
    115         }
    116       }
    117     }         */
    118 
    119     //original
    120     /*public static IEnumerable<IEnumerable<double>> GenerateAllCombinationsOfValuesInLists(List<List<double>> sets) {
    121 
    122       var combinations = new List<List<double>>();
    123 
    124       foreach (var value in sets[0])
    125         combinations.Add(new List<double> { value });
    126 
    127       foreach (var set in sets.Skip(1))
    128         combinations = AddListToCombinations(combinations, set);
    129 
    130       IEnumerable<IEnumerable<double>> res = (from i in Enumerable.Range(0, sets.Count)
    131                                               select (from list in combinations
    132                                                       select list.ElementAt(i)));
    133 
    134       return res;
    135     }
    136 
    137     private static List<List<double>> AddListToCombinations
    138          (List<List<double>> combinations, List<double> set) {
    139       var newCombinations = from value in set
    140                             from combination in combinations
    141                             select new List<double>(combination) { value };
    142 
    143       return newCombinations.ToList();
    144     }    */
    145113  }
    146114}
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Various/SpatialCoevolution.cs

    r8086 r8331  
    2727  public class SpatialCoevolution : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Spatial co-evolution F(x,y) = 1/(1+power(x,-4)) + 1/(1+pow(y,-4))"; } }
     29    public override string Name { get { return "Spatial co-evolution F(x,y) = 1/(1 + x^(-4)) + 1/(1 + y^(-4))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Evolutionary consequences of coevolving targets" + Environment.NewLine
    3333        + "Authors: Ludo Pagie and Paulien Hogeweg" + Environment.NewLine
    34         + "Function: F(x,y) = 1/(1+power(x,-4)) + 1/(1+pow(y,-4))" + Environment.NewLine
    35         + "Terminal set: x, y" + Environment.NewLine
     34        + "Function: F(x,y) = 1/(1 + x^(-4)) + 1/(1 + y^(-4))" + Environment.NewLine
     35        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     36        + "Terminals: only variables (no random constants)" + Environment.NewLine
    3637        + "The fitness of a solution is defined as the mean of the absolute differences between "
    3738        + "the target function and the solution over all problems on the basis of which it is evaluated. "
    3839        + "A solution is considered completely ’correct’ if, for all 676 problems in the ’complete’ "
    3940        + "problem set used in the static evaluation scheme, the absolute difference between "
    40         + "solution and target function is less than 0:01 (this is a so-called hit).";
     41        + "solution and target function is less than 0.01 (this is a so-called hit).";
    4142      }
    4243    }
     
    4546    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4647    protected override int TrainingPartitionStart { get { return 0; } }
    47     protected override int TrainingPartitionEnd { get { return 1000; } }
    48     protected override int TestPartitionStart { get { return 1000; } }
     48    protected override int TrainingPartitionEnd { get { return 676; } }
     49    protected override int TestPartitionStart { get { return 676; } }
    4950    protected override int TestPartitionEnd { get { return 1676; } }
    5051
     
    5253      List<List<double>> data = new List<List<double>>();
    5354
    54       List<double> oneVariableTestData = ValueGenerator.GenerateSteps(-5, 5, 0.4).ToList();
    55       List<List<double>> testData = new List<List<double>>() { oneVariableTestData, oneVariableTestData };
    56       var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(testData).ToList<IEnumerable<double>>();
     55      List<double> evenlySpacedSequence = ValueGenerator.GenerateSteps(-5, 5, 0.4).ToList();
     56      List<List<double>> trainingData = new List<List<double>>() { evenlySpacedSequence, evenlySpacedSequence };
     57      var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(trainingData).ToList();
    5758
    5859      for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    59         data.Add(ValueGenerator.GenerateUniformDistributedValues(1000, -5, 5).ToList());
    60         data[i].AddRange(combinations[i]);
     60        data.Add(combinations[i].ToList());
     61        data[i].AddRange(ValueGenerator.GenerateUniformDistributedValues(1000, -5, 5).ToList());
    6162      }
    6263
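
The reordering above makes the partition constants line up with the data layout: the paper's evaluation grid has 26 values per axis (-5 to 5 in steps of 0.4), the cartesian product of the two axes gives 26 x 26 = 676 grid rows forming the training partition (the first 676 rows), and the 1000 uniformly drawn points appended afterwards form the test partition (rows 676 to 1676). A standalone sketch of that grid construction follows; the nested-loop cartesian product here is a hypothetical stand-in for GenerateAllCombinationsOfValuesInLists.

    // Sketch: build the 26x26 evaluation grid used as training data (values -5, -4.6, ..., 5).
    // Standalone illustration; GenerateAllCombinationsOfValuesInLists itself is not shown in this changeset.
    using System;
    using System.Collections.Generic;

    public static class SpatialGridSketch {
      public static void Main() {
        var axis = new List<double>();
        for (double x = -5.0; x <= 5.0 + 1e-9; x += 0.4) axis.Add(x);   // 26 values (tolerance guards rounding)

        // cartesian product of the axis with itself: one column per input variable
        var xs = new List<double>();
        var ys = new List<double>();
        foreach (double x in axis)
          foreach (double y in axis) { xs.Add(x); ys.Add(y); }

        Console.WriteLine("grid rows: " + xs.Count);                    // 26 * 26 = 676
      }
    }
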
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/KotanchekFunction.cs

    r8086 r8331  
    2727  public class KotanchekFunction : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Kotanchek"; } }
      29    public override string Name { get { return "Vladislavleva-1 F1(X1,X2) = exp(-(X1 - 1)²) / (1.2 + (X2 - 2.5)²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F1(X1, X2) = e^-(X1 - 1)^2 / (1.2 + (X2 -2.5)^2" + Environment.NewLine
      34        + "Function: F1(X1, X2) = exp(-(X1 - 1)²) / (1.2 + (X2 - 2.5)²)" + Environment.NewLine
    3535        + "Training Data: 100 points X1, X2 = Rand(0.3, 4)" + Environment.NewLine
    3636        + "Test Data: 2026 points (X1, X2) = (-0.2:0.1:4.2)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x";
     37        + "Function Set: +, -, *, /, square, e^x, e^-x, x^eps, x + eps, x * eps";
    3838      }
    3939    }
     
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    4444    protected override int TrainingPartitionEnd { get { return 100; } }
    45     protected override int TestPartitionStart { get { return 1000; } }
    46     protected override int TestPartitionEnd { get { return 3025; } }
     45    protected override int TestPartitionStart { get { return 100; } }
     46    protected override int TestPartitionEnd { get { return 2126; } }
    4747
    4848    protected override List<List<double>> GenerateValues() {
     
    5353      var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(testData).ToList<IEnumerable<double>>();
    5454      for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    55         data.Add(ValueGenerator.GenerateUniformDistributedValues(1000, 0.3, 4).ToList());
     55        data.Add(ValueGenerator.GenerateUniformDistributedValues(100, 0.3, 4).ToList());
    5656        data[i].AddRange(combinations[i]);
    5757      }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RationalPolynomialThreeDimensional.cs

    r8086 r8331  
    2727  public class RationalPolynomialThreeDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva RatPol3D"; } }
     29    public override string Name { get { return "Vladislavleva-5 F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2² * (X1 - 10))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2^2 * (X1 - 10))" + Environment.NewLine
     34        + "Function: F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2² * (X1 - 10))" + Environment.NewLine
    3535        + "Training Data: 300 points X1, X3 = Rand(0.05, 2), X2 = Rand(1, 2)" + Environment.NewLine
    3636        + "Test Data: 2701 points X1, X3 = (-0.05:0.15:2.1), X2 = (0.95:0.1:2.05)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real";
     37        + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps";
    3838      }
    3939    }
     
    4949      List<List<double>> data = new List<List<double>>();
    5050
    51       int amountOfPoints = 1000;
    52       data.Add(ValueGenerator.GenerateUniformDistributedValues(amountOfPoints, 0.05, 2).ToList());
    53       data.Add(ValueGenerator.GenerateUniformDistributedValues(amountOfPoints, 1, 2).ToList());
    54       data.Add(ValueGenerator.GenerateUniformDistributedValues(amountOfPoints, 0.05, 2).ToList());
     51      int n = 1000;
     52      data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 0.05, 2).ToList());
     53      data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 1, 2).ToList());
     54      data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 0.05, 2).ToList());
    5555
    5656      List<List<double>> testData = new List<List<double>>() {
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RationalPolynomialTwoDimensional.cs

    r8086 r8331  
    2727  public class RationalPolynomialTwoDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva RatPol2D"; } }
     29    public override string Name { get { return "Vladislavleva-8 F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)³ - (X2 -3)) / ((X2 - 2)^4 + 10)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)^3 - (X2 -3)) / ((X2 - 2)^4 + 10)" + Environment.NewLine
     34        + "Function: F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)³ - (X2 -3)) / ((X2 - 2)^4 + 10)" + Environment.NewLine
    3535        + "Training Data: 50 points X1, X2 = Rand(0.05, 6.05)" + Environment.NewLine
    3636        + "Test Data: 1157 points X1, X2 = (-0.25:0.2:6.35)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real";
     37        + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RippleFunction.cs

    r8086 r8331  
    2727  public class RippleFunction : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Ripple"; } }
      29    public override string Name { get { return "Vladislavleva-7 F7(X1, X2) = (X1 - 3)(X2 - 3) + 2 * sin((X1 - 4)(X2 - 4))"; } }
    3030    public override string Description {
    3131      get {
     
    3535        + "Training Data: 300 points X1, X2 = Rand(0.05, 6.05)" + Environment.NewLine
    3636        + "Test Data: 1000 points X1, X2 = Rand(-0.25, 6.35)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)";
      37        + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SalutowiczFunctionOneDimensional.cs

    r8086 r8331  
    2727  public class SalutowiczFunctionOneDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Salutowicz"; } }
     29    public override string Name { get { return "Vladislavleva-2 F2(X) = exp(-X) * X³ * cos(X) * sin(X) * (cos(X)sin(X)² - 1)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F2(X) = e^-X * X^3 * cos(X) * sin(X) * (cos(X)sin(X)^2 - 1)" + Environment.NewLine
     34        + "Function: F2(X) = exp(-X) * X³ * cos(X) * sin(X) * (cos(X)sin(X)² - 1)" + Environment.NewLine
    3535        + "Training Data: 100 points X = (0.05:0.1:10)" + Environment.NewLine
    3636        + "Test Data: 221 points X = (-0.5:0.05:10.5)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)";
      37        + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SalutowiczFunctionTwoDimensional.cs

    r8086 r8331  
    2727  public class SalutowiczFunctionTwoDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Salutowicz2D"; } }
     29    public override string Name { get { return "Vladislavleva-3 F3(X1, X2) = exp(-X1) * X1³ * cos(X1) * sin(X1) * (cos(X1)sin(X1)² - 1)(X2 - 5)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F3(X1, X2) = e^-X1 * X1^3 * cos(X1) * sin(X1) * (cos(X1)sin(X1)^2 - 1)(X2 - 5)" + Environment.NewLine
     34        + "Function: F3(X1, X2) = exp(-X1) * X1³ * cos(X1) * sin(X1) * (cos(X1)sin(X1)² - 1)(X2 - 5)" + Environment.NewLine
    3535        + "Training Data: 601 points X1 = (0.05:0.1:10), X2 = (0.05:2:10.05)" + Environment.NewLine
    36         + "Test Data: 2554 points X1 = (-0.5:0.05:10.5), X2 = (-0.5:0.5:10.5)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)" + Environment.NewLine + Environment.NewLine
    38         + "Important: The stepwidth of the variable X1 in the test partition has been set to 0.1, to fit the amount of data points.";
     36        + "Test Data: 4840 points X1 = (-0.5:0.05:10.5), X2 = (-0.5:0.5:10.5)" + Environment.NewLine
      37        + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x * eps";
    3938      }
    4039    }
     
    4544    protected override int TrainingPartitionEnd { get { return 601; } }
    4645    protected override int TestPartitionStart { get { return 601; } }
    47     protected override int TestPartitionEnd { get { return 3155; } }
     46    protected override int TestPartitionEnd { get { return 5441; } }
    4847
    4948    protected override List<List<double>> GenerateValues() {
     
    5554
    5655      List<List<double>> testData = new List<List<double>>() {
    57         ValueGenerator.GenerateSteps(-0.5, 10.5, 0.1).ToList(),
     56        ValueGenerator.GenerateSteps(-0.5, 10.5, 0.05).ToList(),
    5857        ValueGenerator.GenerateSteps(-0.5, 10.5, 0.5).ToList()
    5958      };
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SineCosineFunction.cs

    r8086 r8331  
    2727  public class SineCosineFunction : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva SineCosine"; } }
     29    public override string Name { get { return "Vladislavleva-6 F6(X1, X2) = 6 * sin(X1) * cos(X2)"; } }
    3030    public override string Description {
    3131      get {
     
    3535        + "Training Data: 30 points X1, X2 = Rand(0.1, 5.9)" + Environment.NewLine
    3636        + "Test Data: 961 points X1, X2 = (-0.05:0.02:6.05)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x";
     37        + "Function Set: +, -, *, /, square, e^x, e^-x, x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/UnwrappedBallFunctionFiveDimensional.cs

    r8086 r8331  
    2727  public class UnwrappedBallFunctionFiveDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva UBall5D"; } }
      29    public override string Name { get { return "Vladislavleva-4 F4(X1, X2, X3, X4, X5) = 10 / (5 + Sum((Xi - 3)²))"; } }
    3030    public override string Description {
    3131      get {
     
    3535        + "Training Data: 1024 points Xi = Rand(0.05, 6.05)" + Environment.NewLine
    3636        + "Test Data: 5000 points Xi = Rand(-0.25, 6.35)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real";
     37        + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/ScatterSearch (trunk integration)/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/VladislavlevaInstanceProvider.cs

    r8086 r8331  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Vladislavleva_et_al"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
     
    4141      List<IDataDescriptor> descriptorList = new List<IDataDescriptor>();
    4242      descriptorList.Add(new KotanchekFunction());
    43       descriptorList.Add(new RationalPolynomialTwoDimensional());
    44       descriptorList.Add(new RationalPolynomialThreeDimensional());
    45       descriptorList.Add(new RippleFunction());
    4643      descriptorList.Add(new SalutowiczFunctionOneDimensional());
    4744      descriptorList.Add(new SalutowiczFunctionTwoDimensional());
    4845      descriptorList.Add(new UnwrappedBallFunctionFiveDimensional());
     46      descriptorList.Add(new RationalPolynomialThreeDimensional());
     47      descriptorList.Add(new SineCosineFunction());
     48      descriptorList.Add(new RippleFunction());
     49      descriptorList.Add(new RationalPolynomialTwoDimensional());
    4950      return descriptorList;
    5051    }