
Timestamp:
08/08/12 14:04:17
Author:
mkommend
Message:

#1081: Intermediate commit of trunk updates - interpreter changes must be redone.

Location:
branches/HeuristicLab.TimeSeries
Files:
1 deleted
56 edited
15 copied

  • branches/HeuristicLab.TimeSeries

    • Property svn:ignore
      •  

        old new  
        2020bin
        2121protoc.exe
         22_ReSharper.HeuristicLab.TimeSeries-3.3
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis

  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/CSV/ClassifiactionCSVInstanceProvider.cs

    r7860 r8430  
    2020#endregion
    2121
    22 
    2322using System;
    2423using System.Collections.Generic;
     24using System.IO;
     25using System.Linq;
     26using System.Text;
    2527using HeuristicLab.Problems.DataAnalysis;
     28
    2629namespace HeuristicLab.Problems.Instances.DataAnalysis {
    2730  public class ClassificationCSVInstanceProvider : ClassificationInstanceProvider {
    2831    public override string Name {
    29       get { return "CSV Problem Provider"; }
     32      get { return "CSV File"; }
    3033    }
    3134    public override string Description {
     
    4851      throw new NotImplementedException();
    4952    }
     53
     54    public override bool CanImportData {
     55      get { return true; }
     56    }
     57    public override IClassificationProblemData ImportData(string path) {
     58      TableFileParser csvFileParser = new TableFileParser();
     59
     60      csvFileParser.Parse(path);
     61
     62      Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
     63      string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
     64      IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
     65
     66      ClassificationProblemData claData = new ClassificationProblemData(dataset, allowedInputVars, targetVar);
     67
     68      int trainingPartEnd = csvFileParser.Rows * 2 / 3;
     69      claData.TrainingPartition.Start = 0;
     70      claData.TrainingPartition.End = trainingPartEnd;
     71      claData.TestPartition.Start = trainingPartEnd;
     72      claData.TestPartition.End = csvFileParser.Rows;
     73      int pos = path.LastIndexOf('\\');
     74      if (pos < 0)
     75        claData.Name = path;
     76      else {
     77        pos++;
     78        claData.Name = path.Substring(pos, path.Length - pos);
     79      }
     80
     81      return claData;
     82    }
     83
     84    public override bool CanExportData {
     85      get { return true; }
     86    }
     87    public override void ExportData(IClassificationProblemData instance, string path) {
     88      StringBuilder strBuilder = new StringBuilder();
     89
     90      foreach (var variable in instance.InputVariables) {
     91        strBuilder.Append(variable + ";");
     92      }
     93      strBuilder.Remove(strBuilder.Length - 1, 1);
     94      strBuilder.AppendLine();
     95
     96      Dataset dataset = instance.Dataset;
     97
     98      for (int i = 0; i < dataset.Rows; i++) {
     99        for (int j = 0; j < dataset.Columns; j++) {
     100          strBuilder.Append(dataset.GetValue(i, j) + ";");
     101        }
     102        strBuilder.Remove(strBuilder.Length - 1, 1);
     103        strBuilder.AppendLine();
     104      }
     105
     106      using (StreamWriter writer = new StreamWriter(path)) {
     107        writer.Write(strBuilder);
     108      }
     109    }
    50110  }
    51111}
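
    For orientation, here is a minimal usage sketch of the new import/export API introduced in this file (class and method names are taken from the diff above; the file paths are only illustrative):

        using HeuristicLab.Problems.DataAnalysis;
        using HeuristicLab.Problems.Instances.DataAnalysis;

        public class CsvImportExportDemo {
          public static void Run() {
            var provider = new ClassificationCSVInstanceProvider();

            // ImportData parses the CSV with TableFileParser, uses the last double
            // column as the target variable and splits the rows 2/3 training, 1/3 test.
            IClassificationProblemData data = provider.ImportData(@"C:\data\iris.csv");

            // ExportData writes the rows back out as a semicolon-separated file.
            provider.ExportData(data, @"C:\data\iris-export.csv");
          }
        }
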
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/ClassificationInstanceProvider.cs

    r7851 r8430  
    2020#endregion
    2121
    22 using System;
    23 using System.Collections;
    24 using System.Collections.Generic;
    25 using System.IO;
    26 using System.Linq;
    27 using System.Text;
    2822using HeuristicLab.Problems.DataAnalysis;
    2923
    3024namespace HeuristicLab.Problems.Instances.DataAnalysis {
    31   public abstract class ClassificationInstanceProvider : IProblemInstanceProvider<IClassificationProblemData> {
    32     public IClassificationProblemData LoadData(string path) {
    33       TableFileParser csvFileParser = new TableFileParser();
    34 
    35       csvFileParser.Parse(path);
    36 
    37       Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    38       string targetVar = csvFileParser.VariableNames.Last();
    39       IEnumerable<string> allowedInputVars = csvFileParser.VariableNames.Where(x => !x.Equals(targetVar));
    40 
    41       ClassificationProblemData claData = new ClassificationProblemData(dataset, allowedInputVars, targetVar);
    42 
    43       int trainingPartEnd = csvFileParser.Rows * 2 / 3;
    44       claData.TrainingPartition.Start = 0;
    45       claData.TrainingPartition.End = trainingPartEnd;
    46       claData.TestPartition.Start = trainingPartEnd;
    47       claData.TestPartition.End = csvFileParser.Rows;
    48       int pos = path.LastIndexOf('\\');
    49       if (pos < 0)
    50         claData.Name = path;
    51       else {
    52         pos++;
    53         claData.Name = path.Substring(pos, path.Length - pos);
    54       }
    55 
    56       return claData;
    57     }
    58 
    59     public void SaveData(IClassificationProblemData instance, string path) {
    60       StringBuilder strBuilder = new StringBuilder();
    61 
    62       foreach (var variable in instance.InputVariables) {
    63         strBuilder.Append(variable + ";");
    64       }
    65       strBuilder.Remove(strBuilder.Length - 1, 1);
    66       strBuilder.AppendLine();
    67 
    68       Dataset dataset = instance.Dataset;
    69 
    70       for (int i = 0; i < dataset.Rows; i++) {
    71         for (int j = 0; j < dataset.Columns; j++) {
    72           strBuilder.Append(dataset.GetValue(i, j) + ";");
    73         }
    74         strBuilder.Remove(strBuilder.Length - 1, 1);
    75         strBuilder.AppendLine();
    76       }
    77 
    78       using (StreamWriter writer = new StreamWriter(path)) {
    79         writer.Write(strBuilder);
    80       }
    81     }
    82 
    83     public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
    84     public abstract IClassificationProblemData LoadData(IDataDescriptor descriptor);
    85 
    86     public abstract string Name { get; }
    87     public abstract string Description { get; }
    88     public abstract Uri WebLink { get; }
    89     public abstract string ReferencePublication { get; }
     25  public abstract class ClassificationInstanceProvider : ProblemInstanceProvider<IClassificationProblemData> {
    9026  }
    9127}
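
    The LoadData/SaveData implementations deleted here now live in the CSV providers as ImportData/ExportData (see the file above), and the base class collapses to a subtype of the new ProblemInstanceProvider<T>. As a rough orientation, these are the members ProblemInstanceProvider<T> presumably declares, reconstructed purely from the overrides visible in this changeset; the actual trunk class may differ:

        using System;
        using System.Collections.Generic;

        // Reconstructed sketch, not the actual trunk source.
        // IDataDescriptor comes from HeuristicLab.Problems.Instances.
        public abstract class ProblemInstanceProvider<TData> {
          public abstract string Name { get; }
          public abstract string Description { get; }
          public abstract Uri WebLink { get; }
          public abstract string ReferencePublication { get; }

          public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
          public abstract TData LoadData(IDataDescriptor descriptor);

          // Import/export are opt-in; the CSV providers override these members.
          public virtual bool CanImportData { get { return false; } }
          public virtual TData ImportData(string path) { throw new NotSupportedException(); }
          public virtual bool CanExportData { get { return false; } }
          public virtual void ExportData(TData instance, string path) { throw new NotSupportedException(); }
        }
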
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/RealWorld/ClassificationRealWorldInstanceProvider.cs

    r7849 r8430  
    4747    public override IEnumerable<IDataDescriptor> GetDataDescriptors() {
    4848      List<IDataDescriptor> descriptorList = new List<IDataDescriptor>();
    49       descriptorList.Add(new Iris());
    5049      descriptorList.Add(new Mammography());
    5150      var solutionsArchiveName = GetResourceName(FileName + @"\.zip");
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Classification/ResourceClassificationInstanceProvider.cs

    r7851 r8430  
    7070
    7171        Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    72         string targetVar = csvFileParser.VariableNames.Last();
    73         IEnumerable<string> allowedInputVars = csvFileParser.VariableNames.Where(x => !x.Equals(targetVar));
     72        string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
     73        IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
    7474
    7575        ClassificationProblemData claData = new ClassificationProblemData(dataset, allowedInputVars, targetVar);
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/HeuristicLab.Problems.Instances.DataAnalysis-3.3.csproj

    r7890 r8430  
    3737    <AssemblyOriginatorKeyFile>HeuristicLab.snk</AssemblyOriginatorKeyFile>
    3838  </PropertyGroup>
     39  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
     40    <DebugSymbols>true</DebugSymbols>
     41    <OutputPath>..\..\bin\</OutputPath>
     42    <DefineConstants>DEBUG;TRACE</DefineConstants>
     43    <DebugType>full</DebugType>
     44    <PlatformTarget>x86</PlatformTarget>
     45    <CodeAnalysisLogFile>..\..\bin\HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll.CodeAnalysisLog.xml</CodeAnalysisLogFile>
     46    <CodeAnalysisUseTypeNameInSuppression>true</CodeAnalysisUseTypeNameInSuppression>
     47    <CodeAnalysisModuleSuppressionsFile>GlobalSuppressions.cs</CodeAnalysisModuleSuppressionsFile>
     48    <ErrorReport>prompt</ErrorReport>
     49    <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
     50    <CodeAnalysisRuleSetDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\\Rule Sets</CodeAnalysisRuleSetDirectories>
     51    <CodeAnalysisRuleDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\FxCop\\Rules</CodeAnalysisRuleDirectories>
     52  </PropertyGroup>
     53  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
     54    <OutputPath>..\..\bin\</OutputPath>
     55    <DefineConstants>TRACE</DefineConstants>
     56    <Optimize>true</Optimize>
     57    <DebugType>pdbonly</DebugType>
     58    <PlatformTarget>x86</PlatformTarget>
     59    <CodeAnalysisLogFile>..\..\bin\HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll.CodeAnalysisLog.xml</CodeAnalysisLogFile>
     60    <CodeAnalysisUseTypeNameInSuppression>true</CodeAnalysisUseTypeNameInSuppression>
     61    <CodeAnalysisModuleSuppressionsFile>GlobalSuppressions.cs</CodeAnalysisModuleSuppressionsFile>
     62    <ErrorReport>prompt</ErrorReport>
     63    <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
     64    <CodeAnalysisRuleSetDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\\Rule Sets</CodeAnalysisRuleSetDirectories>
     65    <CodeAnalysisIgnoreBuiltInRuleSets>true</CodeAnalysisIgnoreBuiltInRuleSets>
     66    <CodeAnalysisRuleDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\FxCop\\Rules</CodeAnalysisRuleDirectories>
     67    <CodeAnalysisIgnoreBuiltInRules>false</CodeAnalysisIgnoreBuiltInRules>
     68  </PropertyGroup>
     69  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
     70    <DebugSymbols>true</DebugSymbols>
     71    <OutputPath>..\..\bin\</OutputPath>
     72    <DefineConstants>DEBUG;TRACE</DefineConstants>
     73    <DebugType>full</DebugType>
     74    <PlatformTarget>x64</PlatformTarget>
     75    <CodeAnalysisLogFile>..\..\bin\HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll.CodeAnalysisLog.xml</CodeAnalysisLogFile>
     76    <CodeAnalysisUseTypeNameInSuppression>true</CodeAnalysisUseTypeNameInSuppression>
     77    <CodeAnalysisModuleSuppressionsFile>GlobalSuppressions.cs</CodeAnalysisModuleSuppressionsFile>
     78    <ErrorReport>prompt</ErrorReport>
     79    <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
     80    <CodeAnalysisRuleSetDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\\Rule Sets</CodeAnalysisRuleSetDirectories>
     81    <CodeAnalysisRuleDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\FxCop\\Rules</CodeAnalysisRuleDirectories>
     82  </PropertyGroup>
     83  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
     84    <OutputPath>..\..\bin\</OutputPath>
     85    <DefineConstants>TRACE</DefineConstants>
     86    <Optimize>true</Optimize>
     87    <DebugType>pdbonly</DebugType>
     88    <PlatformTarget>x64</PlatformTarget>
     89    <CodeAnalysisLogFile>..\..\bin\HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll.CodeAnalysisLog.xml</CodeAnalysisLogFile>
     90    <CodeAnalysisUseTypeNameInSuppression>true</CodeAnalysisUseTypeNameInSuppression>
     91    <CodeAnalysisModuleSuppressionsFile>GlobalSuppressions.cs</CodeAnalysisModuleSuppressionsFile>
     92    <ErrorReport>prompt</ErrorReport>
     93    <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
     94    <CodeAnalysisRuleSetDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\\Rule Sets</CodeAnalysisRuleSetDirectories>
     95    <CodeAnalysisIgnoreBuiltInRuleSets>true</CodeAnalysisIgnoreBuiltInRuleSets>
     96    <CodeAnalysisRuleDirectories>;C:\Program Files (x86)\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\FxCop\\Rules</CodeAnalysisRuleDirectories>
     97    <CodeAnalysisIgnoreBuiltInRules>false</CodeAnalysisIgnoreBuiltInRules>
     98  </PropertyGroup>
    3999  <ItemGroup>
    40100    <Reference Include="HeuristicLab.Collections-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     
    82142    <Compile Include="Classification\ResourceClassificationDataDescriptor.cs" />
    83143    <Compile Include="Classification\ResourceClassificationInstanceProvider.cs" />
     144    <Compile Include="Clustering\ClusteringInstanceProvider.cs" />
     145    <Compile Include="Clustering\CSV\ClusteringCSVInstanceProvider.cs" />
    84146    <Compile Include="Plugin.cs" />
    85147    <Compile Include="Properties\AssemblyInfo.cs" />
     
    89151    <Compile Include="Regression\Keijzer\KeijzerFunctionFourteen.cs" />
    90152    <Compile Include="Regression\Keijzer\KeijzerFunctionEleven.cs" />
    91     <Compile Include="Regression\Keijzer\KeijzerFunctionNine.cs" />
    92153    <Compile Include="Regression\Keijzer\KeijzerFunctionFive.cs" />
    93154    <Compile Include="Regression\Keijzer\KeijzerFunctionEight.cs" />
    94155    <Compile Include="Regression\Keijzer\KeijzerFunctionFifteen.cs" />
    95156    <Compile Include="Regression\Keijzer\KeijzerFunctionFour.cs" />
     157    <Compile Include="Regression\Keijzer\KeijzerFunctionNine.cs" />
     158    <Compile Include="Regression\Keijzer\KeijzerFunctionOne.cs" />
    96159    <Compile Include="Regression\Keijzer\KeijzerFunctionSeven.cs" />
    97160    <Compile Include="Regression\Keijzer\KeijzerFunctionSix.cs" />
    98     <Compile Include="Regression\Keijzer\KeijzerFunctionSixteen.cs" />
     161    <Compile Include="Regression\Keijzer\KeijzerFunctionTen.cs" />
    99162    <Compile Include="Regression\Keijzer\KeijzerFunctionThirteen.cs" />
     163    <Compile Include="Regression\Keijzer\KeijzerFunctionThree.cs" />
    100164    <Compile Include="Regression\Keijzer\KeijzerFunctionTwelve.cs" />
     165    <Compile Include="Regression\Keijzer\KeijzerFunctionTwo.cs" />
    101166    <Compile Include="Regression\Keijzer\KeijzerInstanceProvider.cs" />
    102167    <Compile Include="Regression\Korns\KornsFunctionEight.cs" />
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Plugin.cs.frame

    r7849 r8430  
    2323
    2424namespace HeuristicLab.Problems.Instances.DataAnalysis {
    25   [Plugin("HeuristicLab.Problems.Instances.DataAnalysis", "3.3.6.$WCREV$")]
     25  [Plugin("HeuristicLab.Problems.Instances.DataAnalysis", "3.3.7.$WCREV$")]
    2626  [PluginFile("HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll", PluginFileType.Assembly)]
    27   [PluginDependency("HeuristicLab.Common", "3.3")]
    2827  [PluginDependency("HeuristicLab.Core", "3.3")]
    2928  [PluginDependency("HeuristicLab.Data", "3.3")]
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Properties/AssemblyInfo.cs.frame

    r7849 r8430  
    5555// [assembly: AssemblyVersion("1.0.*")]
    5656[assembly: AssemblyVersion("3.3.0.0")]
    57 [assembly: AssemblyFileVersion("3.3.6.$WCREV$")]
     57[assembly: AssemblyFileVersion("3.3.7.$WCREV$")]
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/CSV/RegressionCSVInstanceProvider.cs

    r7860 r8430  
    2020#endregion
    2121
    22 
    2322using System;
    2423using System.Collections.Generic;
     24using System.IO;
     25using System.Linq;
     26using System.Text;
    2527using HeuristicLab.Problems.DataAnalysis;
     28
    2629namespace HeuristicLab.Problems.Instances.DataAnalysis {
    2730  public class RegressionCSVInstanceProvider : RegressionInstanceProvider {
    2831    public override string Name {
    29       get { return "CSV Problem Provider"; }
     32      get { return "CSV File"; }
    3033    }
    3134    public override string Description {
     
    4447      return new List<IDataDescriptor>();
    4548    }
    46 
    4749    public override IRegressionProblemData LoadData(IDataDescriptor descriptor) {
    4850      throw new NotImplementedException();
    4951    }
     52
     53    public override bool CanImportData {
     54      get { return true; }
     55    }
     56    public override IRegressionProblemData ImportData(string path) {
     57      TableFileParser csvFileParser = new TableFileParser();
     58      csvFileParser.Parse(path);
     59
     60      Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
     61      string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
     62
     63      IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
     64
     65      IRegressionProblemData regData = new RegressionProblemData(dataset, allowedInputVars, targetVar);
     66
     67      int trainingPartEnd = csvFileParser.Rows * 2 / 3;
     68      regData.TrainingPartition.Start = 0;
     69      regData.TrainingPartition.End = trainingPartEnd;
     70      regData.TestPartition.Start = trainingPartEnd;
     71      regData.TestPartition.End = csvFileParser.Rows;
     72
     73      int pos = path.LastIndexOf('\\');
     74      if (pos < 0)
     75        regData.Name = path;
     76      else {
     77        pos++;
     78        regData.Name = path.Substring(pos, path.Length - pos);
     79      }
     80      return regData;
     81    }
     82
     83    public override bool CanExportData {
     84      get { return true; }
     85    }
     86    public override void ExportData(IRegressionProblemData instance, string path) {
     87      StringBuilder strBuilder = new StringBuilder();
     88
     89      foreach (var variable in instance.InputVariables) {
     90        strBuilder.Append(variable + ";");
     91      }
     92      strBuilder.Remove(strBuilder.Length - 1, 1);
     93      strBuilder.AppendLine();
     94
     95      Dataset dataset = instance.Dataset;
     96
     97      for (int i = 0; i < dataset.Rows; i++) {
     98        for (int j = 0; j < dataset.Columns; j++) {
     99          strBuilder.Append(dataset.GetValue(i, j) + ";");
     100        }
     101        strBuilder.Remove(strBuilder.Length - 1, 1);
     102        strBuilder.AppendLine();
     103      }
     104
     105      using (StreamWriter writer = new StreamWriter(path)) {
     106        writer.Write(strBuilder);
     107      }
     108    }
    50109  }
    51110}
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Keijzer/KeijzerFunctionNine.cs

    r7860 r8430  
    2727  public class KeijzerFunctionNine : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Keijzer 9 f(x) = sqrt(x)"; } }
     29    public override string Name { get { return "Keijzer 9 f(x) = arcsinh(x)  i.e. ln(x + sqrt(x² + 1))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Improving Symbolic Regression with Interval Arithmetic and Linear Scaling" + Environment.NewLine
    3333        + "Authors: Maarten Keijzer" + Environment.NewLine
    34         + "Function: f(x) = sqrt(x)" + Environment.NewLine
     34        + "Function: f(x) = arcsinh(x)  i.e. ln(x + sqrt(x² + 1))" + Environment.NewLine
    3535        + "range(train): x = [0:1:100]" + Environment.NewLine
    3636        + "range(test): x = [0:0.1:100]" + Environment.NewLine
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 101; } }
    45     protected override int TestPartitionStart { get { return 101; } }
    46     protected override int TestPartitionEnd { get { return 1102; } }
     44    protected override int TrainingPartitionEnd { get { return 100; } }
     45    protected override int TestPartitionStart { get { return 100; } }
     46    protected override int TestPartitionEnd { get { return 1100; } }
    4747
    4848    protected override List<List<double>> GenerateValues() {
     
    5555      for (int i = 0; i < data[0].Count; i++) {
    5656        x = data[0][i];
    57         results.Add(Math.Sqrt(x));
     57        results.Add(Math.Log(x + Math.Sqrt(x*x + 1)));
    5858      }
    5959      data.Add(results);
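
    A note on the Keijzer 9 change: .NET 4 has no built-in Math.Asinh, so the generator uses the logarithmic identity arcsinh(x) = ln(x + sqrt(x² + 1)) directly. A tiny self-contained sanity check (the helper name Asinh is ours, not part of the committed code):

        using System;

        static class AsinhSketch {
          // Same expression as in GenerateValues above: arcsinh(x) = ln(x + sqrt(x^2 + 1)).
          static double Asinh(double x) {
            return Math.Log(x + Math.Sqrt(x * x + 1));
          }

          static void Main() {
            // sinh is the inverse of arcsinh, so this prints a value very close to 2.5.
            Console.WriteLine(Math.Sinh(Asinh(2.5)));
          }
        }
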
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Keijzer/KeijzerInstanceProvider.cs

    r7860 r8430  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Keijzer"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
    4040    public override IEnumerable<IDataDescriptor> GetDataDescriptors() {
    4141      List<IDataDescriptor> descriptorList = new List<IDataDescriptor>();
     42      descriptorList.Add(new KeijzerFunctionOne());
     43      descriptorList.Add(new KeijzerFunctionTwo());
     44      descriptorList.Add(new KeijzerFunctionThree());
    4245      descriptorList.Add(new KeijzerFunctionFour());
    4346      descriptorList.Add(new KeijzerFunctionFive());
     
    4649      descriptorList.Add(new KeijzerFunctionEight());
    4750      descriptorList.Add(new KeijzerFunctionNine());
     51      descriptorList.Add(new KeijzerFunctionTen());
    4852      descriptorList.Add(new KeijzerFunctionEleven());
    4953      descriptorList.Add(new KeijzerFunctionTwelve());
     
    5155      descriptorList.Add(new KeijzerFunctionFourteen());
    5256      descriptorList.Add(new KeijzerFunctionFifteen());
    53       descriptorList.Add(new KeijzerFunctionSixteen());
    5457      return descriptorList;
    5558    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionEight.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 6.87 + (11 * sqrt(7.23 * X0 * X3 * X4))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
    5960
    6061      double x0, x3, x4;
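
    The per-column ranges above (a deliberate deviation from the GP benchmarks paper, as the inline comments note) keep the argument of sqrt in Korns 8 non-negative; if all five inputs were drawn from [-50, 50], the target would be NaN whenever the product X0*X3*X4 is negative. A small illustration, using System.Random as a stand-in for ValueGenerator:

        using System;

        static class Korns8RangeSketch {
          static void Main() {
            var rnd = new Random(0);

            // X0, X3, X4 drawn from [0, 50] as in the revised generator:
            // the sqrt argument stays non-negative and y is finite.
            double x0 = rnd.NextDouble() * 50, x3 = rnd.NextDouble() * 50, x4 = rnd.NextDouble() * 50;
            Console.WriteLine(6.87 + 11 * Math.Sqrt(7.23 * x0 * x3 * x4));

            // With values from [-50, 50] the product can go negative and sqrt returns NaN.
            Console.WriteLine(6.87 + 11 * Math.Sqrt(7.23 * (-10.0) * 20.0 * 30.0)); // NaN
          }
        }
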
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionEleven.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 6.87 + (11 * cos(7.23 * X0 * X0 * X0))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFive.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 3.0 + (2.13 * log(X4))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
    5960
    6061      double x4;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFiveteen.cs

    r7849 r8430  
    2727  public class KornsFunctionFiveteen : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Korns 15 y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (log(X2) - tan(X3))))"; } }
     29    public override string Name { get { return "Korns 15 y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (ln(X2) - tan(X3))))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Accuracy in Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    34         + "Function: y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (log(X2) - tan(X3))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     34        + "Function: y = 12.0 - (6.0 * ((tan(X0) / exp(X1)) * (ln(X2) - tan(X3))))" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
    5960
    6061      double x0, x1, x2, x3;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFour.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = -2.3 + (0.13 * sin(X2))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
     51
    5252
    5353    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionFourteen.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 22.0 + (4.2 * ((cos(X0) - tan(X1)) * (tanh(X2) / sin(X3))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionNine.cs

    r7849 r8430  
    2727  public class KornsFunctionNine : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Korns 9 y = ((sqrt(X0) / log(X1)) * (exp(X2) / square(X3)))"; } }
     29    public override string Name { get { return "Korns 9 y = (sqrt(X0) / log(X1)) * (exp(X2) / X3²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Accuracy in Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    34         + "Function: y = ((sqrt(X0) / log(X1)) * (exp(X2) / square(X3)))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     34        + "Function: y = (sqrt(X0) / log(X1)) * (exp(X2) / X3²)" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
    5960
    6061      double x0, x1, x2, x3;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionOne.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 1.57 + (24.3 * X3)" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    3635        + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     36        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     37        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      38        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3939        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4040        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4747    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4848    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5252
    5353    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionSeven.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 213.80940889 - (213.80940889 * exp(-0.54723748542 * X0))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionSix.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 1.3 + (0.13 * sqrt(X0))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4847    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4948    protected override int TrainingPartitionStart { get { return 0; } }
    50     protected override int TrainingPartitionEnd { get { return 5000; } }
    51     protected override int TestPartitionStart { get { return 5000; } }
    52     protected override int TestPartitionEnd { get { return 10000; } }
     49    protected override int TrainingPartitionEnd { get { return 10000; } }
     50    protected override int TestPartitionStart { get { return 10000; } }
     51    protected override int TestPartitionEnd { get { return 20000; } }
    5352
    5453    protected override List<List<double>> GenerateValues() {
    5554      List<List<double>> data = new List<List<double>>();
    56       for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    57         data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList());
    58       }
     55      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, 0, 50).ToList()); // note: range is only [0,50] to prevent NaN values (deviates from gp benchmark paper)
     56      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     57      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     58      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
     59      data.Add(ValueGenerator.GenerateUniformDistributedValues(TestPartitionEnd, -50, 50).ToList());
    5960
    6061      double x0;
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTen.cs

    r7849 r8430  
    2727  public class KornsFunctionTen : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Korns 10 y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * square(X2))) / ((4.0 * cube(X3)) + (5.0 * quart(X4)))))"; } }
     29    public override string Name { get { return "Korns 10 y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * X2²)) / ((4.0 * X3³) + (5.0 * X4^4))))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Accuracy in Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    34         + "Function: y = 0.81 + (24.3 * (((2.0 * X1) + (3.0 * square(X2))) / ((4.0 * cube(X3)) + (5.0 * quart(X4)))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     34        + "Function: y =  0.81 + (24.3 * (((2.0 * X1) + (3.0 * X2²)) / ((4.0 * X3³) + (5.0 * X4^4))))" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionThirteen.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 32.0 - (3.0 * ((tan(X0) / tan(X1)) * (tan(X2) / tan(X3))))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionThree.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 1.57 + (24.3 * X3)" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTwelve.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 2.0 - (2.1 * (cos(9.8 * X0) * sin(1.3 * X4)))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsFunctionTwo.cs

    r7849 r8430  
    3333        + "Authors: Michael F. Korns" + Environment.NewLine
    3434        + "Function: y = 0.23 + (14.2 * ((X3 + X1) / (3.0 * X4)))" + Environment.NewLine
    35         + "Real Numbers: 3.45, -.982, 100.389, and all other real constants" + Environment.NewLine
    36         + "Row Features: x1, x2, x9, and all other features" + Environment.NewLine
    37         + "Binary Operators: +, -, *, /" + Environment.NewLine
    38         + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, log, exp" + Environment.NewLine
     35        + "Binary Operators: +, -, *, % (protected division)" + Environment.NewLine
     36        + "Unary Operators: sqrt, square, cube, cos, sin, tan, tanh, ln(|x|) (protected log), exp" + Environment.NewLine
      37        + "Constants: random finite 64-bit IEEE double" + Environment.NewLine
    3938        + "\"Our testing regimen uses only statistical best practices out-of-sample testing techniques. "
    4039        + "We test each of the test cases on matrices of 10000 rows by 1 to 5 columns with no noise. "
     
    4746    protected override string[] AllowedInputVariables { get { return new string[] { "X0", "X1", "X2", "X3", "X4" }; } }
    4847    protected override int TrainingPartitionStart { get { return 0; } }
    49     protected override int TrainingPartitionEnd { get { return 5000; } }
    50     protected override int TestPartitionStart { get { return 5000; } }
    51     protected override int TestPartitionEnd { get { return 10000; } }
     48    protected override int TrainingPartitionEnd { get { return 10000; } }
     49    protected override int TestPartitionStart { get { return 10000; } }
     50    protected override int TestPartitionEnd { get { return 20000; } }
    5251
    5352    protected override List<List<double>> GenerateValues() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Korns/KornsInstanceProvider.cs

    r7849 r8430  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Korns"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionEight.cs

    r7849 r8430  
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    3434        + "Function: F8 = Sqrt(x)" + Environment.NewLine
    35         + "Fitcases: 20 random points [0, 4]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     35        + "Fitcases: 20 random points in [0, 4]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionEleven.cs

    r7849 r8430  
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    3434        + "Function: F11 = x^y" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionFive.cs

    r7849 r8430  
    2727  public class NguyenFunctionFive : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F5 = sin(x^2)cos(x) - 1"; } }
     29    public override string Name { get { return "Nguyen F5 = sin(x²)cos(x) - 1"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F5 = sin(x^2)cos(x) - 1" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F5 = sin(x²)cos(x) - 1" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionFour.cs

    r7849 r8430  
    2727  public class NguyenFunctionFour : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F4 = x^6 + x^5 + x^4 + x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F4 = x^6 + x^5 + x^4 + x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F4 = x^6 + x^5 + x^4 + x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F4 = x^6 + x^5 + x^4 + x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionNine.cs

    r7849 r8430  
    2727  public class NguyenFunctionNine : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F9 = sin(x) + sin(y^2)"; } }
     29    public override string Name { get { return "Nguyen F9 = sin(x) + sin(y²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F9 = sin(x) + sin(y^2)" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F9 = sin(x) + sin(y²)" + Environment.NewLine
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionOne.cs

    r7849 r8430  
    2727  public class NguyenFunctionOne : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F1 = x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F1 = x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F1 = x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F1 = x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionSeven.cs

    r7849 r8430  
    2727  public class NguyenFunctionSeven : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F7 = log(x + 1) + log(x^2 + 1)"; } }
     29    public override string Name { get { return "Nguyen F7 = log(x + 1) + log(x² + 1)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F7 = log(x + 1) + log(x^2 + 1)" + Environment.NewLine
    35         + "Fitcases: 20 random points [0, 2]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F7 = ln(x + 1) + ln(x² + 1)" + Environment.NewLine
     35        + "Fitcases: 20 random points in [0, 2]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionSix.cs

    r7849 r8430  
    2727  public class NguyenFunctionSix : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F6 = sin(x) + sin(x + x^2)"; } }
     29    public override string Name { get { return "Nguyen F6 = sin(x) + sin(x + x²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F6 = sin(x) + sin(x + x^2)" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F6 = sin(x) + sin(x + x²)" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTen.cs

    r7849 r8430  
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    3434        + "Function: F10 = 2sin(x)cos(y)" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionThree.cs

    r7849 r8430  
    2727  public class NguyenFunctionThree : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F3 = x^5 + x^4 + x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F3 = x^5 + x^4 + x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F3 = x^5 + x^4 + x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F3 = x^5 + x^4 + x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTwelve.cs

    r7849 r8430  
    2727  public class NguyenFunctionTwelve : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F12 = x^4 - x^3 + y^2/2 - y"; } }
     29    public override string Name { get { return "Nguyen F12 = x^4 - x³ + y²/2 - y"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F12 = x^4 - x^3 + y^2/2 - y" + Environment.NewLine
    35         + "Fitcases: 100 random points ⊆ [0, 1]x[0, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F12 = x^4 - x³ + y²/2 - y" + Environment.NewLine
     35        + "Fitcases: 20 random points in [0, 1]x[0, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
     
    4242    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    44     protected override int TrainingPartitionEnd { get { return 100; } }
     44    protected override int TrainingPartitionEnd { get { return 20; } }
    4545    protected override int TestPartitionStart { get { return 500; } }
    4646    protected override int TestPartitionEnd { get { return 1000; } }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenFunctionTwo.cs

    r7849 r8430  
    2727  public class NguyenFunctionTwo : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Nguyen F2 = x^4 + x^3 + x^2 + x"; } }
     29    public override string Name { get { return "Nguyen F2 = x^4 + x³ + x² + x"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Semantically-based Crossover in Genetic Programming: Application to Real-valued Symbolic Regression" + Environment.NewLine
    3333        + "Authors: Nguyen Quang Uy · Nguyen Xuan Hoai · Michael O’Neill · R.I. McKay · Edgar Galvan-Lopez" + Environment.NewLine
    34         + "Function: F2 = x^4 + x^3 + x^2 + x" + Environment.NewLine
    35         + "Fitcases: 20 random points [-1, 1]" + Environment.NewLine
    36         + "Non-terminals: +, -, *, /, sin, cos, exp, log (protected version)" + Environment.NewLine
    37         + "Terminals: X, 1 for single variable problems, and X, Y for bivariable problems";
     34        + "Function: F2 = x^4 + x³ + x² + x" + Environment.NewLine
     35        + "Fitcases: 20 random points in [-1, 1]" + Environment.NewLine
     36        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     37        + "Terminals: only variables (no random constants)";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Nguyen/NguyenInstanceProvider.cs

    r7849 r8430  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Nguyen_et_al"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/RegressionInstanceProvider.cs

    r7860 r8430  
    2020#endregion
    2121
    22 using System;
    23 using System.Collections;
    24 using System.Collections.Generic;
    25 using System.IO;
    26 using System.Linq;
    27 using System.Text;
    2822using HeuristicLab.Problems.DataAnalysis;
    2923
    3024namespace HeuristicLab.Problems.Instances.DataAnalysis {
    31   public abstract class RegressionInstanceProvider : IProblemInstanceProvider<IRegressionProblemData> {
    32 
    33     public IRegressionProblemData LoadData(string path) {
    34       TableFileParser csvFileParser = new TableFileParser();
    35       csvFileParser.Parse(path);
    36 
    37       Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    38       string targetVar = csvFileParser.VariableNames.Last();
    39 
    40       IEnumerable<string> allowedInputVars = csvFileParser.VariableNames.Where(x => !x.Equals(targetVar));
    41 
    42       IRegressionProblemData regData = new RegressionProblemData(dataset, allowedInputVars, targetVar);
    43 
    44       int trainingPartEnd = csvFileParser.Rows * 2 / 3;
    45       regData.TrainingPartition.Start = 0;
    46       regData.TrainingPartition.End = trainingPartEnd;
    47       regData.TestPartition.Start = trainingPartEnd;
    48       regData.TestPartition.End = csvFileParser.Rows;
    49 
    50       int pos = path.LastIndexOf('\\');
    51       if (pos < 0)
    52         regData.Name = path;
    53       else {
    54         pos++;
    55         regData.Name = path.Substring(pos, path.Length - pos);
    56       }
    57       return regData;
    58     }
    59 
    60     public void SaveData(IRegressionProblemData instance, string path) {
    61       StringBuilder strBuilder = new StringBuilder();
    62 
    63       foreach (var variable in instance.InputVariables) {
    64         strBuilder.Append(variable + ";");
    65       }
    66       strBuilder.Remove(strBuilder.Length - 1, 1);
    67       strBuilder.AppendLine();
    68 
    69       Dataset dataset = instance.Dataset;
    70 
    71       for (int i = 0; i < dataset.Rows; i++) {
    72         for (int j = 0; j < dataset.Columns; j++) {
    73           strBuilder.Append(dataset.GetValue(i, j) + ";");
    74         }
    75         strBuilder.Remove(strBuilder.Length - 1, 1);
    76         strBuilder.AppendLine();
    77       }
    78 
    79       using (StreamWriter writer = new StreamWriter(path)) {
    80         writer.Write(strBuilder);
    81       }
    82     }
    83 
    84     public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
    85     public abstract IRegressionProblemData LoadData(IDataDescriptor descriptor);
    86 
    87     public abstract string Name { get; }
    88     public abstract string Description { get; }
    89     public abstract Uri WebLink { get; }
    90     public abstract string ReferencePublication { get; }
     25  public abstract class RegressionInstanceProvider : ProblemInstanceProvider<IRegressionProblemData> {
    9126  }
    9227}
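
    The CSV import/export code removed here moves into a shared base class whose declaration is not part of this diff. The outline below is only a presumed shape of ProblemInstanceProvider<T>, inferred from the members removed above and the import/export members visible elsewhere in this changeset; the default bodies are assumptions.

      // Presumed outline of the shared base class; inferred, not shown in this diff.
      public abstract class ProblemInstanceProvider<TData> {
        public abstract string Name { get; }
        public abstract string Description { get; }
        public abstract Uri WebLink { get; }
        public abstract string ReferencePublication { get; }

        public abstract IEnumerable<IDataDescriptor> GetDataDescriptors();
        public abstract TData LoadData(IDataDescriptor descriptor);

        // Import/export is now opt-in per provider; these defaults are assumed.
        public virtual bool CanImportData { get { return false; } }
        public virtual TData ImportData(string path) { throw new NotSupportedException(); }
        public virtual bool CanExportData { get { return false; } }
        public virtual void ExportData(TData instance, string path) { throw new NotSupportedException(); }
      }
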
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/ResourceRegressionInstanceProvider.cs

    r7851 r8430  
    7070
    7171        Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    72         string targetVar = csvFileParser.VariableNames.Last();
    73         IEnumerable<string> allowedInputVars = csvFileParser.VariableNames.Where(x => !x.Equals(targetVar));
     72        string targetVar = csvFileParser.VariableNames.Where(x => dataset.DoubleVariables.Contains(x)).Last();
     73        IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
    7474
    7575        IRegressionProblemData regData = new RegressionProblemData(dataset, allowedInputVars, targetVar);
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/ValueGenerator.cs

    r7849 r8430  
    2929    private static FastRandom rand = new FastRandom();
    3030
     31    /// <summary>
     32    /// Generates a sequence of evenly spaced points between start and end (inclusive!).
     33    /// </summary>
     34    /// <param name="start">The smallest and first value of the sequence.</param>
     35    /// <param name="end">The largest and last value of the sequence.</param>
     36    /// <param name="stepWidth">The step size between subsequent values.</param>
      37    /// <returns>A sequence of values from start to end (inclusive)</returns>
    3138    public static IEnumerable<double> GenerateSteps(double start, double end, double stepWidth) {
    32       int steps = (int)Math.Round(((end - start) / stepWidth) + 1);
    33       for (int i = 0; i < steps; i++)
    34         yield return start + i * stepWidth;
      39      if (start > end) throw new ArgumentException("start must be less than or equal to end.");
      40      if (stepWidth <= 0) throw new ArgumentException("stepWidth must be larger than zero.", "stepWidth");
     41      double x = start;
     42      while (x <= end) {
     43        yield return x;
     44        x += stepWidth;
     45      }
    3546    }
    3647
    37     public static IEnumerable<double> GenerateUniformDistributedValues(int amount, double start, double end) {
    38       for (int i = 0; i < amount; i++)
    39         yield return rand.NextDouble() * (end - start) + start;
     48    /// <summary>
     49    /// Generates uniformly distributed values between start and end (inclusive!)
     50    /// </summary>
     51    /// <param name="n">Number of values to generate.</param>
     52    /// <param name="start">The lower value (inclusive)</param>
     53    /// <param name="end">The upper value (inclusive)</param>
     54    /// <returns>An enumerable including n values in [start, end]</returns>
     55    public static IEnumerable<double> GenerateUniformDistributedValues(int n, double start, double end) {
     56      for (int i = 0; i < n; i++) {
     57        // we need to return a random value including end.
     58        // so we cannot use rand.NextDouble() as it returns a value strictly smaller than 1.
     59        double r = rand.NextUInt() / (double)uint.MaxValue;    // r \in [0,1]
     60        yield return r * (end - start) + start;
     61      }
    4062    }
    4163
    42     public static IEnumerable<double> GenerateNormalDistributedValues(int amount, double mu, double sigma) {
    43       for (int i = 0; i < amount; i++)
     64    /// <summary>
     65    /// Generates normally distributed values sampling from N(mu, sigma)
     66    /// </summary>
     67    /// <param name="n">Number of values to generate.</param>
     68    /// <param name="mu">The mu parameter of the normal distribution</param>
     69    /// <param name="sigma">The sigma parameter of the normal distribution</param>
     70    /// <returns>An enumerable including n values ~ N(mu, sigma)</returns>
     71    public static IEnumerable<double> GenerateNormalDistributedValues(int n, double mu, double sigma) {
     72      for (int i = 0; i < n; i++)
    4473        yield return NormalDistributedRandom.NextDouble(rand, mu, sigma);
    4574    }
     
    82111      }
    83112    }
    84 
    85     //recursive approach
    86     /*public static IEnumerable<IEnumerable<double>> GenerateAllCombinationsOfValuesInLists(List<List<double>> lists) {
    87       int cur = 0;
    88       List<double> curCombination = new List<double>();
    89       List<List<double>> allCombinations = new List<List<double>>();
    90       for (int i = 0; i < lists.Count; i++) {
    91         allCombinations.Add(new List<double>());
    92       }
    93       if (lists.Count() > cur) {
    94         foreach (var item in lists[cur]) {
    95           curCombination.Clear();
    96           curCombination.Add(item);
    97           GetCombination(lists, cur + 1, curCombination, allCombinations);
    98         }
    99       }
    100       return allCombinations;
    101     }
    102 
    103     private static void GetCombination(List<List<double>> lists, int cur, List<double> curCombinations, List<List<double>> allCombinations) {
    104       if (lists.Count > cur) {
    105         foreach (var item in lists[cur]) {
    106           if (curCombinations.Count > cur) {
    107             curCombinations.RemoveAt(cur);
    108           }
    109           curCombinations.Add(item);
    110           GetCombination(lists, cur + 1, curCombinations, allCombinations);
    111         }
    112       } else {
    113         for (int i = 0; i < curCombinations.Count; i++) {
    114           allCombinations[i].Add(curCombinations[i]);
    115         }
    116       }
    117     }         */
    118 
    119     //original
    120     /*public static IEnumerable<IEnumerable<double>> GenerateAllCombinationsOfValuesInLists(List<List<double>> sets) {
    121 
    122       var combinations = new List<List<double>>();
    123 
    124       foreach (var value in sets[0])
    125         combinations.Add(new List<double> { value });
    126 
    127       foreach (var set in sets.Skip(1))
    128         combinations = AddListToCombinations(combinations, set);
    129 
    130       IEnumerable<IEnumerable<double>> res = (from i in Enumerable.Range(0, sets.Count)
    131                                               select (from list in combinations
    132                                                       select list.ElementAt(i)));
    133 
    134       return res;
    135     }
    136 
    137     private static List<List<double>> AddListToCombinations
    138          (List<List<double>> combinations, List<double> set) {
    139       var newCombinations = from value in set
    140                             from combination in combinations
    141                             select new List<double>(combination) { value };
    142 
    143       return newCombinations.ToList();
    144     }    */
    145113  }
    146114}
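
    For reference, a brief usage sketch of the revised generators (not part of the changeset; assumes using System.Collections.Generic, using System.Linq, and the HeuristicLab.Problems.Instances.DataAnalysis namespace). The comments hold because 0.25 is exactly representable as a double, so the accumulated steps reach the inclusive end value.

      // GenerateSteps now includes the end value: 0, 0.25, 0.5, ..., 2.0 (9 values).
      List<double> grid = ValueGenerator.GenerateSteps(0, 2, 0.25).ToList();

      // GenerateUniformDistributedValues now samples the closed interval [0.3, 4]:
      // the NextUInt()-based scaling can return the upper bound, unlike NextDouble().
      List<double> samples = ValueGenerator.GenerateUniformDistributedValues(5, 0.3, 4).ToList();
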
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Various/SpatialCoevolution.cs

    r7849 r8430  
    2727  public class SpatialCoevolution : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Spatial co-evolution F(x,y) = 1/(1+power(x,-4)) + 1/(1+pow(y,-4))"; } }
     29    public override string Name { get { return "Spatial co-evolution F(x,y) = 1/(1 + x^(-4)) + 1/(1 + y^(-4))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Evolutionary consequences of coevolving targets" + Environment.NewLine
    3333        + "Authors: Ludo Pagie and Paulien Hogeweg" + Environment.NewLine
    34         + "Function: F(x,y) = 1/(1+power(x,-4)) + 1/(1+pow(y,-4))" + Environment.NewLine
    35         + "Terminal set: x, y" + Environment.NewLine
     34        + "Function: F(x,y) = 1/(1 + x^(-4)) + 1/(1 + y^(-4))" + Environment.NewLine
     35        + "Non-terminals: +, -, *, % (protected division), sin, cos, exp, ln(|x|) (protected log)" + Environment.NewLine
     36        + "Terminals: only variables (no random constants)" + Environment.NewLine
    3637        + "The fitness of a solution is defined as the mean of the absolute differences between "
    3738        + "the target function and the solution over all problems on the basis of which it is evaluated. "
    3839        + "A solution is considered completely ’correct’ if, for all 676 problems in the ’complete’ "
    3940        + "problem set used in the static evaluation scheme, the absolute difference between "
    40         + "solution and target function is less than 0:01 (this is a so-called hit).";
     41        + "solution and target function is less than 0.01 (this is a so-called hit).";
    4142      }
    4243    }
     
    4546    protected override string[] AllowedInputVariables { get { return new string[] { "X", "Y" }; } }
    4647    protected override int TrainingPartitionStart { get { return 0; } }
    47     protected override int TrainingPartitionEnd { get { return 250; } }
    48     protected override int TestPartitionStart { get { return 250; } }
    49     protected override int TestPartitionEnd { get { return 500; } }
     48    protected override int TrainingPartitionEnd { get { return 676; } }
     49    protected override int TestPartitionStart { get { return 676; } }
     50    protected override int TestPartitionEnd { get { return 1676; } }
    5051
    5152    protected override List<List<double>> GenerateValues() {
    5253      List<List<double>> data = new List<List<double>>();
    5354
    54       List<double> oneVariableTestData = ValueGenerator.GenerateSteps(-5, 5, 0.4).ToList();
    55       List<List<double>> testData = new List<List<double>>() { oneVariableTestData, oneVariableTestData };
    56       var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(testData).ToList<IEnumerable<double>>();
     55      List<double> evenlySpacedSequence = ValueGenerator.GenerateSteps(-5, 5, 0.4).ToList();
     56      List<List<double>> trainingData = new List<List<double>>() { evenlySpacedSequence, evenlySpacedSequence };
     57      var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(trainingData).ToList();
    5758
    5859      for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    59         data.Add(ValueGenerator.GenerateUniformDistributedValues(1000, -5, 5).ToList());
    60         data[i].AddRange(combinations[i]);
     60        data.Add(combinations[i].ToList());
     61        data[i].AddRange(ValueGenerator.GenerateUniformDistributedValues(1000, -5, 5).ToList());
    6162      }
    6263
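
    For orientation, the reordered data layout matches the new partition boundaries: the step sequence from -5 to 5 with width 0.4 is intended to give 26 values per axis, so the grid of all combinations contributes 26 × 26 = 676 rows (training partition 0 to 676), followed by the 1000 uniformly drawn points (test partition 676 to 1676).
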
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/KotanchekFunction.cs

    r7849 r8430  
    2727  public class KotanchekFunction : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Kotanchek"; } }
      29    public override string Name { get { return "Vladislavleva-1 F1(X1, X2) = exp(-(X1 - 1)²) / (1.2 + (X2 - 2.5)²)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F1(X1, X2) = e^-(X1 - 1)^2 / (1.2 + (X2 -2.5)^2" + Environment.NewLine
      34        + "Function: F1(X1, X2) = exp(-(X1 - 1)²) / (1.2 + (X2 - 2.5)²)" + Environment.NewLine
    3535        + "Training Data: 100 points X1, X2 = Rand(0.3, 4)" + Environment.NewLine
    3636        + "Test Data: 2026 points (X1, X2) = (-0.2:0.1:4.2)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x";
     37        + "Function Set: +, -, *, /, square, e^x, e^-x, x^eps, x + eps, x * eps";
    3838      }
    3939    }
     
    4343    protected override int TrainingPartitionStart { get { return 0; } }
    4444    protected override int TrainingPartitionEnd { get { return 100; } }
    45     protected override int TestPartitionStart { get { return 1000; } }
    46     protected override int TestPartitionEnd { get { return 3025; } }
     45    protected override int TestPartitionStart { get { return 100; } }
     46    protected override int TestPartitionEnd { get { return 2126; } }
    4747
    4848    protected override List<List<double>> GenerateValues() {
     
    5353      var combinations = ValueGenerator.GenerateAllCombinationsOfValuesInLists(testData).ToList<IEnumerable<double>>();
    5454      for (int i = 0; i < AllowedInputVariables.Count(); i++) {
    55         data.Add(ValueGenerator.GenerateUniformDistributedValues(1000, 0.3, 4).ToList());
     55        data.Add(ValueGenerator.GenerateUniformDistributedValues(100, 0.3, 4).ToList());
    5656        data[i].AddRange(combinations[i]);
    5757      }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RationalPolynomialThreeDimensional.cs

    r7849 r8430  
    2727  public class RationalPolynomialThreeDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva RatPol3D"; } }
     29    public override string Name { get { return "Vladislavleva-5 F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2² * (X1 - 10))"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2^2 * (X1 - 10))" + Environment.NewLine
     34        + "Function: F5(X1, X2, X3) = 30 * ((X1 - 1) * (X3 -1)) / (X2² * (X1 - 10))" + Environment.NewLine
    3535        + "Training Data: 300 points X1, X3 = Rand(0.05, 2), X2 = Rand(1, 2)" + Environment.NewLine
    3636        + "Test Data: 2701 points X1, X3 = (-0.05:0.15:2.1), X2 = (0.95:0.1:2.05)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real";
     37        + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps";
    3838      }
    3939    }
     
    4949      List<List<double>> data = new List<List<double>>();
    5050
    51       int amountOfPoints = 1000;
    52       data.Add(ValueGenerator.GenerateUniformDistributedValues(amountOfPoints, 0.05, 2).ToList());
    53       data.Add(ValueGenerator.GenerateUniformDistributedValues(amountOfPoints, 1, 2).ToList());
    54       data.Add(ValueGenerator.GenerateUniformDistributedValues(amountOfPoints, 0.05, 2).ToList());
     51      int n = 1000;
     52      data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 0.05, 2).ToList());
     53      data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 1, 2).ToList());
     54      data.Add(ValueGenerator.GenerateUniformDistributedValues(n, 0.05, 2).ToList());
    5555
    5656      List<List<double>> testData = new List<List<double>>() {
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RationalPolynomialTwoDimensional.cs

    r7849 r8430  
    2727  public class RationalPolynomialTwoDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva RatPol2D"; } }
     29    public override string Name { get { return "Vladislavleva-8 F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)³ - (X2 -3)) / ((X2 - 2)^4 + 10)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)^3 - (X2 -3)) / ((X2 - 2)^4 + 10)" + Environment.NewLine
     34        + "Function: F8(X1, X2) = ((X1 - 3)^4 + (X2 - 3)³ - (X2 -3)) / ((X2 - 2)^4 + 10)" + Environment.NewLine
    3535        + "Training Data: 50 points X1, X2 = Rand(0.05, 6.05)" + Environment.NewLine
    3636        + "Test Data: 1157 points X1, X2 = (-0.25:0.2:6.35)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real";
     37        + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/RippleFunction.cs

    r7849 r8430  
    2727  public class RippleFunction : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Ripple"; } }
      29    public override string Name { get { return "Vladislavleva-7 F7(X1, X2) = (X1 - 3)(X2 - 3) + 2 * sin((X1 - 4)(X2 - 4))"; } }
    3030    public override string Description {
    3131      get {
     
    3535        + "Training Data: 300 points X1, X2 = Rand(0.05, 6.05)" + Environment.NewLine
    3636        + "Test Data: 1000 points X1, X2 = Rand(-0.25, 6.35)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)";
      37        + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SalutowiczFunctionOneDimensional.cs

    r7849 r8430  
    2727  public class SalutowiczFunctionOneDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Salutowicz"; } }
     29    public override string Name { get { return "Vladislavleva-2 F2(X) = exp(-X) * X³ * cos(X) * sin(X) * (cos(X)sin(X)² - 1)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F2(X) = e^-X * X^3 * cos(X) * sin(X) * (cos(X)sin(X)^2 - 1)" + Environment.NewLine
     34        + "Function: F2(X) = exp(-X) * X³ * cos(X) * sin(X) * (cos(X)sin(X)² - 1)" + Environment.NewLine
    3535        + "Training Data: 100 points X = (0.05:0.1:10)" + Environment.NewLine
    3636        + "Test Data: 221 points X = (-0.5:0.05:10.5)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)";
      37        + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SalutowiczFunctionTwoDimensional.cs

    r7849 r8430  
    2727  public class SalutowiczFunctionTwoDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva Salutowicz2D"; } }
     29    public override string Name { get { return "Vladislavleva-3 F3(X1, X2) = exp(-X1) * X1³ * cos(X1) * sin(X1) * (cos(X1)sin(X1)² - 1)(X2 - 5)"; } }
    3030    public override string Description {
    3131      get {
    3232        return "Paper: Order of Nonlinearity as a Complexity Measure for Models Generated by Symbolic Regression via Pareto Genetic Programming " + Environment.NewLine
    3333        + "Authors: Ekaterina J. Vladislavleva, Member, IEEE, Guido F. Smits, Member, IEEE, and Dick den Hertog" + Environment.NewLine
    34         + "Function: F3(X1, X2) = e^-X1 * X1^3 * cos(X1) * sin(X1) * (cos(X1)sin(X1)^2 - 1)(X2 - 5)" + Environment.NewLine
     34        + "Function: F3(X1, X2) = exp(-X1) * X1³ * cos(X1) * sin(X1) * (cos(X1)sin(X1)² - 1)(X2 - 5)" + Environment.NewLine
    3535        + "Training Data: 601 points X1 = (0.05:0.1:10), X2 = (0.05:2:10.05)" + Environment.NewLine
    36         + "Test Data: 2554 points X1 = (-0.5:0.05:10.5), X2 = (-0.5:0.5:10.5)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x, sin(x), cos(x)" + Environment.NewLine + Environment.NewLine
    38         + "Important: The stepwidth of the variable X1 in the test partition has been set to 0.1, to fit the amount of data points.";
     36        + "Test Data: 4840 points X1 = (-0.5:0.05:10.5), X2 = (-0.5:0.5:10.5)" + Environment.NewLine
      37        + "Function Set: +, -, *, /, square, e^x, e^-x, sin(x), cos(x), x^eps, x + eps, x * eps";
    3938      }
    4039    }
     
    4544    protected override int TrainingPartitionEnd { get { return 601; } }
    4645    protected override int TestPartitionStart { get { return 601; } }
    47     protected override int TestPartitionEnd { get { return 3155; } }
     46    protected override int TestPartitionEnd { get { return 5441; } }
    4847
    4948    protected override List<List<double>> GenerateValues() {
     
    5554
    5655      List<List<double>> testData = new List<List<double>>() {
    57         ValueGenerator.GenerateSteps(-0.5, 10.5, 0.1).ToList(),
     56        ValueGenerator.GenerateSteps(-0.5, 10.5, 0.05).ToList(),
    5857        ValueGenerator.GenerateSteps(-0.5, 10.5, 0.5).ToList()
    5958      };
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/SineCosineFunction.cs

    r7849 r8430  
    2727  public class SineCosineFunction : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva SineCosine"; } }
     29    public override string Name { get { return "Vladislavleva-6 F6(X1, X2) = 6 * sin(X1) * cos(X2)"; } }
    3030    public override string Description {
    3131      get {
     
    3535        + "Training Data: 30 points X1, X2 = Rand(0.1, 5.9)" + Environment.NewLine
    3636        + "Test Data: 961 points X1, X2 = (-0.05:0.02:6.05)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real, e^x, e^-x";
     37        + "Function Set: +, -, *, /, square, e^x, e^-x, x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/UnwrappedBallFunctionFiveDimensional.cs

    r7849 r8430  
    2727  public class UnwrappedBallFunctionFiveDimensional : ArtificialRegressionDataDescriptor {
    2828
    29     public override string Name { get { return "Vladislavleva UBall5D"; } }
      29    public override string Name { get { return "Vladislavleva-4 F4(X1, X2, X3, X4, X5) = 10 / (5 + Sum((Xi - 3)²))"; } }
    3030    public override string Description {
    3131      get {
     
    3535        + "Training Data: 1024 points Xi = Rand(0.05, 6.05)" + Environment.NewLine
    3636        + "Test Data: 5000 points Xi = Rand(-0.25, 6.35)" + Environment.NewLine
    37         + "Function Set: +, -, *, /, sqaure, x^real, x + real, x + real";
     37        + "Function Set: +, -, *, /, square, x^eps, x + eps, x * eps";
    3838      }
    3939    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Vladislavleva/VladislavlevaInstanceProvider.cs

    r7849 r8430  
    3232    }
    3333    public override Uri WebLink {
    34       get { return new Uri("http://groups.csail.mit.edu/EVO-DesignOpt/GPBenchmarks/"); }
     34      get { return new Uri("http://www.gpbenchmarks.org/wiki/index.php?title=Problem_Classification#Vladislavleva_et_al"); }
    3535    }
    3636    public override string ReferencePublication {
    37       get { return ""; }
     37      get { return "McDermott et al., 2012 \"Genetic Programming Needs Better Benchmarks\", in Proc. of GECCO 2012."; }
    3838    }
    3939
     
    4141      List<IDataDescriptor> descriptorList = new List<IDataDescriptor>();
    4242      descriptorList.Add(new KotanchekFunction());
    43       descriptorList.Add(new RationalPolynomialTwoDimensional());
    44       descriptorList.Add(new RationalPolynomialThreeDimensional());
    45       descriptorList.Add(new RippleFunction());
    4643      descriptorList.Add(new SalutowiczFunctionOneDimensional());
    4744      descriptorList.Add(new SalutowiczFunctionTwoDimensional());
    4845      descriptorList.Add(new UnwrappedBallFunctionFiveDimensional());
     46      descriptorList.Add(new RationalPolynomialThreeDimensional());
     47      descriptorList.Add(new SineCosineFunction());
     48      descriptorList.Add(new RippleFunction());
     49      descriptorList.Add(new RationalPolynomialTwoDimensional());
    4950      return descriptorList;
    5051    }
  • branches/HeuristicLab.TimeSeries/HeuristicLab.Problems.Instances.DataAnalysis/3.3/TimeSeries/TimeSeriesPrognosisInstanceProvider.cs

    r7998 r8430  
    2929namespace HeuristicLab.Problems.Instances.DataAnalysis {
    3030  public abstract class TimeSeriesPrognosisInstanceProvider : IProblemInstanceProvider<ITimeSeriesPrognosisProblemData> {
     31    public bool CanImportData { get { return true; } }
     32    public bool CanExportData { get { return true; } }
    3133
    32     public ITimeSeriesPrognosisProblemData LoadData(string path) {
     34
     35    public ITimeSeriesPrognosisProblemData ImportData(string path) {
    3336      TableFileParser csvFileParser = new TableFileParser();
    3437      csvFileParser.Parse(path);
     
    3639      Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    3740      string targetVar = csvFileParser.VariableNames.Last();
    38      
     41
    3942      IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));
    4043
     
    5760    }
    5861
    59     public void SaveData(ITimeSeriesPrognosisProblemData instance, string path) {
     62    public void ExportData(ITimeSeriesPrognosisProblemData instance, string path) {
    6063      StringBuilder strBuilder = new StringBuilder();
    6164
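
    With the rename from LoadData/SaveData to ImportData/ExportData, callers are presumably meant to check the new capability flags before importing. A hypothetical usage sketch (the concrete provider type and file path are illustrative only):

      // Usage sketch; TimeSeriesPrognosisCSVInstanceProvider is a hypothetical concrete subclass.
      var provider = new TimeSeriesPrognosisCSVInstanceProvider();
      if (provider.CanImportData) {
        ITimeSeriesPrognosisProblemData problemData = provider.ImportData(@"C:\data\mySeries.csv");
        // ... configure a time series prognosis problem with problemData ...
      }
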