
Timestamp: 09/14/12 18:58:15 (12 years ago)
Author: gkronber
Message: #1847 merged r8205:8635 from trunk into branch
Location: branches/GP-MoveOperators
Files: 21 edited, 7 copied

  • branches/GP-MoveOperators

  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4

    • Property svn:ignore changed
        old: *.vs10x, Plugin.cs
        new: *.vs10x, Plugin.cs, *.user
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/CrossValidation.cs

    r7738 r8660  
    447447        problemDataClone.TestPartition.Start = SamplesStart.Value; problemDataClone.TestPartition.End = SamplesEnd.Value;
    448448        // clone models
    449         var ensembleSolution = new ClassificationEnsembleSolution(
    450           solutions.Value.Select(x => cloner.Clone(x.Model)),
    451           problemDataClone,
    452           solutions.Value.Select(x => cloner.Clone(x.ProblemData.TrainingPartition)),
    453           solutions.Value.Select(x => cloner.Clone(x.ProblemData.TestPartition)));
     449        var ensembleSolution = new ClassificationEnsembleSolution(problemDataClone);
     450        ensembleSolution.AddClassificationSolutions(solutions.Value);
    454451
    455452        aggregatedResults.Add(new Result(solutions.Key + " (ensemble)", ensembleSolution));
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r8085 r8660  
    101101  </PropertyGroup>
    102102  <ItemGroup>
    103     <Reference Include="ALGLIB-3.5.0, Version=3.5.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    104       <HintPath>..\..\bin\ALGLIB-3.5.0.dll</HintPath>
     103    <Reference Include="ALGLIB-3.6.0, Version=3.6.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     104      <HintPath>..\..\bin\ALGLIB-3.6.0.dll</HintPath>
    105105      <Private>False</Private>
    106106    </Reference>
    107     <Reference Include="LibSVM-1.6.3, Version=1.6.3.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    108       <HintPath>..\..\bin\LibSVM-1.6.3.dll</HintPath>
     107    <Reference Include="LibSVM-3.12, Version=3.12.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     108      <HintPath>..\..\bin\LibSVM-3.12.dll</HintPath>
    109109      <Private>False</Private>
    110110    </Reference>
     
    113113      <RequiredTargetFramework>3.5</RequiredTargetFramework>
    114114    </Reference>
    115     <Reference Include="System.Data" />
    116115    <Reference Include="System.Drawing" />
    117     <Reference Include="System.Xml" />
    118116  </ItemGroup>
    119117  <ItemGroup>
     
    122120    </Compile>
    123121    <Compile Include="FixedDataAnalysisAlgorithm.cs" />
     122    <Compile Include="GaussianProcess\GaussianProcessDiscriminantFunctionClassificationSolution.cs" />
     123    <Compile Include="GaussianProcess\GaussianProcessDiscriminantFunctionClassificationModel.cs" />
     124    <Compile Include="GaussianProcess\GaussianProcessClassificationSolutionCreator.cs" />
     125    <Compile Include="GaussianProcess\GaussianProcessClassificationModelCreator.cs" />
     126    <Compile Include="GaussianProcess\GaussianProcessClassification.cs" />
     127    <Compile Include="GaussianProcess\CovarianceProduct.cs" />
     128    <Compile Include="GaussianProcess\CovarianceScale.cs" />
     129    <Compile Include="GaussianProcess\CovarianceRationalQuadraticArd.cs" />
     130    <Compile Include="GaussianProcess\CovarianceRationalQuadraticIso.cs" />
     131    <Compile Include="GaussianProcess\CovarianceSquaredExponentialArd.cs" />
     132    <Compile Include="GaussianProcess\CovarianceSquaredExponentialIso.cs" />
     133    <Compile Include="GaussianProcess\HyperParameter.cs" />
     134    <Compile Include="GaussianProcess\CovarianceMaternIso.cs" />
     135    <Compile Include="GaussianProcess\CovarianceLinearArd.cs" />
     136    <Compile Include="GaussianProcess\CovarianceNoise.cs" />
     137    <Compile Include="GaussianProcess\CovarianceConst.cs" />
     138    <Compile Include="GaussianProcess\MeanProduct.cs" />
     139    <Compile Include="GaussianProcess\MeanSum.cs" />
     140    <Compile Include="GaussianProcess\CovarianceSum.cs" />
     141    <Compile Include="GaussianProcess\CovariancePeriodic.cs" />
     142    <Compile Include="GaussianProcess\GaussianProcessHyperparameterInitializer.cs" />
     143    <Compile Include="GaussianProcess\GaussianProcessRegressionSolutionCreator.cs" />
     144    <Compile Include="GaussianProcess\GaussianProcessRegressionModelCreator.cs" />
     145    <Compile Include="GaussianProcess\CovarianceLinear.cs" />
     146    <Compile Include="GaussianProcess\GaussianProcessModelCreator.cs">
     147      <SubType>Code</SubType>
     148    </Compile>
     149    <Compile Include="GaussianProcess\MeanLinear.cs" />
     150    <Compile Include="GaussianProcess\Util.cs" />
     151    <Compile Include="GaussianProcess\MeanZero.cs" />
     152    <Compile Include="GaussianProcess\MeanConst.cs" />
     153    <Compile Include="GaussianProcess\IMeanFunction.cs" />
     154    <Compile Include="GaussianProcess\GaussianProcessModel.cs" />
     155    <Compile Include="GaussianProcess\GaussianProcessRegression.cs" />
     156    <Compile Include="GaussianProcess\GaussianProcessRegressionSolution.cs" />
     157    <Compile Include="GaussianProcess\ICovarianceFunction.cs" />
     158    <Compile Include="Interfaces\IGaussianProcessModel.cs" />
     159    <Compile Include="Interfaces\IGaussianProcessSolution.cs" />
     160    <Compile Include="Interfaces\INcaClassificationSolution.cs" />
     161    <Compile Include="Interfaces\INcaModel.cs" />
    124162    <Compile Include="Interfaces\INearestNeighbourClassificationSolution.cs" />
    125163    <Compile Include="Interfaces\INearestNeighbourRegressionSolution.cs" />
     
    144182    </Compile>
    145183    <Compile Include="Linear\AlglibUtil.cs" />
     184    <Compile Include="Linear\Scaling.cs" />
    146185    <Compile Include="Linear\LinearDiscriminantAnalysis.cs" />
    147186    <Compile Include="Linear\LinearRegression.cs">
     
    151190    <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" />
    152191    <Compile Include="Linear\MultinomialLogitModel.cs" />
     192    <Compile Include="Nca\Initialization\INcaInitializer.cs" />
     193    <Compile Include="Nca\Initialization\LdaInitializer.cs" />
     194    <Compile Include="Nca\Initialization\PcaInitializer.cs" />
     195    <Compile Include="Nca\Initialization\RandomInitializer.cs" />
     196    <Compile Include="Nca\Matrix.cs" />
     197    <Compile Include="Nca\NcaAlgorithm.cs" />
     198    <Compile Include="Nca\NcaClassificationSolution.cs" />
     199    <Compile Include="Nca\NcaModel.cs" />
    153200    <Compile Include="NearestNeighbour\NearestNeighbourClassification.cs" />
    154201    <Compile Include="NearestNeighbour\NearestNeighbourClassificationSolution.cs" />
     
    187234  </ItemGroup>
    188235  <ItemGroup>
     236    <ProjectReference Include="..\..\HeuristicLab.Algorithms.GradientDescent\3.3\HeuristicLab.Algorithms.GradientDescent.csproj">
     237      <Project>{1256B945-EEA9-4BE4-9880-76B5B113F089}</Project>
     238      <Name>HeuristicLab.Algorithms.GradientDescent</Name>
     239      <Private>False</Private>
     240    </ProjectReference>
     241    <ProjectReference Include="..\..\HeuristicLab.Analysis\3.3\HeuristicLab.Analysis-3.3.csproj">
     242      <Project>{887425B4-4348-49ED-A457-B7D2C26DDBF9}</Project>
     243      <Name>HeuristicLab.Analysis-3.3</Name>
     244      <Private>False</Private>
     245    </ProjectReference>
    189246    <ProjectReference Include="..\..\HeuristicLab.Collections\3.3\HeuristicLab.Collections-3.3.csproj">
    190247      <Project>{958B43BC-CC5C-4FA2-8628-2B3B01D890B6}</Project>
     
    212269      <Private>False</Private>
    213270    </ProjectReference>
     271    <ProjectReference Include="..\..\HeuristicLab.Encodings.RealVectorEncoding\3.3\HeuristicLab.Encodings.RealVectorEncoding-3.3.csproj">
     272      <Project>{BB6D334A-4BB6-4674-9883-31A6EBB32CAB}</Project>
     273      <Name>HeuristicLab.Encodings.RealVectorEncoding-3.3</Name>
     274      <Private>False</Private>
     275    </ProjectReference>
    214276    <ProjectReference Include="..\..\HeuristicLab.Encodings.SymbolicExpressionTreeEncoding\3.4\HeuristicLab.Encodings.SymbolicExpressionTreeEncoding-3.4.csproj">
    215277      <Project>{06D4A186-9319-48A0-BADE-A2058D462EEA}</Project>
     
    217279      <Private>False</Private>
    218280    </ProjectReference>
     281    <ProjectReference Include="..\..\HeuristicLab.Operators\3.3\HeuristicLab.Operators-3.3.csproj">
     282      <Project>{23DA7FF4-D5B8-41B6-AA96-F0561D24F3EE}</Project>
     283      <Name>HeuristicLab.Operators-3.3</Name>
     284      <Private>False</Private>
     285    </ProjectReference>
    219286    <ProjectReference Include="..\..\HeuristicLab.Optimization\3.3\HeuristicLab.Optimization-3.3.csproj">
    220287      <Project>{14AB8D24-25BC-400C-A846-4627AA945192}</Project>
     
    260327      <Project>{3540E29E-4793-49E7-8EE2-FEA7F61C3994}</Project>
    261328      <Name>HeuristicLab.Problems.Instances-3.3</Name>
     329      <Private>False</Private>
     330    </ProjectReference>
     331    <ProjectReference Include="..\..\HeuristicLab.Random\3.3\HeuristicLab.Random-3.3.csproj">
     332      <Project>{F4539FB6-4708-40C9-BE64-0A1390AEA197}</Project>
     333      <Name>HeuristicLab.Random-3.3</Name>
    262334      <Private>False</Private>
    263335    </ProjectReference>
     
    294366  -->
    295367  <PropertyGroup>
    296     <PreBuildEvent>set Path=%25Path%25;$(ProjectDir);$(SolutionDir)
     368    <PreBuildEvent Condition=" '$(OS)' == 'Windows_NT' ">set Path=%25Path%25;$(ProjectDir);$(SolutionDir)
    297369set ProjectDir=$(ProjectDir)
    298370set SolutionDir=$(SolutionDir)
     
    301373call PreBuildEvent.cmd
    302374</PreBuildEvent>
     375    <PreBuildEvent Condition=" '$(OS)' != 'Windows_NT' ">
     376export ProjectDir=$(ProjectDir)
     377export SolutionDir=$(SolutionDir)
     378
     379$SolutionDir/PreBuildEvent.sh
     380</PreBuildEvent>
    303381  </PropertyGroup>
    304382</Project>
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/ISupportVectorMachineModel.cs

    r7259 r8660  
    2020#endregion
    2121
    22 using HeuristicLab.Optimization;
    2322using HeuristicLab.Problems.DataAnalysis;
    24 using HeuristicLab.Core;
    25 using System.Collections.Generic;
     23using LibSVM;
    2624
    2725namespace HeuristicLab.Algorithms.DataAnalysis {
     
    3028  /// </summary>
    3129  public interface ISupportVectorMachineModel : IDataAnalysisModel, IRegressionModel, IClassificationModel {
    32     SVM.Model Model { get; }
    33     SVM.RangeTransform RangeTransform { get; }
     30    svm_model Model { get; }
     31    RangeTransform RangeTransform { get; }
    3432    Dataset SupportVectors { get; }
    3533  }
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/AlglibUtil.cs

    r7259 r8660  
    4545      return matrix;
    4646    }
     47    public static double[,] PrepareAndScaleInputMatrix(Dataset dataset, IEnumerable<string> variables, IEnumerable<int> rows, Scaling scaling) {
     48      List<string> variablesList = variables.ToList();
     49      List<int> rowsList = rows.ToList();
     50
     51      double[,] matrix = new double[rowsList.Count, variablesList.Count];
     52
     53      int col = 0;
     54      foreach (string column in variables) {
     55        var values = scaling.GetScaledValues(dataset, column, rows);
     56        int row = 0;
     57        foreach (var value in values) {
     58          matrix[row, col] = value;
     59          row++;
     60        }
     61        col++;
     62      }
     63
     64      return matrix;
     65    }
    4766  }
    4867}
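
    For orientation, a hedged usage sketch of the new helper next to the existing one. The signatures are taken from the diff above; the Scaling instance comes from Linear\Scaling.cs, which is added by this changeset but not shown here, so its construction is assumed.

      // Illustrative sketch (not part of the changeset): prepare an unscaled and a
      // scaled input matrix for the same variables and rows.
      public static void PrepareMatrices(IRegressionProblemData problemData, Scaling scaling) {
        double[,] raw = AlglibUtil.PrepareInputMatrix(
          problemData.Dataset, problemData.AllowedInputVariables, problemData.TrainingIndices);
        double[,] scaled = AlglibUtil.PrepareAndScaleInputMatrix(
          problemData.Dataset, problemData.AllowedInputVariables, problemData.TrainingIndices, scaling);
        // both matrices contain one row per training index and one column per allowed input variable
      }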
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs

    r8206 r8660  
    111111      IClassificationProblemData problemData,
    112112      IEnumerable<int> rows) {
    113       return new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter);
     113      var model = new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter, new AccuracyMaximizationThresholdCalculator());
     114      model.RecalculateModelParameters(problemData, rows);
     115      return model;
    114116    }
    115117  }
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs

    r7259 r8660  
    109109
    110110    public MultinomialLogitClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    111       return new MultinomialLogitClassificationSolution(problemData, this);
     111      return new MultinomialLogitClassificationSolution(new ClassificationProblemData(problemData), this);
    112112    }
    113113    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassification.cs

    r8206 r8660  
    2121
    2222using System;
    23 using System.Collections.Generic;
    2423using System.Linq;
    2524using HeuristicLab.Common;
    2625using HeuristicLab.Core;
    2726using HeuristicLab.Data;
    28 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2927using HeuristicLab.Optimization;
     28using HeuristicLab.Parameters;
    3029using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3130using HeuristicLab.Problems.DataAnalysis;
    32 using HeuristicLab.Problems.DataAnalysis.Symbolic;
    33 using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    34 using HeuristicLab.Parameters;
    3531
    3632namespace HeuristicLab.Algorithms.DataAnalysis {
     
    8480
    8581    public static IClassificationSolution CreateNearestNeighbourClassificationSolution(IClassificationProblemData problemData, int k) {
    86       Dataset dataset = problemData.Dataset;
    87       string targetVariable = problemData.TargetVariable;
    88       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    89       IEnumerable<int> rows = problemData.TrainingIndices;
    90       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    91       if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    92         throw new NotSupportedException("Nearest neighbour classification does not support NaN or infinity values in the input dataset.");
     82      var problemDataClone = (IClassificationProblemData)problemData.Clone();
     83      return new NearestNeighbourClassificationSolution(problemDataClone, Train(problemDataClone, k));
     84    }
    9385
    94       alglib.nearestneighbor.kdtree kdtree = new alglib.nearestneighbor.kdtree();
    95 
    96       int nRows = inputMatrix.GetLength(0);
    97       int nFeatures = inputMatrix.GetLength(1) - 1;
    98       double[] classValues = dataset.GetDoubleValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
    99       int nClasses = classValues.Count();
    100       // map original class values to values [0..nClasses-1]
    101       Dictionary<double, double> classIndices = new Dictionary<double, double>();
    102       for (int i = 0; i < nClasses; i++) {
    103         classIndices[classValues[i]] = i;
    104       }
    105       for (int row = 0; row < nRows; row++) {
    106         inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
    107       }
    108       alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdtree);
    109       var problemDataClone = (IClassificationProblemData) problemData.Clone();
    110       return new NearestNeighbourClassificationSolution(problemDataClone, new NearestNeighbourModel(kdtree, k, targetVariable, allowedInputVariables, problemDataClone.ClassValues.ToArray()));
     86    public static INearestNeighbourModel Train(IClassificationProblemData problemData, int k) {
     87      return new NearestNeighbourModel(problemData.Dataset,
     88        problemData.TrainingIndices,
     89        k,
     90        problemData.TargetVariable,
     91        problemData.AllowedInputVariables,
     92        problemData.ClassValues.ToArray());
    11193    }
    11294    #endregion
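
    A minimal sketch of the refactored call path, assuming the static methods live on NearestNeighbourClassification as shown above; the value k = 3 is illustrative.

      // Illustrative sketch: train a k-nearest-neighbour classifier with the new
      // static Train method and wrap it in a solution over a cloned problem data,
      // mirroring CreateNearestNeighbourClassificationSolution above.
      var problemDataClone = (IClassificationProblemData)problemData.Clone();
      INearestNeighbourModel model = NearestNeighbourClassification.Train(problemDataClone, 3);
      var solution = new NearestNeighbourClassificationSolution(problemDataClone, model);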
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs

    r7294 r8660  
    3333  /// </summary>
    3434  [StorableClass]
    35   [Item("NearestNeighbourModel", "Represents a neural network for regression and classification.")]
     35  [Item("NearestNeighbourModel", "Represents a nearest neighbour model for regression and classification.")]
    3636  public sealed class NearestNeighbourModel : NamedItem, INearestNeighbourModel {
    3737
     
    5656    [Storable]
    5757    private int k;
     58
    5859    [StorableConstructor]
    5960    private NearestNeighbourModel(bool deserializing)
     
    9596        this.classValues = (double[])original.classValues.Clone();
    9697    }
    97     public NearestNeighbourModel(alglib.nearestneighbor.kdtree kdTree, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    98       : base() {
    99       this.name = ItemName;
    100       this.description = ItemDescription;
    101       this.kdTree = kdTree;
     98    public NearestNeighbourModel(Dataset dataset, IEnumerable<int> rows, int k, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) {
     99      Name = ItemName;
     100      Description = ItemDescription;
    102101      this.k = k;
    103102      this.targetVariable = targetVariable;
    104103      this.allowedInputVariables = allowedInputVariables.ToArray();
    105       if (classValues != null)
     104
     105      var inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
     106                                   allowedInputVariables.Concat(new string[] { targetVariable }),
     107                                   rows);
     108
     109      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
     110        throw new NotSupportedException(
     111          "Nearest neighbour classification does not support NaN or infinity values in the input dataset.");
     112
     113      this.kdTree = new alglib.nearestneighbor.kdtree();
     114
     115      var nRows = inputMatrix.GetLength(0);
     116      var nFeatures = inputMatrix.GetLength(1) - 1;
     117
     118      if (classValues != null) {
    106119        this.classValues = (double[])classValues.Clone();
     120        int nClasses = classValues.Length;
     121        // map original class values to values [0..nClasses-1]
     122        var classIndices = new Dictionary<double, double>();
     123        for (int i = 0; i < nClasses; i++)
     124          classIndices[classValues[i]] = i;
     125
     126        for (int row = 0; row < nRows; row++) {
     127          inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];
     128        }
     129      }
     130      alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdTree);
    107131    }
    108132
     
    140164
    141165    public IEnumerable<double> GetEstimatedClassValues(Dataset dataset, IEnumerable<int> rows) {
     166      if (classValues == null) throw new InvalidOperationException("No class values are defined.");
    142167      double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
    143168
     
    177202
    178203    public INearestNeighbourRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    179       return new NearestNeighbourRegressionSolution(problemData, this);
     204      return new NearestNeighbourRegressionSolution(new RegressionProblemData(problemData), this);
    180205    }
    181206    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    183208    }
    184209    public INearestNeighbourClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    185       return new NearestNeighbourClassificationSolution(problemData, this);
     210      return new NearestNeighbourClassificationSolution(new ClassificationProblemData(problemData), this);
    186211    }
    187212    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegression.cs

    r8206 r8660  
    2121
    2222using System;
    23 using System.Collections.Generic;
    24 using System.Linq;
    2523using HeuristicLab.Common;
    2624using HeuristicLab.Core;
    2725using HeuristicLab.Data;
    28 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2926using HeuristicLab.Optimization;
     27using HeuristicLab.Parameters;
    3028using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3129using HeuristicLab.Problems.DataAnalysis;
    32 using HeuristicLab.Problems.DataAnalysis.Symbolic;
    33 using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    34 using HeuristicLab.Parameters;
    3530
    3631namespace HeuristicLab.Algorithms.DataAnalysis {
     
    8479
    8580    public static IRegressionSolution CreateNearestNeighbourRegressionSolution(IRegressionProblemData problemData, int k) {
    86       Dataset dataset = problemData.Dataset;
    87       string targetVariable = problemData.TargetVariable;
    88       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    89       IEnumerable<int> rows = problemData.TrainingIndices;
    90       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    91       if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    92         throw new NotSupportedException("Nearest neighbour regression does not support NaN or infinity values in the input dataset.");
     81      var clonedProblemData = (IRegressionProblemData)problemData.Clone();
     82      return new NearestNeighbourRegressionSolution(clonedProblemData, Train(problemData, k));
     83    }
    9384
    94       alglib.nearestneighbor.kdtree kdtree = new alglib.nearestneighbor.kdtree();
    95 
    96       int nRows = inputMatrix.GetLength(0);
    97 
    98       alglib.nearestneighbor.kdtreebuild(inputMatrix, nRows, inputMatrix.GetLength(1) - 1, 1, 2, kdtree);
    99 
    100       return new NearestNeighbourRegressionSolution((IRegressionProblemData)problemData.Clone(), new NearestNeighbourModel(kdtree, k, targetVariable, allowedInputVariables));
     85    public static INearestNeighbourModel Train(IRegressionProblemData problemData, int k) {
     86      return new NearestNeighbourModel(problemData.Dataset,
     87        problemData.TrainingIndices,
     88        k,
     89        problemData.TargetVariable,
     90        problemData.AllowedInputVariables);
    10191    }
    10292    #endregion
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs

    r7694 r8660  
    130130
    131131    public INeuralNetworkEnsembleRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    132       return new NeuralNetworkEnsembleRegressionSolution(problemData, this);
     132      return new NeuralNetworkEnsembleRegressionSolution(new RegressionEnsembleProblemData(problemData), this);
    133133    }
    134134    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    136136    }
    137137    public INeuralNetworkEnsembleClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    138       return new NeuralNetworkEnsembleClassificationSolution(problemData, this);
     138      return new NeuralNetworkEnsembleClassificationSolution(new ClassificationEnsembleProblemData(problemData), this);
    139139    }
    140140    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs

    r7259 r8660  
    138138
    139139    public INeuralNetworkRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    140       return new NeuralNetworkRegressionSolution(problemData, this);
     140      return new NeuralNetworkRegressionSolution(new RegressionProblemData(problemData), this);
    141141    }
    142142    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    144144    }
    145145    public INeuralNetworkClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    146       return new NeuralNetworkClassificationSolution(problemData, this);
     146      return new NeuralNetworkClassificationSolution(new ClassificationProblemData(problemData), this);
    147147    }
    148148    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs

    r8206 r8660  
    2626using HeuristicLab.Core;
    2727using HeuristicLab.Data;
    28 using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    2928using HeuristicLab.Optimization;
     29using HeuristicLab.Parameters;
    3030using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3131using HeuristicLab.Problems.DataAnalysis;
    32 using HeuristicLab.Problems.DataAnalysis.Symbolic;
    33 using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
    34 using HeuristicLab.Parameters;
    3532
    3633namespace HeuristicLab.Algorithms.DataAnalysis {
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Plugin.cs.frame

    r8085 r8660  
    2626  /// Plugin class for HeuristicLab.Algorithms.DataAnalysis plugin.
    2727  /// </summary>
    28   [Plugin("HeuristicLab.Algorithms.DataAnalysis", "Provides wrappers for data analysis algorithms implemented in external libraries (linear regression, linear discriminant analysis, k-means clustering, support vector classification and regression)", "3.4.2.$WCREV$")]
     28  [Plugin("HeuristicLab.Algorithms.DataAnalysis", "Provides wrappers for data analysis algorithms implemented in external libraries (linear regression, linear discriminant analysis, k-means clustering, support vector classification and regression)", "3.4.3.$WCREV$")]
    2929  [PluginFile("HeuristicLab.Algorithms.DataAnalysis-3.4.dll", PluginFileType.Assembly)]
    30   [PluginDependency("HeuristicLab.ALGLIB", "3.5.0")]
    31   [PluginDependency("HeuristicLab.LibSVM", "1.6.3")]
     30  [PluginDependency("HeuristicLab.ALGLIB", "3.6.0")]
     31  [PluginDependency("HeuristicLab.Algorithms.GradientDescent", "3.3")]
     32  [PluginDependency("HeuristicLab.Analysis", "3.3")]
    3233  [PluginDependency("HeuristicLab.Collections", "3.3")]
    3334  [PluginDependency("HeuristicLab.Common", "3.3")]
     
    3536  [PluginDependency("HeuristicLab.Core", "3.3")]
    3637  [PluginDependency("HeuristicLab.Data", "3.3")]
     38  [PluginDependency("HeuristicLab.Encodings.RealVectorEncoding", "3.3")]
    3739  [PluginDependency("HeuristicLab.Encodings.SymbolicExpressionTreeEncoding", "3.4")]
     40  [PluginDependency("HeuristicLab.Operators", "3.3")]
    3841  [PluginDependency("HeuristicLab.Optimization", "3.3")]
    3942  [PluginDependency("HeuristicLab.Parameters", "3.3")]
     
    4346  [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Classification", "3.4")]
    4447  [PluginDependency("HeuristicLab.Problems.DataAnalysis.Symbolic.Regression", "3.4")]
     48  [PluginDependency("HeuristicLab.LibSVM", "3.12")]
     49  [PluginDependency("HeuristicLab.Random", "3.3")]
    4550  public class HeuristicLabAlgorithmsDataAnalysisPlugin : PluginBase {
    4651  }
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/Properties/AssemblyInfo.cs.frame

    r7259 r8660  
    5353// by using the '*' as shown below:
    5454[assembly: AssemblyVersion("3.4.0.0")]
    55 [assembly: AssemblyFileVersion("3.4.2.$WCREV$")]
     55[assembly: AssemblyFileVersion("3.4.3.$WCREV$")]
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs

    r7259 r8660  
    132132
    133133    public IRandomForestRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    134       return new RandomForestRegressionSolution(problemData, this);
     134      return new RandomForestRegressionSolution(new RegressionProblemData(problemData), this);
    135135    }
    136136    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    138138    }
    139139    public IRandomForestClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    140       return new RandomForestClassificationSolution(problemData, this);
     140      return new RandomForestClassificationSolution(new ClassificationProblemData(problemData), this);
    141141    }
    142142    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassification.cs

    r8206 r8660  
    3030using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3131using HeuristicLab.Problems.DataAnalysis;
     32using LibSVM;
    3233
    3334namespace HeuristicLab.Algorithms.DataAnalysis {
     
    4445    private const string NuParameterName = "Nu";
    4546    private const string GammaParameterName = "Gamma";
     47    private const string DegreeParameterName = "Degree";
    4648
    4749    #region parameter properties
     
    6062    public IValueParameter<DoubleValue> GammaParameter {
    6163      get { return (IValueParameter<DoubleValue>)Parameters[GammaParameterName]; }
     64    }
     65    public IValueParameter<IntValue> DegreeParameter {
     66      get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
    6267    }
    6368    #endregion
     
    7984    public DoubleValue Gamma {
    8085      get { return GammaParameter.Value; }
     86    }
     87    public IntValue Degree {
     88      get { return DegreeParameter.Value; }
    8189    }
    8290    #endregion
     
    103111      Parameters.Add(new ValueParameter<DoubleValue>(CostParameterName, "The value of the C (cost) parameter of C-SVC.", new DoubleValue(1.0)));
    104112      Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
     113      Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
    105114    }
    106115    [StorableHook(HookType.AfterDeserialization)]
    107     private void AfterDeserialization() { }
     116    private void AfterDeserialization() {
     117      #region backwards compatibility (change with 3.4)
     118      if (!Parameters.ContainsKey(DegreeParameterName))
     119        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
     120      #endregion
     121    }
    108122
    109123    public override IDeepCloneable Clone(Cloner cloner) {
     
    118132      int nSv;
    119133      var solution = CreateSupportVectorClassificationSolution(problemData, selectedInputVariables,
    120         SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value,
     134        SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Degree.Value,
    121135        out trainingAccuracy, out testAccuracy, out nSv);
    122136
    123137      Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution));
    124138      Results.Add(new Result("Training accuracy", "The accuracy of the SVR solution on the training partition.", new DoubleValue(trainingAccuracy)));
    125       Results.Add(new Result("Test ", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
     139      Results.Add(new Result("Test accuracy", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
    126140      Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
    127141    }
    128142
    129143    public static SupportVectorClassificationSolution CreateSupportVectorClassificationSolution(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
    130       string svmType, string kernelType, double cost, double nu, double gamma,
     144      string svmType, string kernelType, double cost, double nu, double gamma, int degree,
    131145      out double trainingAccuracy, out double testAccuracy, out int nSv) {
    132146      Dataset dataset = problemData.Dataset;
     
    135149
    136150      //extract SVM parameters from scope and set them
    137       SVM.Parameter parameter = new SVM.Parameter();
    138       parameter.SvmType = (SVM.SvmType)Enum.Parse(typeof(SVM.SvmType), svmType, true);
    139       parameter.KernelType = (SVM.KernelType)Enum.Parse(typeof(SVM.KernelType), kernelType, true);
     151      svm_parameter parameter = new svm_parameter();
     152      parameter.svm_type = GetSvmType(svmType);
     153      parameter.kernel_type = GetKernelType(kernelType);
    140154      parameter.C = cost;
    141       parameter.Nu = nu;
    142       parameter.Gamma = gamma;
    143       parameter.CacheSize = 500;
    144       parameter.Probability = false;
    145 
     155      parameter.nu = nu;
     156      parameter.gamma = gamma;
     157      parameter.cache_size = 500;
     158      parameter.probability = 0;
     159      parameter.eps = 0.001;
     160      parameter.degree = degree;
     161      parameter.shrinking = 1;
     162      parameter.coef0 = 0;
     163
     164
     165      var weightLabels = new List<int>();
     166      var weights = new List<double>();
    146167      foreach (double c in problemData.ClassValues) {
    147168        double wSum = 0.0;
     
    151172          }
    152173        }
    153         parameter.Weights.Add((int)c, wSum);
     174        weightLabels.Add((int)c);
     175        weights.Add(wSum);
    154176      }
    155 
    156 
    157       SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    158       SVM.RangeTransform rangeTransform = SVM.RangeTransform.Compute(problem);
    159       SVM.Problem scaledProblem = SVM.Scaling.Scale(rangeTransform, problem);
    160       var svmModel = SVM.Training.Train(scaledProblem, parameter);
     177      parameter.weight_label = weightLabels.ToArray();
     178      parameter.weight = weights.ToArray();
     179
     180
     181      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     182      RangeTransform rangeTransform = RangeTransform.Compute(problem);
     183      svm_problem scaledProblem = rangeTransform.Scale(problem);
     184      var svmModel = svm.svm_train(scaledProblem, parameter);
    161185      var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
    162186      var solution = new SupportVectorClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
    163187
    164       nSv = svmModel.SupportVectorCount;
     188      nSv = svmModel.SV.Length;
    165189      trainingAccuracy = solution.TrainingAccuracy;
    166190      testAccuracy = solution.TestAccuracy;
    167191
    168192      return solution;
     193    }
     194
     195    private static int GetSvmType(string svmType) {
     196      if (svmType == "NU_SVC") return svm_parameter.NU_SVC;
     197      if (svmType == "C_SVC") return svm_parameter.C_SVC;
     198      throw new ArgumentException("Unknown SVM type");
     199    }
     200
     201    private static int GetKernelType(string kernelType) {
     202      if (kernelType == "LINEAR") return svm_parameter.LINEAR;
     203      if (kernelType == "POLY") return svm_parameter.POLY;
     204      if (kernelType == "SIGMOID") return svm_parameter.SIGMOID;
     205      if (kernelType == "RBF") return svm_parameter.RBF;
     206      throw new ArgumentException("Unknown kernel type");
    169207    }
    170208    #endregion
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineModel.cs

    r7259 r8660  
    2929using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3030using HeuristicLab.Problems.DataAnalysis;
    31 using SVM;
     31using LibSVM;
    3232
    3333namespace HeuristicLab.Algorithms.DataAnalysis {
     
    3939  public sealed class SupportVectorMachineModel : NamedItem, ISupportVectorMachineModel {
    4040
    41     private SVM.Model model;
     41    private svm_model model;
    4242    /// <summary>
    4343    /// Gets or sets the SVM model.
    4444    /// </summary>
    45     public SVM.Model Model {
     45    public svm_model Model {
    4646      get { return model; }
    4747      set {
     
    5757    /// Gets or sets the range transformation for the model.
    5858    /// </summary>
    59     private SVM.RangeTransform rangeTransform;
    60     public SVM.RangeTransform RangeTransform {
     59    private RangeTransform rangeTransform;
     60    public RangeTransform RangeTransform {
    6161      get { return rangeTransform; }
    6262      set {
     
    7171    public Dataset SupportVectors {
    7272      get {
    73         var data = new double[Model.SupportVectorCount, allowedInputVariables.Count()];
    74         for (int i = 0; i < Model.SupportVectorCount; i++) {
    75           var sv = Model.SupportVectors[i];
     73        var data = new double[Model.sv_coef.Length, allowedInputVariables.Count()];
     74        for (int i = 0; i < Model.sv_coef.Length; i++) {
     75          var sv = Model.SV[i];
    7676          for (int j = 0; j < sv.Length; j++) {
    77             data[i, j] = sv[j].Value;
     77            data[i, j] = sv[j].value;
    7878          }
    7979        }
     
    101101        this.classValues = (double[])original.classValues.Clone();
    102102    }
    103     public SupportVectorMachineModel(SVM.Model model, SVM.RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> classValues)
     103    public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables, IEnumerable<double> classValues)
    104104      : this(model, rangeTransform, targetVariable, allowedInputVariables) {
    105105      this.classValues = classValues.ToArray();
    106106    }
    107     public SupportVectorMachineModel(SVM.Model model, SVM.RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables)
     107    public SupportVectorMachineModel(svm_model model, RangeTransform rangeTransform, string targetVariable, IEnumerable<string> allowedInputVariables)
    108108      : base() {
    109109      this.name = ItemName;
     
    124124    }
    125125    public SupportVectorRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    126       return new SupportVectorRegressionSolution(this, problemData);
     126      return new SupportVectorRegressionSolution(this, new RegressionProblemData(problemData));
    127127    }
    128128    IRegressionSolution IRegressionModel.CreateRegressionSolution(IRegressionProblemData problemData) {
     
    153153
    154154    public SupportVectorClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    155       return new SupportVectorClassificationSolution(this, problemData);
     155      return new SupportVectorClassificationSolution(this, new ClassificationProblemData(problemData));
    156156    }
    157157    IClassificationSolution IClassificationModel.CreateClassificationSolution(IClassificationProblemData problemData) {
     
    161161    private IEnumerable<double> GetEstimatedValuesHelper(Dataset dataset, IEnumerable<int> rows) {
    162162      // calculate predictions for the currently requested rows
    163       SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    164       SVM.Problem scaledProblem = Scaling.Scale(RangeTransform, problem);
    165 
    166       for (int i = 0; i < scaledProblem.Count; i++) {
    167         yield return SVM.Prediction.Predict(Model, scaledProblem.X[i]);
     163      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     164      svm_problem scaledProblem = rangeTransform.Scale(problem);
     165
     166      for (int i = 0; i < problem.l; i++) {
     167        yield return svm.svm_predict(Model, scaledProblem.x[i]);
    168168      }
    169169    }
     
    183183      get {
    184184        using (MemoryStream stream = new MemoryStream()) {
    185           SVM.Model.Write(stream, Model);
     185          svm.svm_save_model(new StreamWriter(stream), Model);
    186186          stream.Seek(0, System.IO.SeekOrigin.Begin);
    187187          StreamReader reader = new StreamReader(stream);
     
    191191      set {
    192192        using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(value))) {
    193           model = SVM.Model.Read(stream);
     193          model = svm.svm_load_model(new StreamReader(stream));
    194194        }
    195195      }
     
    199199      get {
    200200        using (MemoryStream stream = new MemoryStream()) {
    201           SVM.RangeTransform.Write(stream, RangeTransform);
     201          RangeTransform.Write(stream, RangeTransform);
    202202          stream.Seek(0, System.IO.SeekOrigin.Begin);
    203203          StreamReader reader = new StreamReader(stream);
     
    207207      set {
    208208        using (MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(value))) {
    209           RangeTransform = SVM.RangeTransform.Read(stream);
     209          RangeTransform = RangeTransform.Read(stream);
    210210        }
    211211      }
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorMachineUtil.cs

    r7259 r8660  
    2323using System.Linq;
    2424using HeuristicLab.Problems.DataAnalysis;
     25using LibSVM;
    2526
    2627namespace HeuristicLab.Algorithms.DataAnalysis {
     
    3233    /// <param name="rowIndices">The rows of the dataset that should be contained in the resulting SVM-problem</param>
    3334    /// <returns>A problem data type that can be used to train a support vector machine.</returns>
    34     public static SVM.Problem CreateSvmProblem(Dataset dataset, string targetVariable, IEnumerable<string> inputVariables, IEnumerable<int> rowIndices) {
     35    public static svm_problem CreateSvmProblem(Dataset dataset, string targetVariable, IEnumerable<string> inputVariables, IEnumerable<int> rowIndices) {
    3536      double[] targetVector =
    3637        dataset.GetDoubleValues(targetVariable, rowIndices).ToArray();
    3738
    38       SVM.Node[][] nodes = new SVM.Node[targetVector.Length][];
    39       List<SVM.Node> tempRow;
     39      svm_node[][] nodes = new svm_node[targetVector.Length][];
     40      List<svm_node> tempRow;
    4041      int maxNodeIndex = 0;
    4142      int svmProblemRowIndex = 0;
    4243      List<string> inputVariablesList = inputVariables.ToList();
    4344      foreach (int row in rowIndices) {
    44         tempRow = new List<SVM.Node>();
     45        tempRow = new List<svm_node>();
    4546        int colIndex = 1; // make sure the smallest node index for SVM = 1
    4647        foreach (var inputVariable in inputVariablesList) {
     
    4950          // => don't add NaN values in the dataset to the sparse SVM matrix representation
    5051          if (!double.IsNaN(value)) {
    51             tempRow.Add(new SVM.Node(colIndex, value)); // nodes must be sorted in ascending ordered by column index
     52            tempRow.Add(new svm_node() { index = colIndex, value = value }); // nodes must be sorted in ascending ordered by column index
    5253            if (colIndex > maxNodeIndex) maxNodeIndex = colIndex;
    5354          }
     
    5758      }
    5859
    59       return new SVM.Problem(targetVector.Length, targetVector, nodes, maxNodeIndex);
     60      return new svm_problem() { l = targetVector.Length, y = targetVector, x = nodes };
    6061    }
    6162  }
  • branches/GP-MoveOperators/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorRegression.cs

    r8206 r8660  
    3030using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3131using HeuristicLab.Problems.DataAnalysis;
     32using LibSVM;
    3233
    3334namespace HeuristicLab.Algorithms.DataAnalysis {
     
    4546    private const string GammaParameterName = "Gamma";
    4647    private const string EpsilonParameterName = "Epsilon";
     48    private const string DegreeParameterName = "Degree";
    4749
    4850    #region parameter properties
     
    6466    public IValueParameter<DoubleValue> EpsilonParameter {
    6567      get { return (IValueParameter<DoubleValue>)Parameters[EpsilonParameterName]; }
     68    }
     69    public IValueParameter<IntValue> DegreeParameter {
     70      get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
    6671    }
    6772    #endregion
     
    8691    public DoubleValue Epsilon {
    8792      get { return EpsilonParameter.Value; }
     93    }
     94    public IntValue Degree {
     95      get { return DegreeParameter.Value; }
    8896    }
    8997    #endregion
     
    111119      Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
    112120      Parameters.Add(new ValueParameter<DoubleValue>(EpsilonParameterName, "The value of the epsilon parameter for epsilon-SVR.", new DoubleValue(0.1)));
     121      Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
    113122    }
    114123    [StorableHook(HookType.AfterDeserialization)]
    115     private void AfterDeserialization() { }
     124    private void AfterDeserialization() {
     125      #region backwards compatibility (change with 3.4)
     126      if (!Parameters.ContainsKey(DegreeParameterName))
     127        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
     128      #endregion
     129    }
    116130
    117131    public override IDeepCloneable Clone(Cloner cloner) {
     
    126140      int nSv;
    127141      var solution = CreateSupportVectorRegressionSolution(problemData, selectedInputVariables, SvmType.Value,
    128         KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value,
     142        KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value,
    129143        out trainR2, out testR2, out nSv);
    130144
     
    136150
    137151    public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
    138       string svmType, string kernelType, double cost, double nu, double gamma, double epsilon,
     152      string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
    139153      out double trainingR2, out double testR2, out int nSv) {
    140154      Dataset dataset = problemData.Dataset;
     
    143157
    144158      //extract SVM parameters from scope and set them
    145       SVM.Parameter parameter = new SVM.Parameter();
    146       parameter.SvmType = (SVM.SvmType)Enum.Parse(typeof(SVM.SvmType), svmType, true);
    147       parameter.KernelType = (SVM.KernelType)Enum.Parse(typeof(SVM.KernelType), kernelType, true);
     159      svm_parameter parameter = new svm_parameter();
     160      parameter.svm_type = GetSvmType(svmType);
     161      parameter.kernel_type = GetKernelType(kernelType);
    148162      parameter.C = cost;
    149       parameter.Nu = nu;
    150       parameter.Gamma = gamma;
    151       parameter.P = epsilon;
    152       parameter.CacheSize = 500;
    153       parameter.Probability = false;
    154 
    155 
    156       SVM.Problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    157       SVM.RangeTransform rangeTransform = SVM.RangeTransform.Compute(problem);
    158       SVM.Problem scaledProblem = SVM.Scaling.Scale(rangeTransform, problem);
    159       var svmModel = SVM.Training.Train(scaledProblem, parameter);
    160       nSv = svmModel.SupportVectorCount;
     163      parameter.nu = nu;
     164      parameter.gamma = gamma;
     165      parameter.p = epsilon;
     166      parameter.cache_size = 500;
     167      parameter.probability = 0;
     168      parameter.eps = 0.001;
     169      parameter.degree = degree;
     170      parameter.shrinking = 1;
     171      parameter.coef0 = 0;
     172
     173
     174
     175      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
     176      RangeTransform rangeTransform = RangeTransform.Compute(problem);
     177      svm_problem scaledProblem = rangeTransform.Scale(problem);
     178      var svmModel = svm.svm_train(scaledProblem, parameter);
     179      nSv = svmModel.SV.Length;
    161180      var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
    162181      var solution = new SupportVectorRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
     
    165184      return solution;
    166185    }
     186
     187    private static int GetSvmType(string svmType) {
     188      if (svmType == "NU_SVR") return svm_parameter.NU_SVR;
     189      if (svmType == "EPSILON_SVR") return svm_parameter.EPSILON_SVR;
     190      throw new ArgumentException("Unknown SVM type");
     191    }
     192
     193    private static int GetKernelType(string kernelType) {
     194      if (kernelType == "LINEAR") return svm_parameter.LINEAR;
     195      if (kernelType == "POLY") return svm_parameter.POLY;
     196      if (kernelType == "SIGMOID") return svm_parameter.SIGMOID;
     197      if (kernelType == "RBF") return svm_parameter.RBF;
     198      throw new ArgumentException("Unknown kernel type");
     199    }
    167200    #endregion
    168201  }
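
    The regression variant follows the same pattern; a short hedged sketch of the epsilon-SVR parameter block, again using only fields visible in the diff (gamma, epsilon and degree defaults are taken from the constructor above, the remaining numeric values are illustrative).

      // Illustrative sketch: the epsilon-SVR case differs from the classification
      // setup mainly in the SVM type and the additional epsilon (p) field.
      svm_parameter parameter = new svm_parameter();
      parameter.svm_type = svm_parameter.EPSILON_SVR;   // or svm_parameter.NU_SVR
      parameter.kernel_type = svm_parameter.RBF;
      parameter.C = 1.0;
      parameter.gamma = 1.0;   // constructor default shown above
      parameter.p = 0.1;       // epsilon, constructor default shown above
      parameter.degree = 3;    // constructor default shown above
      parameter.coef0 = 0;
      parameter.cache_size = 500;
      parameter.eps = 0.001;
      parameter.shrinking = 1;
      parameter.probability = 0;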