
Timestamp:
11/19/15 11:45:22
Author:
gkronber
Message:

#2385: merged r12934,r13204,r13205,r13270 from trunk to stable

Location:
stable
Files:
10 edited

Legend:

  unchanged line
+ added line
- removed line
… unchanged lines omitted
  • stable

  • stable/HeuristicLab.Algorithms.DataAnalysis

  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassification.cs

r12708 → r13283

 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
…

     private const string ModelParameterName = "Model";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region parameter properties
…
     public IFixedValueParameter<GaussianProcessClassificationSolutionCreator> GaussianProcessSolutionCreatorParameter {
       get { return (IFixedValueParameter<GaussianProcessClassificationSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+    }
+    #endregion
+    #region properties
+    public bool CreateSolution {
+      get { return CreateSolutionParameter.Value.Value; }
+      set { CreateSolutionParameter.Value.Value = value; }
     }
     #endregion
…
         new GaussianProcessClassificationSolutionCreator()));
       Parameters[SolutionCreatorParameterName].Hidden = true;
+      // TODO: it would be better to deactivate the solution creator when this parameter is changed
+      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+      Parameters[CreateSolutionParameterName].Hidden = true;

       ParameterizedModelCreators();
…
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+      #region Backwards compatible code, remove with 3.4
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+        Parameters[CreateSolutionParameterName].Hidden = true;
+      }
+      #endregion
       RegisterEventHandlers();
     }
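The practical effect of this change is a new, hidden CreateSolution flag (default true) that can be switched off when only the collected result values of a run are of interest, for example in large experiments where storing a full solution per run would be wasteful. A minimal usage sketch follows; this is hypothetical client code, not part of the changeset, and it assumes the HeuristicLab.Algorithms.DataAnalysis namespace implied by the file path above. The same property is added to GaussianProcessRegression further below.

    // sketch: configure the algorithm so that no solution object is built at the end of the run
    var gpc = new GaussianProcessClassification();
    gpc.CreateSolution = false;   // the model is still fitted and the run results are still collected,
                                  // only the GaussianProcessClassificationSolution is skipped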
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationSolutionCreator.cs

r12009 → r13283

     private const string TrainingAccuracyResultName = "Accuracy (training)";
     private const string TestAccuracyResultName = "Accuracy (test)";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region Parameter Properties
…
       get { return (ILookupParameter<ResultCollection>)Parameters[ResultsParameterName]; }
     }
+    public ILookupParameter<BoolValue> CreateSolutionParameter {
+      get { return (ILookupParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+    }
     #endregion

…
       Parameters.Add(new LookupParameter<IClassificationProblemData>(ProblemDataParameterName, "The classification problem data for the Gaussian process solution."));
       Parameters.Add(new LookupParameter<IGaussianProcessModel>(ModelParameterName, "The Gaussian process classification model to use for the solution."));
+      Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+
       // in & out
       Parameters.Add(new LookupParameter<ResultCollection>(ResultsParameterName, "The result collection of the algorithm."));
       // out
       Parameters.Add(new LookupParameter<IDiscriminantFunctionClassificationSolution>(SolutionParameterName, "The produced Gaussian process solution."));
+    }
+
+    [StorableHook(HookType.AfterDeserialization)]
+    private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+      #region Backwards compatible code, remove with 3.4
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+      }
+      #endregion
     }

…

     public override IOperation Apply() {
-      if (ModelParameter.ActualValue != null) {
+      if (ModelParameter.ActualValue != null && CreateSolutionParameter.ActualValue.Value == true) {
       var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
       m.FixParameters();
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs

r12708 → r13283

 using HeuristicLab.Common;
 using HeuristicLab.Core;
+using HeuristicLab.Data;
 using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
…

     private const string ModelParameterName = "Model";
+    private const string CreateSolutionParameterName = "CreateSolution";
+

     #region parameter properties
…
     public IFixedValueParameter<GaussianProcessRegressionSolutionCreator> GaussianProcessSolutionCreatorParameter {
       get { return (IFixedValueParameter<GaussianProcessRegressionSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+    }
+    #endregion
+    #region properties
+    public bool CreateSolution {
+      get { return CreateSolutionParameter.Value.Value; }
+      set { CreateSolutionParameter.Value.Value = value; }
     }
     #endregion
…
       Parameters.Add(new ConstrainedValueParameter<IGaussianProcessRegressionModelCreator>(ModelCreatorParameterName, "The operator to create the Gaussian process model.",
         new ItemSet<IGaussianProcessRegressionModelCreator>(modelCreators), defaultModelCreator));
-      // this parameter is not intended to be changed,
+      // the solution creator cannot be changed
       Parameters.Add(new FixedValueParameter<GaussianProcessRegressionSolutionCreator>(SolutionCreatorParameterName, "The solution creator for the algorithm",
         new GaussianProcessRegressionSolutionCreator()));
       Parameters[SolutionCreatorParameterName].Hidden = true;
+      // TODO: it would be better to deactivate the solution creator when this parameter is changed
+      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+      Parameters[CreateSolutionParameterName].Hidden = true;

       ParameterizedModelCreators();
…
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+      #region Backwards compatible code, remove with 3.4
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+        Parameters[CreateSolutionParameterName].Hidden = true;
+      }
+      #endregion
       RegisterEventHandlers();
     }
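GaussianProcessRegression gets the identical flag. Where this pays off is in repeated runs that only need the collected result values; below is a sketch that assumes HeuristicLab's BatchRun from HeuristicLab.Optimization (its constructor, Optimizer and Repetitions members are framework API assumed here, not part of this changeset).

    // sketch: repeat GP regression many times without keeping a solution per run
    var gpr = new GaussianProcessRegression();
    gpr.CreateSolution = false;               // keeps each run's result collection small
    var batch = new BatchRun("GPR batch");    // assumed framework class, see note above
    batch.Optimizer = gpr;
    batch.Repetitions = 20;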
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs

r12009 → r13283

     private const string TrainingRSquaredResultName = "Training R²";
     private const string TestRSquaredResultName = "Test R²";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region Parameter Properties
…
       get { return (ILookupParameter<ResultCollection>)Parameters[ResultsParameterName]; }
     }
+    public ILookupParameter<BoolValue> CreateSolutionParameter {
+      get { return (ILookupParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+    }
     #endregion

…
       Parameters.Add(new LookupParameter<IRegressionProblemData>(ProblemDataParameterName, "The regression problem data for the Gaussian process solution."));
       Parameters.Add(new LookupParameter<IGaussianProcessModel>(ModelParameterName, "The Gaussian process regression model to use for the solution."));
+      Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+
       // in & out
       Parameters.Add(new LookupParameter<ResultCollection>(ResultsParameterName, "The result collection of the algorithm."));
       // out
       Parameters.Add(new LookupParameter<IGaussianProcessSolution>(SolutionParameterName, "The produced Gaussian process solution."));
+    }
+
+    [StorableHook(HookType.AfterDeserialization)]
+    private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+      #region Backwards compatible code, remove with 3.4
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+      }
+      #endregion
     }

…

     public override IOperation Apply() {
-      if (ModelParameter.ActualValue != null) {
+      if (ModelParameter.ActualValue != null && CreateSolutionParameter.ActualValue.Value == true) {
       var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
       m.FixParameters();
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestClassification.cs

r12708 → r13283

     private const string SeedParameterName = "Seed";
     private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region parameter properties
…
     public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
       get { return (IFixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
     }
     #endregion
…
       set { SetSeedRandomlyParameter.Value.Value = value; }
     }
+    public bool CreateSolution {
+      get { return CreateSolutionParameter.Value.Value; }
+      set { CreateSolutionParameter.Value.Value = value; }
+    }
     #endregion

…
       Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
       Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+      Parameters[CreateSolutionParameterName].Hidden = true;
+
       Problem = new ClassificationProblem();
     }
…
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+      #region Backwards compatible code, remove with 3.4
       if (!Parameters.ContainsKey(MParameterName))
         Parameters.Add(new FixedValueParameter<DoubleValue>(MParameterName, "The ratio of features that will be used in the construction of individual trees (0<m<=1)", new DoubleValue(0.5)));
…
       if (!Parameters.ContainsKey((SetSeedRandomlyParameterName)))
         Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+        Parameters[CreateSolutionParameterName].Hidden = true;
+      }
+      #endregion
     }

…
       if (SetSeedRandomly) Seed = new System.Random().Next();

-      var solution = CreateRandomForestClassificationSolution(Problem.ProblemData, NumberOfTrees, R, M, Seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
-      Results.Add(new Result(RandomForestClassificationModelResultName, "The random forest classification solution.", solution));
+      var model = CreateRandomForestClassificationModel(Problem.ProblemData, NumberOfTrees, R, M, Seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
       Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the random forest regression solution on the training set.", new DoubleValue(rmsError)));
       Results.Add(new Result("Relative classification error", "Relative classification error of the random forest regression solution on the training set.", new PercentValue(relClassificationError)));
       Results.Add(new Result("Root mean square error (out-of-bag)", "The out-of-bag root of the mean of squared errors of the random forest regression solution.", new DoubleValue(outOfBagRmsError)));
       Results.Add(new Result("Relative classification error (out-of-bag)", "The out-of-bag relative classification error of the random forest regression solution.", new PercentValue(outOfBagRelClassificationError)));
+
+      if (CreateSolution) {
+        var solution = new RandomForestClassificationSolution((IClassificationProblemData)Problem.ProblemData.Clone(), model);
+        Results.Add(new Result(RandomForestClassificationModelResultName, "The random forest classification solution.", solution));
+      }
+    }
+
+    // keep for compatibility with old API
+    public static RandomForestClassificationSolution CreateRandomForestClassificationSolution(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
+      out double rmsError, out double relClassificationError, out double outOfBagRmsError, out double outOfBagRelClassificationError) {
+      var model = CreateRandomForestClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
+      return new RandomForestClassificationSolution((IClassificationProblemData)problemData.Clone(), model);
     }

-    public static IClassificationSolution CreateRandomForestClassificationSolution(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
+    public static RandomForestModel CreateRandomForestClassificationModel(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
       out double rmsError, out double relClassificationError, out double outOfBagRmsError, out double outOfBagRelClassificationError) {
-      var model = RandomForestModel.CreateClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
-      return new RandomForestClassificationSolution((IClassificationProblemData)problemData.Clone(), model);
+      return RandomForestModel.CreateClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
     }
     #endregion
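With this refactoring the model construction is available separately from solution construction: CreateRandomForestClassificationModel returns the bare RandomForestModel together with the training and out-of-bag error measures, while the old CreateRandomForestClassificationSolution overload is kept as a thin wrapper around it. A hedged sketch of calling the new entry point; the problemData variable and the numeric arguments are placeholders, not values from this changeset.

    // sketch: build only the random forest model, wrap it into a solution later if needed
    double rmsError, relClassError, oobRmsError, oobRelClassError;
    var model = RandomForestClassification.CreateRandomForestClassificationModel(
      problemData, 50, 0.3, 0.5, 0,   // nTrees, r, m, seed (illustrative values)
      out rmsError, out relClassError, out oobRmsError, out oobRelClassError);
    var solution = new RandomForestClassificationSolution(
      (IClassificationProblemData)problemData.Clone(), model);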
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestRegression.cs

r12708 → r13283

     private const string SeedParameterName = "Seed";
     private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region parameter properties
…
     public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
       get { return (IFixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
     }
     #endregion
…
       set { SetSeedRandomlyParameter.Value.Value = value; }
     }
+    public bool CreateSolution {
+      get { return CreateSolutionParameter.Value.Value; }
+      set { CreateSolutionParameter.Value.Value = value; }
+    }
     #endregion
     [StorableConstructor]
…
       Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
       Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+      Parameters[CreateSolutionParameterName].Hidden = true;
+
       Problem = new RegressionProblem();
     }
…
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {
+      // BackwardsCompatibility3.3
+      #region Backwards compatible code, remove with 3.4
       if (!Parameters.ContainsKey(MParameterName))
         Parameters.Add(new FixedValueParameter<DoubleValue>(MParameterName, "The ratio of features that will be used in the construction of individual trees (0<m<=1)", new DoubleValue(0.5)));
…
       if (!Parameters.ContainsKey((SetSeedRandomlyParameterName)))
         Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+        Parameters[CreateSolutionParameterName].Hidden = true;
+      }
+      #endregion
     }

…
       double rmsError, avgRelError, outOfBagRmsError, outOfBagAvgRelError;
       if (SetSeedRandomly) Seed = new System.Random().Next();
+      var model = CreateRandomForestRegressionModel(Problem.ProblemData, NumberOfTrees, R, M, Seed,
+        out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);

-      var solution = CreateRandomForestRegressionSolution(Problem.ProblemData, NumberOfTrees, R, M, Seed, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
-      Results.Add(new Result(RandomForestRegressionModelResultName, "The random forest regression solution.", solution));
       Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the random forest regression solution on the training set.", new DoubleValue(rmsError)));
       Results.Add(new Result("Average relative error", "The average of relative errors of the random forest regression solution on the training set.", new PercentValue(avgRelError)));
       Results.Add(new Result("Root mean square error (out-of-bag)", "The out-of-bag root of the mean of squared errors of the random forest regression solution.", new DoubleValue(outOfBagRmsError)));
       Results.Add(new Result("Average relative error (out-of-bag)", "The out-of-bag average of relative errors of the random forest regression solution.", new PercentValue(outOfBagAvgRelError)));
+
+      if (CreateSolution) {
+        var solution = new RandomForestRegressionSolution((IRegressionProblemData)Problem.ProblemData.Clone(), model);
+        Results.Add(new Result(RandomForestRegressionModelResultName, "The random forest regression solution.", solution));
+      }
     }

-    public static IRegressionSolution CreateRandomForestRegressionSolution(IRegressionProblemData problemData, int nTrees, double r, double m, int seed,
+    // keep for compatibility with old API
+    public static RandomForestRegressionSolution CreateRandomForestRegressionSolution(IRegressionProblemData problemData, int nTrees, double r, double m, int seed,
       out double rmsError, out double avgRelError, out double outOfBagRmsError, out double outOfBagAvgRelError) {
-      var model = RandomForestModel.CreateRegressionModel(problemData, nTrees, r, m, seed, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
+      var model = CreateRandomForestRegressionModel(problemData, nTrees, r, m, seed,
+        out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
       return new RandomForestRegressionSolution((IRegressionProblemData)problemData.Clone(), model);
     }
+
+    public static RandomForestModel CreateRandomForestRegressionModel(IRegressionProblemData problemData, int nTrees,
+      double r, double m, int seed,
+      out double rmsError, out double avgRelError, out double outOfBagRmsError, out double outOfBagAvgRelError) {
+      return RandomForestModel.CreateRegressionModel(problemData, nTrees, r, m, seed, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
+    }
+
     #endregion
   }
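The regression variant gets the same split: CreateRandomForestRegressionModel builds only the RandomForestModel, and the compatibility overload wraps it into a RandomForestRegressionSolution. A short sketch with a placeholder problemData and illustrative parameter values:

    // sketch: obtain the out-of-bag error without ever creating a solution object
    double rmsError, avgRelError, oobRmsError, oobAvgRelError;
    var model = RandomForestRegression.CreateRandomForestRegressionModel(
      problemData, 50, 0.3, 0.5, 0,   // nTrees, r, m, seed (illustrative values)
      out rmsError, out avgRelError, out oobRmsError, out oobAvgRelError);
    Console.WriteLine("OOB RMSE: " + oobRmsError);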
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassification.cs

r12708 → r13283

     private const string GammaParameterName = "Gamma";
     private const string DegreeParameterName = "Degree";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region parameter properties
…
     public IValueParameter<IntValue> DegreeParameter {
       get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
     }
     #endregion
…
     public IntValue Degree {
       get { return DegreeParameter.Value; }
+    }
+    public bool CreateSolution {
+      get { return CreateSolutionParameter.Value.Value; }
+      set { CreateSolutionParameter.Value.Value = value; }
     }
     #endregion
…
       Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
       Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+      Parameters[CreateSolutionParameterName].Hidden = true;
     }
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {
       #region backwards compatibility (change with 3.4)
-      if (!Parameters.ContainsKey(DegreeParameterName))
-        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+      if (!Parameters.ContainsKey(DegreeParameterName)) {
+        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName,
+          "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+      }
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
+          "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+        Parameters[CreateSolutionParameterName].Hidden = true;
+      }
       #endregion
     }
…
       IClassificationProblemData problemData = Problem.ProblemData;
       IEnumerable<string> selectedInputVariables = problemData.AllowedInputVariables;
-      double trainingAccuracy, testAccuracy;
       int nSv;
-      var solution = CreateSupportVectorClassificationSolution(problemData, selectedInputVariables,
-        SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Degree.Value,
-        out trainingAccuracy, out testAccuracy, out nSv);
-
-      Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution));
-      Results.Add(new Result("Training accuracy", "The accuracy of the SVR solution on the training partition.", new DoubleValue(trainingAccuracy)));
-      Results.Add(new Result("Test accuracy", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
-      Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
+      ISupportVectorMachineModel model;
+
+      Run(problemData, selectedInputVariables, GetSvmType(SvmType.Value), GetKernelType(KernelType.Value), Cost.Value, Nu.Value, Gamma.Value, Degree.Value, out model, out nSv);
+
+      if (CreateSolution) {
+        var solution = new SupportVectorClassificationSolution((SupportVectorMachineModel)model, (IClassificationProblemData)problemData.Clone());
+        Results.Add(new Result("Support vector classification solution", "The support vector classification solution.",
+          solution));
+      }
+
+      {
+        // calculate classification metrics
+        // calculate regression model metrics
+        var ds = problemData.Dataset;
+        var trainRows = problemData.TrainingIndices;
+        var testRows = problemData.TestIndices;
+        var yTrain = ds.GetDoubleValues(problemData.TargetVariable, trainRows);
+        var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
+        var yPredTrain = model.GetEstimatedClassValues(ds, trainRows);
+        var yPredTest = model.GetEstimatedClassValues(ds, testRows);
+
+        OnlineCalculatorError error;
+        var trainAccuracy = OnlineAccuracyCalculator.Calculate(yPredTrain, yTrain, out error);
+        if (error != OnlineCalculatorError.None) trainAccuracy = double.MaxValue;
+        var testAccuracy = OnlineAccuracyCalculator.Calculate(yPredTest, yTest, out error);
+        if (error != OnlineCalculatorError.None) testAccuracy = double.MaxValue;
+
+        Results.Add(new Result("Accuracy (training)", "The mean of squared errors of the SVR solution on the training partition.", new DoubleValue(trainAccuracy)));
+        Results.Add(new Result("Accuracy (test)", "The mean of squared errors of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
+
+        Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.",
+          new IntValue(nSv)));
+      }
     }

…
     }

+    // BackwardsCompatibility3.4
+    #region Backwards compatible code, remove with 3.5
     public static SupportVectorClassificationSolution CreateSupportVectorClassificationSolution(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
       int svmType, int kernelType, double cost, double nu, double gamma, int degree, out double trainingAccuracy, out double testAccuracy, out int nSv) {
+
+      ISupportVectorMachineModel model;
+      Run(problemData, allowedInputVariables, svmType, kernelType, cost, nu, gamma, degree, out model, out nSv);
+      var solution = new SupportVectorClassificationSolution((SupportVectorMachineModel)model, (IClassificationProblemData)problemData.Clone());
+
+      trainingAccuracy = solution.TrainingAccuracy;
+      testAccuracy = solution.TestAccuracy;
+
+      return solution;
+    }
+
+    #endregion
+
+    public static void Run(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
+      int svmType, int kernelType, double cost, double nu, double gamma, int degree,
+      out ISupportVectorMachineModel model, out int nSv) {
       var dataset = problemData.Dataset;
       string targetVariable = problemData.TargetVariable;
       IEnumerable<int> rows = problemData.TrainingIndices;

-      //extract SVM parameters from scope and set them
-      svm_parameter parameter = new svm_parameter();
-      parameter.svm_type = svmType;
-      parameter.kernel_type = kernelType;
-      parameter.C = cost;
-      parameter.nu = nu;
-      parameter.gamma = gamma;
-      parameter.cache_size = 500;
-      parameter.probability = 0;
-      parameter.eps = 0.001;
-      parameter.degree = degree;
-      parameter.shrinking = 1;
-      parameter.coef0 = 0;
+      svm_parameter parameter = new svm_parameter {
+        svm_type = svmType,
+        kernel_type = kernelType,
+        C = cost,
+        nu = nu,
+        gamma = gamma,
+        cache_size = 500,
+        probability = 0,
+        eps = 0.001,
+        degree = degree,
+        shrinking = 1,
+        coef0 = 0
+      };

       var weightLabels = new List<int>();
…
       parameter.weight = weights.ToArray();

-
       svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
       RangeTransform rangeTransform = RangeTransform.Compute(problem);
       svm_problem scaledProblem = rangeTransform.Scale(problem);
       var svmModel = svm.svm_train(scaledProblem, parameter);
-      var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
-      var solution = new SupportVectorClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
-
       nSv = svmModel.SV.Length;
-      trainingAccuracy = solution.TrainingAccuracy;
-      testAccuracy = solution.TestAccuracy;
-
-      return solution;
+
+      model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
     }

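The new static Run method trains the libSVM model and reports the number of support vectors without building a solution; the old CreateSupportVectorClassificationSolution overload remains as a backwards-compatible wrapper around it. A hedged sketch of calling Run directly; problemData is a placeholder, and the integer type codes are the libSVM codes that the old overload already accepted (the concrete values shown are illustrative assumptions).

    // sketch: train the SVM model only, then predict on the test partition
    ISupportVectorMachineModel model;
    int nSv;
    int svmType = 0, kernelType = 2;   // placeholder libSVM codes, e.g. C-SVC with an RBF kernel
    SupportVectorClassification.Run(problemData, problemData.AllowedInputVariables,
      svmType, kernelType, 1.0, 0.5, 1.0, 3, out model, out nSv);   // cost, nu, gamma, degree
    var testEstimates = model.GetEstimatedClassValues(problemData.Dataset, problemData.TestIndices);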
  • stable/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorRegression.cs

r12708 → r13283

     private const string EpsilonParameterName = "Epsilon";
     private const string DegreeParameterName = "Degree";
+    private const string CreateSolutionParameterName = "CreateSolution";

     #region parameter properties
…
     public IValueParameter<IntValue> DegreeParameter {
       get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
+    }
+    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
     }
     #endregion
…
     public IntValue Degree {
       get { return DegreeParameter.Value; }
+    }
+    public bool CreateSolution {
+      get { return CreateSolutionParameter.Value.Value; }
+      set { CreateSolutionParameter.Value.Value = value; }
     }
     #endregion
…
       Parameters.Add(new ValueParameter<DoubleValue>(EpsilonParameterName, "The value of the epsilon parameter for epsilon-SVR.", new DoubleValue(0.1)));
       Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+      Parameters[CreateSolutionParameterName].Hidden = true;
     }
     [StorableHook(HookType.AfterDeserialization)]
     private void AfterDeserialization() {
       #region backwards compatibility (change with 3.4)
-      if (!Parameters.ContainsKey(DegreeParameterName))
-        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+
+      if (!Parameters.ContainsKey(DegreeParameterName)) {
+        Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName,
+          "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+      }
+      if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+        Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+        Parameters[CreateSolutionParameterName].Hidden = true;
+      }
       #endregion
     }
…
       IRegressionProblemData problemData = Problem.ProblemData;
       IEnumerable<string> selectedInputVariables = problemData.AllowedInputVariables;
-      double trainR2, testR2;
       int nSv;
-      var solution = CreateSupportVectorRegressionSolution(problemData, selectedInputVariables, SvmType.Value,
-        KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value,
-        out trainR2, out testR2, out nSv);
-
-      Results.Add(new Result("Support vector regression solution", "The support vector regression solution.", solution));
-      Results.Add(new Result("Training R²", "The Pearson's R² of the SVR solution on the training partition.", new DoubleValue(trainR2)));
-      Results.Add(new Result("Test R²", "The Pearson's R² of the SVR solution on the test partition.", new DoubleValue(testR2)));
+      ISupportVectorMachineModel model;
+      Run(problemData, selectedInputVariables, SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value, out model, out nSv);
+
+      if (CreateSolution) {
+        var solution = new SupportVectorRegressionSolution((SupportVectorMachineModel)model, (IRegressionProblemData)problemData.Clone());
+        Results.Add(new Result("Support vector regression solution", "The support vector regression solution.", solution));
+      }
+
       Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
-    }
-
-    public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
+
+
+      {
+        // calculate regression model metrics
+        var ds = problemData.Dataset;
+        var trainRows = problemData.TrainingIndices;
+        var testRows = problemData.TestIndices;
+        var yTrain = ds.GetDoubleValues(problemData.TargetVariable, trainRows);
+        var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
+        var yPredTrain = model.GetEstimatedValues(ds, trainRows).ToArray();
+        var yPredTest = model.GetEstimatedValues(ds, testRows).ToArray();
+
+        OnlineCalculatorError error;
+        var trainMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+        if (error != OnlineCalculatorError.None) trainMse = double.MaxValue;
+        var testMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTest, yTest, out error);
+        if (error != OnlineCalculatorError.None) testMse = double.MaxValue;
+
+        Results.Add(new Result("Mean squared error (training)", "The mean of squared errors of the SVR solution on the training partition.", new DoubleValue(trainMse)));
+        Results.Add(new Result("Mean squared error (test)", "The mean of squared errors of the SVR solution on the test partition.", new DoubleValue(testMse)));
+
+
+        var trainMae = OnlineMeanAbsoluteErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+        if (error != OnlineCalculatorError.None) trainMae = double.MaxValue;
+        var testMae = OnlineMeanAbsoluteErrorCalculator.Calculate(yPredTest, yTest, out error);
+        if (error != OnlineCalculatorError.None) testMae = double.MaxValue;
+
+        Results.Add(new Result("Mean absolute error (training)", "The mean of absolute errors of the SVR solution on the training partition.", new DoubleValue(trainMae)));
+        Results.Add(new Result("Mean absolute error (test)", "The mean of absolute errors of the SVR solution on the test partition.", new DoubleValue(testMae)));
+
+
+        var trainRelErr = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+        if (error != OnlineCalculatorError.None) trainRelErr = double.MaxValue;
+        var testRelErr = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(yPredTest, yTest, out error);
+        if (error != OnlineCalculatorError.None) testRelErr = double.MaxValue;
+
+        Results.Add(new Result("Average relative error (training)", "The mean of relative errors of the SVR solution on the training partition.", new DoubleValue(trainRelErr)));
+        Results.Add(new Result("Average relative error (test)", "The mean of relative errors of the SVR solution on the test partition.", new DoubleValue(testRelErr)));
+      }
+    }
+
+    // BackwardsCompatibility3.4
+    #region Backwards compatible code, remove with 3.5
+    // for compatibility with old API
+    public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(
+      IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
       string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
       out double trainingR2, out double testR2, out int nSv) {
+      ISupportVectorMachineModel model;
+      Run(problemData, allowedInputVariables, svmType, kernelType, cost, nu, gamma, epsilon, degree, out model, out nSv);
+
+      var solution = new SupportVectorRegressionSolution((SupportVectorMachineModel)model, (IRegressionProblemData)problemData.Clone());
+      trainingR2 = solution.TrainingRSquared;
+      testR2 = solution.TestRSquared;
+      return solution;
+    }
+    #endregion
+
+    public static void Run(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
+      string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
+      out ISupportVectorMachineModel model, out int nSv) {
       var dataset = problemData.Dataset;
       string targetVariable = problemData.TargetVariable;
       IEnumerable<int> rows = problemData.TrainingIndices;

-      //extract SVM parameters from scope and set them
-      svm_parameter parameter = new svm_parameter();
-      parameter.svm_type = GetSvmType(svmType);
-      parameter.kernel_type = GetKernelType(kernelType);
-      parameter.C = cost;
-      parameter.nu = nu;
-      parameter.gamma = gamma;
-      parameter.p = epsilon;
-      parameter.cache_size = 500;
-      parameter.probability = 0;
-      parameter.eps = 0.001;
-      parameter.degree = degree;
-      parameter.shrinking = 1;
-      parameter.coef0 = 0;
-
-
+      svm_parameter parameter = new svm_parameter {
+        svm_type = GetSvmType(svmType),
+        kernel_type = GetKernelType(kernelType),
+        C = cost,
+        nu = nu,
+        gamma = gamma,
+        p = epsilon,
+        cache_size = 500,
+        probability = 0,
+        eps = 0.001,
+        degree = degree,
+        shrinking = 1,
+        coef0 = 0
+      };

       svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
…
       var svmModel = svm.svm_train(scaledProblem, parameter);
       nSv = svmModel.SV.Length;
-      var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
-      var solution = new SupportVectorRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
-      trainingR2 = solution.TrainingRSquared;
-      testR2 = solution.TestRSquared;
-      return solution;
+
+      model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
     }

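SupportVectorRegression gets the same Run/wrapper structure, with the difference that this overload takes the SVM and kernel type as strings (converted internally via GetSvmType/GetKernelType). Sketch below; problemData is a placeholder and the concrete string values are an assumption, not taken from this changeset.

    // sketch: train the SVR model only, then predict on the test partition
    ISupportVectorMachineModel model;
    int nSv;
    SupportVectorRegression.Run(problemData, problemData.AllowedInputVariables,
      "NU_SVR", "RBF",                       // assumed type/kernel names, see note above
      1.0, 0.5, 1.0, 0.1, 3,                 // cost, nu, gamma, epsilon, degree
      out model, out nSv);
    var testPredictions = model.GetEstimatedValues(problemData.Dataset, problemData.TestIndices).ToArray();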