- Timestamp: 11/19/15 11:45:22
- Location: stable
- Files: 10 edited

Legend:
- lines prefixed with + were added
- lines prefixed with - were removed
- unprefixed (indented) lines are unchanged context; … marks omitted unchanged code

stable
- Property svn:mergeinfo changed: /trunk/sources merged: 12934,13204-13205,13270

stable/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Algorithms.DataAnalysis merged: 12934,13204-13205,13270

stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassification.cs
r12708 → r13283

    using HeuristicLab.Common;
    using HeuristicLab.Core;
+   using HeuristicLab.Data;
    using HeuristicLab.Optimization;
    using HeuristicLab.Parameters;
  …
    private const string ModelParameterName = "Model";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region parameter properties
  …
    public IFixedValueParameter<GaussianProcessClassificationSolutionCreator> GaussianProcessSolutionCreatorParameter {
      get { return (IFixedValueParameter<GaussianProcessClassificationSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
+   #endregion
+   #region properties
+   public bool CreateSolution {
+     get { return CreateSolutionParameter.Value.Value; }
+     set { CreateSolutionParameter.Value.Value = value; }
    }
    #endregion
  …
        new GaussianProcessClassificationSolutionCreator()));
    Parameters[SolutionCreatorParameterName].Hidden = true;
+   // TODO: it would be better to deactivate the solution creator when this parameter is changed
+   Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+   Parameters[CreateSolutionParameterName].Hidden = true;

    ParameterizedModelCreators();
  …
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
+     // BackwardsCompatibility3.3
+     #region Backwards compatible code, remove with 3.4
+     if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+       Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+       Parameters[CreateSolutionParameterName].Hidden = true;
+     }
+     #endregion
      RegisterEventHandlers();
    }
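Usage note: the new CreateSolution flag lets callers skip building and storing the solution object, e.g. in large batch experiments. A minimal sketch, assuming the usual HeuristicLab algorithm wiring (problem setup and starting the run are outside this changeset; only the CreateSolution flag is new here):

  // Minimal sketch: configure Gaussian process classification so that no solution object
  // is produced at the end of the run. Scalar results are still collected.
  public static void ConfigureWithoutSolution(GaussianProcessClassification gpc, ClassificationProblem problem) {
    gpc.Problem = problem;        // assumed: the problem already contains the loaded problem data
    gpc.CreateSolution = false;   // new flag from r13283 (default true, hidden in the GUI)
    // afterwards, start the run through the usual algorithm/experiment infrastructure
  }
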
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessClassificationSolutionCreator.cs
r12009 → r13283

    private const string TrainingAccuracyResultName = "Accuracy (training)";
    private const string TestAccuracyResultName = "Accuracy (test)";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region Parameter Properties
  …
      get { return (ILookupParameter<ResultCollection>)Parameters[ResultsParameterName]; }
    }
+   public ILookupParameter<BoolValue> CreateSolutionParameter {
+     get { return (ILookupParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
    #endregion
  …
    Parameters.Add(new LookupParameter<IClassificationProblemData>(ProblemDataParameterName, "The classification problem data for the Gaussian process solution."));
    Parameters.Add(new LookupParameter<IGaussianProcessModel>(ModelParameterName, "The Gaussian process classification model to use for the solution."));
+   Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+
    // in & out
    Parameters.Add(new LookupParameter<ResultCollection>(ResultsParameterName, "The result collection of the algorithm."));
    // out
    Parameters.Add(new LookupParameter<IDiscriminantFunctionClassificationSolution>(SolutionParameterName, "The produced Gaussian process solution."));
+   }
+
+   [StorableHook(HookType.AfterDeserialization)]
+   private void AfterDeserialization() {
+     // BackwardsCompatibility3.3
+     #region Backwards compatible code, remove with 3.4
+     if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+       Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+     }
+     #endregion
    }
  …
    public override IOperation Apply() {
-     if (ModelParameter.ActualValue != null) {
+     if (ModelParameter.ActualValue != null && CreateSolutionParameter.ActualValue.Value == true) {
        var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
        m.FixParameters();
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs
r12708 → r13283

    using HeuristicLab.Common;
    using HeuristicLab.Core;
+   using HeuristicLab.Data;
    using HeuristicLab.Optimization;
    using HeuristicLab.Parameters;
  …
    private const string ModelParameterName = "Model";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region parameter properties
  …
    public IFixedValueParameter<GaussianProcessRegressionSolutionCreator> GaussianProcessSolutionCreatorParameter {
      get { return (IFixedValueParameter<GaussianProcessRegressionSolutionCreator>)Parameters[SolutionCreatorParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
+   #endregion
+   #region properties
+   public bool CreateSolution {
+     get { return CreateSolutionParameter.Value.Value; }
+     set { CreateSolutionParameter.Value.Value = value; }
    }
    #endregion
  …
    Parameters.Add(new ConstrainedValueParameter<IGaussianProcessRegressionModelCreator>(ModelCreatorParameterName, "The operator to create the Gaussian process model.",
      new ItemSet<IGaussianProcessRegressionModelCreator>(modelCreators), defaultModelCreator));
-   // this parameter is not intended to be changed,
+   // the solution creator cannot be changed
    Parameters.Add(new FixedValueParameter<GaussianProcessRegressionSolutionCreator>(SolutionCreatorParameterName, "The solution creator for the algorithm",
      new GaussianProcessRegressionSolutionCreator()));
    Parameters[SolutionCreatorParameterName].Hidden = true;
+   // TODO: it would be better to deactivate the solution creator when this parameter is changed
+   Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+   Parameters[CreateSolutionParameterName].Hidden = true;

    ParameterizedModelCreators();
  …
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
+     // BackwardsCompatibility3.3
+     #region Backwards compatible code, remove with 3.4
+     if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+       Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+       Parameters[CreateSolutionParameterName].Hidden = true;
+     }
+     #endregion
      RegisterEventHandlers();
    }
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionSolutionCreator.cs
r12009 → r13283

    private const string TrainingRSquaredResultName = "Training R²";
    private const string TestRSquaredResultName = "Test R²";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region Parameter Properties
  …
      get { return (ILookupParameter<ResultCollection>)Parameters[ResultsParameterName]; }
    }
+   public ILookupParameter<BoolValue> CreateSolutionParameter {
+     get { return (ILookupParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
    #endregion
  …
    Parameters.Add(new LookupParameter<IRegressionProblemData>(ProblemDataParameterName, "The regression problem data for the Gaussian process solution."));
    Parameters.Add(new LookupParameter<IGaussianProcessModel>(ModelParameterName, "The Gaussian process regression model to use for the solution."));
+   Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+
    // in & out
    Parameters.Add(new LookupParameter<ResultCollection>(ResultsParameterName, "The result collection of the algorithm."));
    // out
    Parameters.Add(new LookupParameter<IGaussianProcessSolution>(SolutionParameterName, "The produced Gaussian process solution."));
+   }
+
+   [StorableHook(HookType.AfterDeserialization)]
+   private void AfterDeserialization() {
+     // BackwardsCompatibility3.3
+     #region Backwards compatible code, remove with 3.4
+     if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+       Parameters.Add(new LookupParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run"));
+     }
+     #endregion
    }
  …
    public override IOperation Apply() {
-     if (ModelParameter.ActualValue != null) {
+     if (ModelParameter.ActualValue != null && CreateSolutionParameter.ActualValue.Value == true) {
        var m = (IGaussianProcessModel)ModelParameter.ActualValue.Clone();
        m.FixParameters();
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestClassification.cs
r12708 → r13283

    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region parameter properties
  …
    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
    #endregion
  …
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
+   public bool CreateSolution {
+     get { return CreateSolutionParameter.Value.Value; }
+     set { CreateSolutionParameter.Value.Value = value; }
+   }
    #endregion

  …
    Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+   Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+   Parameters[CreateSolutionParameterName].Hidden = true;
+
    Problem = new ClassificationProblem();
    }
  …
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
+     // BackwardsCompatibility3.3
+     #region Backwards compatible code, remove with 3.4
      if (!Parameters.ContainsKey(MParameterName))
        Parameters.Add(new FixedValueParameter<DoubleValue>(MParameterName, "The ratio of features that will be used in the construction of individual trees (0<m<=1)", new DoubleValue(0.5)));
  …
      if (!Parameters.ContainsKey((SetSeedRandomlyParameterName)))
        Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+     if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+       Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+       Parameters[CreateSolutionParameterName].Hidden = true;
+     }
+     #endregion
    }
  …
      if (SetSeedRandomly) Seed = new System.Random().Next();

-     var solution = CreateRandomForestClassificationSolution(Problem.ProblemData, NumberOfTrees, R, M, Seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
-     Results.Add(new Result(RandomForestClassificationModelResultName, "The random forest classification solution.", solution));
+     var model = CreateRandomForestClassificationModel(Problem.ProblemData, NumberOfTrees, R, M, Seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the random forest regression solution on the training set.", new DoubleValue(rmsError)));
      Results.Add(new Result("Relative classification error", "Relative classification error of the random forest regression solution on the training set.", new PercentValue(relClassificationError)));
      Results.Add(new Result("Root mean square error (out-of-bag)", "The out-of-bag root of the mean of squared errors of the random forest regression solution.", new DoubleValue(outOfBagRmsError)));
      Results.Add(new Result("Relative classification error (out-of-bag)", "The out-of-bag relative classification error of the random forest regression solution.", new PercentValue(outOfBagRelClassificationError)));
+
+     if (CreateSolution) {
+       var solution = new RandomForestClassificationSolution((IClassificationProblemData)Problem.ProblemData.Clone(), model);
+       Results.Add(new Result(RandomForestClassificationModelResultName, "The random forest classification solution.", solution));
+     }
+   }
+
+   // keep for compatibility with old API
+   public static RandomForestClassificationSolution CreateRandomForestClassificationSolution(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
+     out double rmsError, out double relClassificationError, out double outOfBagRmsError, out double outOfBagRelClassificationError) {
+     var model = CreateRandomForestClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
+     return new RandomForestClassificationSolution((IClassificationProblemData)problemData.Clone(), model);
    }

-   public static IClassificationSolution CreateRandomForestClassificationSolution(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
+   public static RandomForestModel CreateRandomForestClassificationModel(IClassificationProblemData problemData, int nTrees, double r, double m, int seed,
      out double rmsError, out double relClassificationError, out double outOfBagRmsError, out double outOfBagRelClassificationError) {
-     var model = RandomForestModel.CreateClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
-     return new RandomForestClassificationSolution((IClassificationProblemData)problemData.Clone(), model);
+     return RandomForestModel.CreateClassificationModel(problemData, nTrees, r, m, seed, out rmsError, out relClassificationError, out outOfBagRmsError, out outOfBagRelClassificationError);
    }
    #endregion
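Usage note: callers that only need the trained forest (for example, for custom evaluation) can use the new static model-creation entry point directly; the old CreateRandomForestClassificationSolution is kept for API compatibility. A minimal sketch with illustrative parameter values:

  // Minimal sketch: train only a RandomForestModel via the API added in r13283.
  // The values (50 trees, r = 0.3, m = 0.5, seed = 1234) are illustrative, not defaults.
  public static RandomForestModel TrainModelOnly(IClassificationProblemData problemData) {
    double rmsError, relClassError, oobRmsError, oobRelClassError;
    var model = RandomForestClassification.CreateRandomForestClassificationModel(
      problemData, 50, 0.3, 0.5, 1234,
      out rmsError, out relClassError, out oobRmsError, out oobRelClassError);
    // the out parameters report training and out-of-bag error estimates
    return model;
  }
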
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestRegression.cs
r12708 → r13283

    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region parameter properties
  …
    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
    #endregion
  …
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
+   public bool CreateSolution {
+     get { return CreateSolutionParameter.Value.Value; }
+     set { CreateSolutionParameter.Value.Value = value; }
+   }
    #endregion
    [StorableConstructor]
  …
    Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
    Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+   Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+   Parameters[CreateSolutionParameterName].Hidden = true;
+
    Problem = new RegressionProblem();
    }
  …
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
+     // BackwardsCompatibility3.3
+     #region Backwards compatible code, remove with 3.4
      if (!Parameters.ContainsKey(MParameterName))
        Parameters.Add(new FixedValueParameter<DoubleValue>(MParameterName, "The ratio of features that will be used in the construction of individual trees (0<m<=1)", new DoubleValue(0.5)));
  …
      if (!Parameters.ContainsKey((SetSeedRandomlyParameterName)))
        Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
+     if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+       Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+       Parameters[CreateSolutionParameterName].Hidden = true;
+     }
+     #endregion
    }
  …
      double rmsError, avgRelError, outOfBagRmsError, outOfBagAvgRelError;
      if (SetSeedRandomly) Seed = new System.Random().Next();
+     var model = CreateRandomForestRegressionModel(Problem.ProblemData, NumberOfTrees, R, M, Seed,
+       out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);

-     var solution = CreateRandomForestRegressionSolution(Problem.ProblemData, NumberOfTrees, R, M, Seed, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
-     Results.Add(new Result(RandomForestRegressionModelResultName, "The random forest regression solution.", solution));
      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the random forest regression solution on the training set.", new DoubleValue(rmsError)));
      Results.Add(new Result("Average relative error", "The average of relative errors of the random forest regression solution on the training set.", new PercentValue(avgRelError)));
      Results.Add(new Result("Root mean square error (out-of-bag)", "The out-of-bag root of the mean of squared errors of the random forest regression solution.", new DoubleValue(outOfBagRmsError)));
      Results.Add(new Result("Average relative error (out-of-bag)", "The out-of-bag average of relative errors of the random forest regression solution.", new PercentValue(outOfBagAvgRelError)));
+
+     if (CreateSolution) {
+       var solution = new RandomForestRegressionSolution((IRegressionProblemData)Problem.ProblemData.Clone(), model);
+       Results.Add(new Result(RandomForestRegressionModelResultName, "The random forest regression solution.", solution));
+     }
    }

-   public static IRegressionSolution CreateRandomForestRegressionSolution(IRegressionProblemData problemData, int nTrees, double r, double m, int seed,
+   // keep for compatibility with old API
+   public static RandomForestRegressionSolution CreateRandomForestRegressionSolution(IRegressionProblemData problemData, int nTrees, double r, double m, int seed,
      out double rmsError, out double avgRelError, out double outOfBagRmsError, out double outOfBagAvgRelError) {
-     var model = RandomForestModel.CreateRegressionModel(problemData, nTrees, r, m, seed, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
+     var model = CreateRandomForestRegressionModel(problemData, nTrees, r, m, seed,
+       out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
      return new RandomForestRegressionSolution((IRegressionProblemData)problemData.Clone(), model);
    }
+
+   public static RandomForestModel CreateRandomForestRegressionModel(IRegressionProblemData problemData, int nTrees,
+     double r, double m, int seed,
+     out double rmsError, out double avgRelError, out double outOfBagRmsError, out double outOfBagAvgRelError) {
+     return RandomForestModel.CreateRegressionModel(problemData, nTrees, r, m, seed, out rmsError, out avgRelError, out outOfBagRmsError, out outOfBagAvgRelError);
+   }
+
    #endregion
  }
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassification.cs
r12708 → r13283

    private const string GammaParameterName = "Gamma";
    private const string DegreeParameterName = "Degree";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region parameter properties
  …
    public IValueParameter<IntValue> DegreeParameter {
      get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
    #endregion
  …
    public IntValue Degree {
      get { return DegreeParameter.Value; }
    }
+   public bool CreateSolution {
+     get { return CreateSolutionParameter.Value.Value; }
+     set { CreateSolutionParameter.Value.Value = value; }
+   }
    #endregion
  …
    Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
    Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+   Parameters[CreateSolutionParameterName].Hidden = true;
    }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
    #region backwards compatibility (change with 3.4)
-   if (!Parameters.ContainsKey(DegreeParameterName))
-     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   if (!Parameters.ContainsKey(DegreeParameterName)) {
+     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName,
+       "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   }
+   if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+     Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
+       "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+     Parameters[CreateSolutionParameterName].Hidden = true;
+   }
    #endregion
    }
  …
    IClassificationProblemData problemData = Problem.ProblemData;
    IEnumerable<string> selectedInputVariables = problemData.AllowedInputVariables;
-   double trainingAccuracy, testAccuracy;
    int nSv;
-   var solution = CreateSupportVectorClassificationSolution(problemData, selectedInputVariables,
-     SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Degree.Value,
-     out trainingAccuracy, out testAccuracy, out nSv);
-
-   Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution));
-   Results.Add(new Result("Training accuracy", "The accuracy of the SVR solution on the training partition.", new DoubleValue(trainingAccuracy)));
-   Results.Add(new Result("Test accuracy", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
-   Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
+   ISupportVectorMachineModel model;
+
+   Run(problemData, selectedInputVariables, GetSvmType(SvmType.Value), GetKernelType(KernelType.Value), Cost.Value, Nu.Value, Gamma.Value, Degree.Value, out model, out nSv);
+
+   if (CreateSolution) {
+     var solution = new SupportVectorClassificationSolution((SupportVectorMachineModel)model, (IClassificationProblemData)problemData.Clone());
+     Results.Add(new Result("Support vector classification solution", "The support vector classification solution.",
+       solution));
+   }
+
+   {
+     // calculate classification metrics
+     // calculate regression model metrics
+     var ds = problemData.Dataset;
+     var trainRows = problemData.TrainingIndices;
+     var testRows = problemData.TestIndices;
+     var yTrain = ds.GetDoubleValues(problemData.TargetVariable, trainRows);
+     var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
+     var yPredTrain = model.GetEstimatedClassValues(ds, trainRows);
+     var yPredTest = model.GetEstimatedClassValues(ds, testRows);
+
+     OnlineCalculatorError error;
+     var trainAccuracy = OnlineAccuracyCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainAccuracy = double.MaxValue;
+     var testAccuracy = OnlineAccuracyCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testAccuracy = double.MaxValue;
+
+     Results.Add(new Result("Accuracy (training)", "The mean of squared errors of the SVR solution on the training partition.", new DoubleValue(trainAccuracy)));
+     Results.Add(new Result("Accuracy (test)", "The mean of squared errors of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
+
+     Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.",
+       new IntValue(nSv)));
+   }
    }
  …
    }

+   // BackwardsCompatibility3.4
+   #region Backwards compatible code, remove with 3.5
    public static SupportVectorClassificationSolution CreateSupportVectorClassificationSolution(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
      int svmType, int kernelType, double cost, double nu, double gamma, int degree, out double trainingAccuracy, out double testAccuracy, out int nSv) {
+
+     ISupportVectorMachineModel model;
+     Run(problemData, allowedInputVariables, svmType, kernelType, cost, nu, gamma, degree, out model, out nSv);
+     var solution = new SupportVectorClassificationSolution((SupportVectorMachineModel)model, (IClassificationProblemData)problemData.Clone());
+
+     trainingAccuracy = solution.TrainingAccuracy;
+     testAccuracy = solution.TestAccuracy;
+
+     return solution;
+   }
+   #endregion
+
+   public static void Run(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
+     int svmType, int kernelType, double cost, double nu, double gamma, int degree,
+     out ISupportVectorMachineModel model, out int nSv) {
      var dataset = problemData.Dataset;
      string targetVariable = problemData.TargetVariable;
      IEnumerable<int> rows = problemData.TrainingIndices;

-     //extract SVM parameters from scope and set them
-     svm_parameter parameter = new svm_parameter();
-     parameter.svm_type = svmType;
-     parameter.kernel_type = kernelType;
-     parameter.C = cost;
-     parameter.nu = nu;
-     parameter.gamma = gamma;
-     parameter.cache_size = 500;
-     parameter.probability = 0;
-     parameter.eps = 0.001;
-     parameter.degree = degree;
-     parameter.shrinking = 1;
-     parameter.coef0 = 0;
+     svm_parameter parameter = new svm_parameter {
+       svm_type = svmType,
+       kernel_type = kernelType,
+       C = cost,
+       nu = nu,
+       gamma = gamma,
+       cache_size = 500,
+       probability = 0,
+       eps = 0.001,
+       degree = degree,
+       shrinking = 1,
+       coef0 = 0
+     };

      var weightLabels = new List<int>();
  …
      parameter.weight = weights.ToArray();

      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
      RangeTransform rangeTransform = RangeTransform.Compute(problem);
      svm_problem scaledProblem = rangeTransform.Scale(problem);
      var svmModel = svm.svm_train(scaledProblem, parameter);
-     var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
-     var solution = new SupportVectorClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
-
      nSv = svmModel.SV.Length;
-     trainingAccuracy = solution.TrainingAccuracy;
-     testAccuracy = solution.TestAccuracy;
-
-     return solution;
+
+     model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
    }
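Usage note: the training logic now lives in a static Run(...) that returns the model and support-vector count through out parameters; solution construction and the accuracy results are handled separately. A minimal sketch (the parameter values and the libSVM type codes below are assumptions for illustration):

  // Minimal sketch of the new static Run(...) entry point added in r13283.
  // svmType/kernelType are libSVM integer codes as produced by GetSvmType/GetKernelType
  // inside the algorithm; the literal values below are assumed, not verified defaults.
  public static ISupportVectorMachineModel TrainSvm(IClassificationProblemData problemData) {
    ISupportVectorMachineModel model;
    int nSv;
    int svmType = 0;     // assumed: C-SVC in libSVM's encoding
    int kernelType = 2;  // assumed: RBF kernel in libSVM's encoding
    SupportVectorClassification.Run(
      problemData, problemData.AllowedInputVariables,
      svmType, kernelType, /* cost */ 1.0, /* nu */ 0.5, /* gamma */ 1.0, /* degree */ 3,
      out model, out nSv);
    // nSv now holds the number of support vectors of the trained model
    return model;
  }
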
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorRegression.cs
r12708 → r13283

    private const string EpsilonParameterName = "Epsilon";
    private const string DegreeParameterName = "Degree";
+   private const string CreateSolutionParameterName = "CreateSolution";

    #region parameter properties
  …
    public IValueParameter<IntValue> DegreeParameter {
      get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
+   }
    #endregion
  …
    public IntValue Degree {
      get { return DegreeParameter.Value; }
    }
+   public bool CreateSolution {
+     get { return CreateSolutionParameter.Value.Value; }
+     set { CreateSolutionParameter.Value.Value = value; }
+   }
    #endregion
  …
    Parameters.Add(new ValueParameter<DoubleValue>(EpsilonParameterName, "The value of the epsilon parameter for epsilon-SVR.", new DoubleValue(0.1)));
    Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+   Parameters[CreateSolutionParameterName].Hidden = true;
    }
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
    #region backwards compatibility (change with 3.4)
+
-   if (!Parameters.ContainsKey(DegreeParameterName))
-     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   if (!Parameters.ContainsKey(DegreeParameterName)) {
+     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName,
+       "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   }
+   if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+     Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+     Parameters[CreateSolutionParameterName].Hidden = true;
+   }
    #endregion
    }
  …
    IRegressionProblemData problemData = Problem.ProblemData;
    IEnumerable<string> selectedInputVariables = problemData.AllowedInputVariables;
-   double trainR2, testR2;
    int nSv;
-   var solution = CreateSupportVectorRegressionSolution(problemData, selectedInputVariables, SvmType.Value,
-     KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value,
-     out trainR2, out testR2, out nSv);
-
-   Results.Add(new Result("Support vector regression solution", "The support vector regression solution.", solution));
-   Results.Add(new Result("Training R²", "The Pearson's R² of the SVR solution on the training partition.", new DoubleValue(trainR2)));
-   Results.Add(new Result("Test R²", "The Pearson's R² of the SVR solution on the test partition.", new DoubleValue(testR2)));
+   ISupportVectorMachineModel model;
+   Run(problemData, selectedInputVariables, SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value, out model, out nSv);
+
+   if (CreateSolution) {
+     var solution = new SupportVectorRegressionSolution((SupportVectorMachineModel)model, (IRegressionProblemData)problemData.Clone());
+     Results.Add(new Result("Support vector regression solution", "The support vector regression solution.", solution));
+   }
+
    Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
-   }
-
-   public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
+
+   {
+     // calculate regression model metrics
+     var ds = problemData.Dataset;
+     var trainRows = problemData.TrainingIndices;
+     var testRows = problemData.TestIndices;
+     var yTrain = ds.GetDoubleValues(problemData.TargetVariable, trainRows);
+     var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
+     var yPredTrain = model.GetEstimatedValues(ds, trainRows).ToArray();
+     var yPredTest = model.GetEstimatedValues(ds, testRows).ToArray();
+
+     OnlineCalculatorError error;
+     var trainMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainMse = double.MaxValue;
+     var testMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testMse = double.MaxValue;
+
+     Results.Add(new Result("Mean squared error (training)", "The mean of squared errors of the SVR solution on the training partition.", new DoubleValue(trainMse)));
+     Results.Add(new Result("Mean squared error (test)", "The mean of squared errors of the SVR solution on the test partition.", new DoubleValue(testMse)));
+
+     var trainMae = OnlineMeanAbsoluteErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainMae = double.MaxValue;
+     var testMae = OnlineMeanAbsoluteErrorCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testMae = double.MaxValue;
+
+     Results.Add(new Result("Mean absolute error (training)", "The mean of absolute errors of the SVR solution on the training partition.", new DoubleValue(trainMae)));
+     Results.Add(new Result("Mean absolute error (test)", "The mean of absolute errors of the SVR solution on the test partition.", new DoubleValue(testMae)));
+
+     var trainRelErr = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainRelErr = double.MaxValue;
+     var testRelErr = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testRelErr = double.MaxValue;
+
+     Results.Add(new Result("Average relative error (training)", "The mean of relative errors of the SVR solution on the training partition.", new DoubleValue(trainRelErr)));
+     Results.Add(new Result("Average relative error (test)", "The mean of relative errors of the SVR solution on the test partition.", new DoubleValue(testRelErr)));
+   }
+   }
+
+   // BackwardsCompatibility3.4
+   #region Backwards compatible code, remove with 3.5
+   // for compatibility with old API
+   public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(
+     IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
      string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
      out double trainingR2, out double testR2, out int nSv) {
+     ISupportVectorMachineModel model;
+     Run(problemData, allowedInputVariables, svmType, kernelType, cost, nu, gamma, epsilon, degree, out model, out nSv);
+
+     var solution = new SupportVectorRegressionSolution((SupportVectorMachineModel)model, (IRegressionProblemData)problemData.Clone());
+     trainingR2 = solution.TrainingRSquared;
+     testR2 = solution.TestRSquared;
+     return solution;
+   }
+   #endregion
+
+   public static void Run(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
+     string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
+     out ISupportVectorMachineModel model, out int nSv) {
      var dataset = problemData.Dataset;
      string targetVariable = problemData.TargetVariable;
      IEnumerable<int> rows = problemData.TrainingIndices;

-     //extract SVM parameters from scope and set them
-     svm_parameter parameter = new svm_parameter();
-     parameter.svm_type = GetSvmType(svmType);
-     parameter.kernel_type = GetKernelType(kernelType);
-     parameter.C = cost;
-     parameter.nu = nu;
-     parameter.gamma = gamma;
-     parameter.p = epsilon;
-     parameter.cache_size = 500;
-     parameter.probability = 0;
-     parameter.eps = 0.001;
-     parameter.degree = degree;
-     parameter.shrinking = 1;
-     parameter.coef0 = 0;
+     svm_parameter parameter = new svm_parameter {
+       svm_type = GetSvmType(svmType),
+       kernel_type = GetKernelType(kernelType),
+       C = cost,
+       nu = nu,
+       gamma = gamma,
+       p = epsilon,
+       cache_size = 500,
+       probability = 0,
+       eps = 0.001,
+       degree = degree,
+       shrinking = 1,
+       coef0 = 0
+     };

      svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
  …
      var svmModel = svm.svm_train(scaledProblem, parameter);
      nSv = svmModel.SV.Length;
-     var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
-     var solution = new SupportVectorRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
-     trainingR2 = solution.TrainingRSquared;
-     testR2 = solution.TestRSquared;
-     return solution;
+
+     model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
    }
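Usage note: existing callers of the static CreateSupportVectorRegressionSolution keep working; it now wraps Run(...) and still reports the training/test R² through out parameters. A minimal sketch (the svmType/kernelType strings and the parameter values are assumptions and must match the names accepted by GetSvmType/GetKernelType):

  // Minimal sketch of the backwards-compatible static entry point kept in r13283.
  // "NU_SVR" and "RBF" are assumed names; check the algorithm's SvmType/KernelType parameters.
  public static SupportVectorRegressionSolution TrainSvr(IRegressionProblemData problemData) {
    double trainR2, testR2;
    int nSv;
    var solution = SupportVectorRegression.CreateSupportVectorRegressionSolution(
      problemData, problemData.AllowedInputVariables,
      "NU_SVR", "RBF", /* cost */ 1.0, /* nu */ 0.5, /* gamma */ 1.0, /* epsilon */ 0.1, /* degree */ 3,
      out trainR2, out testR2, out nSv);
    return solution;
  }
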