- Timestamp: 11/19/15 11:45:22 (9 years ago)
- Location: stable
- Files: 4 edited
Legend:
- Unmodified (no prefix)
- Added (+)
- Removed (-)
stable
- Property svn:mergeinfo changed
  /trunk/sources merged: 12934,13204-13205,13270
stable/HeuristicLab.Algorithms.DataAnalysis
- Property svn:mergeinfo changed
  /trunk/sources/HeuristicLab.Algorithms.DataAnalysis merged: 12934,13204-13205,13270
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorClassification.cs
r12708 → r13283

  private const string GammaParameterName = "Gamma";
  private const string DegreeParameterName = "Degree";
+ private const string CreateSolutionParameterName = "CreateSolution";

  #region parameter properties
…
  public IValueParameter<IntValue> DegreeParameter {
    get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
+ }
+ public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+   get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
  }
  #endregion
…
  public IntValue Degree {
    get { return DegreeParameter.Value; }
+ }
+ public bool CreateSolution {
+   get { return CreateSolutionParameter.Value.Value; }
+   set { CreateSolutionParameter.Value.Value = value; }
  }
  #endregion
…
  Parameters.Add(new ValueParameter<DoubleValue>(GammaParameterName, "The value of the gamma parameter in the kernel function.", new DoubleValue(1.0)));
  Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+ Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+ Parameters[CreateSolutionParameterName].Hidden = true;
  }
  [StorableHook(HookType.AfterDeserialization)]
  private void AfterDeserialization() {
    #region backwards compatibility (change with 3.4)
-   if (!Parameters.ContainsKey(DegreeParameterName))
-     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   if (!Parameters.ContainsKey(DegreeParameterName)) {
+     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName,
+       "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   }
+   if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+     Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
+       "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+     Parameters[CreateSolutionParameterName].Hidden = true;
+   }
    #endregion
  }
…
    IClassificationProblemData problemData = Problem.ProblemData;
    IEnumerable<string> selectedInputVariables = problemData.AllowedInputVariables;
-   double trainingAccuracy, testAccuracy;
    int nSv;
-   var solution = CreateSupportVectorClassificationSolution(problemData, selectedInputVariables,
-     SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Degree.Value,
-     out trainingAccuracy, out testAccuracy, out nSv);
-
-   Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution));
-   Results.Add(new Result("Training accuracy", "The accuracy of the SVR solution on the training partition.", new DoubleValue(trainingAccuracy)));
-   Results.Add(new Result("Test accuracy", "The accuracy of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
-   Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
+   ISupportVectorMachineModel model;
+
+   Run(problemData, selectedInputVariables, GetSvmType(SvmType.Value), GetKernelType(KernelType.Value), Cost.Value, Nu.Value, Gamma.Value, Degree.Value, out model, out nSv);
+
+   if (CreateSolution) {
+     var solution = new SupportVectorClassificationSolution((SupportVectorMachineModel)model, (IClassificationProblemData)problemData.Clone());
+     Results.Add(new Result("Support vector classification solution", "The support vector classification solution.", solution));
+   }
+
+   {
+     // calculate classification metrics
+     // calculate regression model metrics
+     var ds = problemData.Dataset;
+     var trainRows = problemData.TrainingIndices;
+     var testRows = problemData.TestIndices;
+     var yTrain = ds.GetDoubleValues(problemData.TargetVariable, trainRows);
+     var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
+     var yPredTrain = model.GetEstimatedClassValues(ds, trainRows);
+     var yPredTest = model.GetEstimatedClassValues(ds, testRows);
+
+     OnlineCalculatorError error;
+     var trainAccuracy = OnlineAccuracyCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainAccuracy = double.MaxValue;
+     var testAccuracy = OnlineAccuracyCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testAccuracy = double.MaxValue;
+
+     Results.Add(new Result("Accuracy (training)", "The mean of squared errors of the SVR solution on the training partition.", new DoubleValue(trainAccuracy)));
+     Results.Add(new Result("Accuracy (test)", "The mean of squared errors of the SVR solution on the test partition.", new DoubleValue(testAccuracy)));
+
+     Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
+   }
  }
…
  }

+ // BackwardsCompatibility3.4
+ #region Backwards compatible code, remove with 3.5
  public static SupportVectorClassificationSolution CreateSupportVectorClassificationSolution(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
    int svmType, int kernelType, double cost, double nu, double gamma, int degree, out double trainingAccuracy, out double testAccuracy, out int nSv) {
+
+   ISupportVectorMachineModel model;
+   Run(problemData, allowedInputVariables, svmType, kernelType, cost, nu, gamma, degree, out model, out nSv);
+   var solution = new SupportVectorClassificationSolution((SupportVectorMachineModel)model, (IClassificationProblemData)problemData.Clone());
+
+   trainingAccuracy = solution.TrainingAccuracy;
+   testAccuracy = solution.TestAccuracy;
+
+   return solution;
+ }
+
+ #endregion
+
+ public static void Run(IClassificationProblemData problemData, IEnumerable<string> allowedInputVariables,
+   int svmType, int kernelType, double cost, double nu, double gamma, int degree,
+   out ISupportVectorMachineModel model, out int nSv) {
    var dataset = problemData.Dataset;
    string targetVariable = problemData.TargetVariable;
    IEnumerable<int> rows = problemData.TrainingIndices;

-   //extract SVM parameters from scope and set them
-   svm_parameter parameter = new svm_parameter();
-   parameter.svm_type = svmType;
-   parameter.kernel_type = kernelType;
-   parameter.C = cost;
-   parameter.nu = nu;
-   parameter.gamma = gamma;
-   parameter.cache_size = 500;
-   parameter.probability = 0;
-   parameter.eps = 0.001;
-   parameter.degree = degree;
-   parameter.shrinking = 1;
-   parameter.coef0 = 0;
+   svm_parameter parameter = new svm_parameter {
+     svm_type = svmType,
+     kernel_type = kernelType,
+     C = cost,
+     nu = nu,
+     gamma = gamma,
+     cache_size = 500,
+     probability = 0,
+     eps = 0.001,
+     degree = degree,
+     shrinking = 1,
+     coef0 = 0
+   };

    var weightLabels = new List<int>();
…
    parameter.weight = weights.ToArray();

    svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
    RangeTransform rangeTransform = RangeTransform.Compute(problem);
    svm_problem scaledProblem = rangeTransform.Scale(problem);
    var svmModel = svm.svm_train(scaledProblem, parameter);
-   var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
-   var solution = new SupportVectorClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
-
    nSv = svmModel.SV.Length;
-   trainingAccuracy = solution.TrainingAccuracy;
-   testAccuracy = solution.TestAccuracy;
-
-   return solution;
+
+   model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables, problemData.ClassValues);
  }
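The change above splits model training (the new static Run overload) from solution creation, which is now gated by the hidden CreateSolution parameter. Below is a minimal sketch of how the refactored API might be driven from user code; the helper name TrainSvcSketch, the libsvm constants svm_parameter.C_SVC and svm_parameter.RBF, and the literal parameter values are illustrative assumptions, not part of this changeset.

// Hedged sketch: train a classifier via the new static Run() and only build a
// solution object on demand, mirroring the new CreateSolution flag.
public static SupportVectorClassificationSolution TrainSvcSketch(
    IClassificationProblemData problemData, bool createSolution) {
  ISupportVectorMachineModel model;
  int nSv;
  SupportVectorClassification.Run(
      problemData, problemData.AllowedInputVariables,
      svm_parameter.C_SVC,    // svmType as a libsvm constant (assumed)
      svm_parameter.RBF,      // kernelType as a libsvm constant (assumed)
      1.0, 0.5, 1.0, 3,       // cost, nu, gamma, degree (illustrative values)
      out model, out nSv);

  if (!createSolution) return null;   // model-only run, as the algorithm now supports
  return new SupportVectorClassificationSolution(
      (SupportVectorMachineModel)model,
      (IClassificationProblemData)problemData.Clone());
}

A plausible motivation for the split: large experiments can skip the cost of storing a full solution per run while the accuracy results are still reported.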
stable/HeuristicLab.Algorithms.DataAnalysis/3.4/SupportVectorMachine/SupportVectorRegression.cs
r12708 → r13283

  private const string EpsilonParameterName = "Epsilon";
  private const string DegreeParameterName = "Degree";
+ private const string CreateSolutionParameterName = "CreateSolution";

  #region parameter properties
…
  public IValueParameter<IntValue> DegreeParameter {
    get { return (IValueParameter<IntValue>)Parameters[DegreeParameterName]; }
+ }
+ public IFixedValueParameter<BoolValue> CreateSolutionParameter {
+   get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
  }
  #endregion
…
  public IntValue Degree {
    get { return DegreeParameter.Value; }
+ }
+ public bool CreateSolution {
+   get { return CreateSolutionParameter.Value.Value; }
+   set { CreateSolutionParameter.Value.Value = value; }
  }
  #endregion
…
  Parameters.Add(new ValueParameter<DoubleValue>(EpsilonParameterName, "The value of the epsilon parameter for epsilon-SVR.", new DoubleValue(0.1)));
  Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+ Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+ Parameters[CreateSolutionParameterName].Hidden = true;
  }
  [StorableHook(HookType.AfterDeserialization)]
  private void AfterDeserialization() {
    #region backwards compatibility (change with 3.4)
-   if (!Parameters.ContainsKey(DegreeParameterName))
-     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName, "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+
+   if (!Parameters.ContainsKey(DegreeParameterName)) {
+     Parameters.Add(new ValueParameter<IntValue>(DegreeParameterName,
+       "The degree parameter for the polynomial kernel function.", new IntValue(3)));
+   }
+   if (!Parameters.ContainsKey(CreateSolutionParameterName)) {
+     Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName, "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
+     Parameters[CreateSolutionParameterName].Hidden = true;
+   }
    #endregion
  }
…
    IRegressionProblemData problemData = Problem.ProblemData;
    IEnumerable<string> selectedInputVariables = problemData.AllowedInputVariables;
-   double trainR2, testR2;
    int nSv;
-   var solution = CreateSupportVectorRegressionSolution(problemData, selectedInputVariables, SvmType.Value,
-     KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value,
-     out trainR2, out testR2, out nSv);
-
-   Results.Add(new Result("Support vector regression solution", "The support vector regression solution.", solution));
-   Results.Add(new Result("Training R²", "The Pearson's R² of the SVR solution on the training partition.", new DoubleValue(trainR2)));
-   Results.Add(new Result("Test R²", "The Pearson's R² of the SVR solution on the test partition.", new DoubleValue(testR2)));
+   ISupportVectorMachineModel model;
+   Run(problemData, selectedInputVariables, SvmType.Value, KernelType.Value, Cost.Value, Nu.Value, Gamma.Value, Epsilon.Value, Degree.Value, out model, out nSv);
+
+   if (CreateSolution) {
+     var solution = new SupportVectorRegressionSolution((SupportVectorMachineModel)model, (IRegressionProblemData)problemData.Clone());
+     Results.Add(new Result("Support vector regression solution", "The support vector regression solution.", solution));
+   }
+
    Results.Add(new Result("Number of support vectors", "The number of support vectors of the SVR solution.", new IntValue(nSv)));
- }
-
- public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
+
+   {
+     // calculate regression model metrics
+     var ds = problemData.Dataset;
+     var trainRows = problemData.TrainingIndices;
+     var testRows = problemData.TestIndices;
+     var yTrain = ds.GetDoubleValues(problemData.TargetVariable, trainRows);
+     var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
+     var yPredTrain = model.GetEstimatedValues(ds, trainRows).ToArray();
+     var yPredTest = model.GetEstimatedValues(ds, testRows).ToArray();
+
+     OnlineCalculatorError error;
+     var trainMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainMse = double.MaxValue;
+     var testMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testMse = double.MaxValue;
+
+     Results.Add(new Result("Mean squared error (training)", "The mean of squared errors of the SVR solution on the training partition.", new DoubleValue(trainMse)));
+     Results.Add(new Result("Mean squared error (test)", "The mean of squared errors of the SVR solution on the test partition.", new DoubleValue(testMse)));
+
+     var trainMae = OnlineMeanAbsoluteErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainMae = double.MaxValue;
+     var testMae = OnlineMeanAbsoluteErrorCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testMae = double.MaxValue;
+
+     Results.Add(new Result("Mean absolute error (training)", "The mean of absolute errors of the SVR solution on the training partition.", new DoubleValue(trainMae)));
+     Results.Add(new Result("Mean absolute error (test)", "The mean of absolute errors of the SVR solution on the test partition.", new DoubleValue(testMae)));
+
+     var trainRelErr = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(yPredTrain, yTrain, out error);
+     if (error != OnlineCalculatorError.None) trainRelErr = double.MaxValue;
+     var testRelErr = OnlineMeanAbsolutePercentageErrorCalculator.Calculate(yPredTest, yTest, out error);
+     if (error != OnlineCalculatorError.None) testRelErr = double.MaxValue;
+
+     Results.Add(new Result("Average relative error (training)", "The mean of relative errors of the SVR solution on the training partition.", new DoubleValue(trainRelErr)));
+     Results.Add(new Result("Average relative error (test)", "The mean of relative errors of the SVR solution on the test partition.", new DoubleValue(testRelErr)));
+   }
+ }
+
+ // BackwardsCompatibility3.4
+ #region Backwards compatible code, remove with 3.5
+ // for compatibility with old API
+ public static SupportVectorRegressionSolution CreateSupportVectorRegressionSolution(
+   IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
    string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
    out double trainingR2, out double testR2, out int nSv) {
+   ISupportVectorMachineModel model;
+   Run(problemData, allowedInputVariables, svmType, kernelType, cost, nu, gamma, epsilon, degree, out model, out nSv);
+
+   var solution = new SupportVectorRegressionSolution((SupportVectorMachineModel)model, (IRegressionProblemData)problemData.Clone());
+   trainingR2 = solution.TrainingRSquared;
+   testR2 = solution.TestRSquared;
+   return solution;
+ }
+ #endregion
+
+ public static void Run(IRegressionProblemData problemData, IEnumerable<string> allowedInputVariables,
+   string svmType, string kernelType, double cost, double nu, double gamma, double epsilon, int degree,
+   out ISupportVectorMachineModel model, out int nSv) {
    var dataset = problemData.Dataset;
    string targetVariable = problemData.TargetVariable;
    IEnumerable<int> rows = problemData.TrainingIndices;

-   //extract SVM parameters from scope and set them
-   svm_parameter parameter = new svm_parameter();
-   parameter.svm_type = GetSvmType(svmType);
-   parameter.kernel_type = GetKernelType(kernelType);
-   parameter.C = cost;
-   parameter.nu = nu;
-   parameter.gamma = gamma;
-   parameter.p = epsilon;
-   parameter.cache_size = 500;
-   parameter.probability = 0;
-   parameter.eps = 0.001;
-   parameter.degree = degree;
-   parameter.shrinking = 1;
-   parameter.coef0 = 0;
+   svm_parameter parameter = new svm_parameter {
+     svm_type = GetSvmType(svmType),
+     kernel_type = GetKernelType(kernelType),
+     C = cost,
+     nu = nu,
+     gamma = gamma,
+     p = epsilon,
+     cache_size = 500,
+     probability = 0,
+     eps = 0.001,
+     degree = degree,
+     shrinking = 1,
+     coef0 = 0
+   };

    svm_problem problem = SupportVectorMachineUtil.CreateSvmProblem(dataset, targetVariable, allowedInputVariables, rows);
…
    var svmModel = svm.svm_train(scaledProblem, parameter);
    nSv = svmModel.SV.Length;
-   var model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
-   var solution = new SupportVectorRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
-   trainingR2 = solution.TrainingRSquared;
-   testR2 = solution.TestRSquared;
-   return solution;
+
+   model = new SupportVectorMachineModel(svmModel, rangeTransform, targetVariable, allowedInputVariables);
  }
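The regression variant follows the same pattern, but it now always reports mean squared error, mean absolute error, and average relative error, whether or not a solution is created. Below is a minimal sketch of computing a test-set MSE through the new static Run, the same way the new metrics block does internally; the method name TestMseSketch and the svmType/kernelType strings "EPSILON_SVR" and "RBF" are assumptions (the accepted strings come from the class's GetSvmType/GetKernelType helpers, which this changeset does not show).

// Hedged sketch: train an SVR model without a solution object and compute the
// test MSE exactly like the new metrics block in the algorithm.
public static double TestMseSketch(IRegressionProblemData problemData) {
  ISupportVectorMachineModel model;
  int nSv;
  SupportVectorRegression.Run(
      problemData, problemData.AllowedInputVariables,
      "EPSILON_SVR", "RBF",     // svmType / kernelType names are assumed
      1.0, 0.5, 1.0, 0.1, 3,    // cost, nu, gamma, epsilon, degree (illustrative)
      out model, out nSv);

  var ds = problemData.Dataset;
  var testRows = problemData.TestIndices;
  var yTest = ds.GetDoubleValues(problemData.TargetVariable, testRows);
  var yPredTest = model.GetEstimatedValues(ds, testRows).ToArray();  // requires System.Linq

  OnlineCalculatorError error;
  var testMse = OnlineMeanSquaredErrorCalculator.Calculate(yPredTest, yTest, out error);
  return error == OnlineCalculatorError.None ? testMse : double.MaxValue;
}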