- Timestamp: 07/21/11 11:45:19 (13 years ago)
- Location: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork
- Files: 1 edited, 2 copied
Legend (for the diffs below):
- Unmodified lines have no prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs
    (diff r6578 → r6579)

      namespace HeuristicLab.Algorithms.DataAnalysis {
        /// <summary>
    -   /// Neural network regression data analysis algorithm.
    +   /// Neural network classification data analysis algorithm.
        /// </summary>
    -   [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB).")]
    +   [Item("Neural Network Classification", "Neural network classification data analysis algorithm (wrapper for ALGLIB).")]
        [Creatable("Data Analysis")]
        [StorableClass]
    -   public sealed class NeuralNetworkRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
    +   public sealed class NeuralNetworkClassification : FixedDataAnalysisAlgorithm<IClassificationProblem> {
          private const string DecayParameterName = "Decay";
          private const string HiddenLayersParameterName = "HiddenLayers";
    …
          private const string NodesInSecondHiddenLayerParameterName = "NodesInSecondHiddenLayer";
          private const string RestartsParameterName = "Restarts";
    -     private const string NeuralNetworkRegressionModelResultName = "Neural network regression solution";
    +     private const string NeuralNetworkRegressionModelResultName = "Neural network classification solution";

          #region parameter properties
    …
          [StorableConstructor]
    -     private NeuralNetworkRegression(bool deserializing) : base(deserializing) { }
    -     private NeuralNetworkRegression(NeuralNetworkRegression original, Cloner cloner)
    +     private NeuralNetworkClassification(bool deserializing) : base(deserializing) { }
    +     private NeuralNetworkClassification(NeuralNetworkClassification original, Cloner cloner)
            : base(original, cloner) {
          }
    -     public NeuralNetworkRegression()
    +     public NeuralNetworkClassification()
            : base() {
            var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { new IntValue(0), new IntValue(1), new IntValue(2) });
    …
            Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of restarts for learning.", new IntValue(2)));

    -       Problem = new RegressionProblem();
    +       Problem = new ClassificationProblem();
          }
          [StorableHook(HookType.AfterDeserialization)]
    …
          public override IDeepCloneable Clone(Cloner cloner) {
    -       return new NeuralNetworkRegression(this, cloner);
    +       return new NeuralNetworkClassification(this, cloner);
          }

          #region neural network
          protected override void Run() {
    -       double rmsError, avgRelError;
    -       var solution = CreateNeuralNetworkRegressionSolution(Problem.ProblemData, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError);
    +       double rmsError, avgRelError, relClassError;
    +       var solution = CreateNeuralNetworkClassificationSolution(Problem.ProblemData, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError, out relClassError);
            Results.Add(new Result(NeuralNetworkRegressionModelResultName, "The neural network regression solution.", solution));
            Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network regression solution on the training set.", new DoubleValue(rmsError)));
            Results.Add(new Result("Average relative error", "The average of relative errors of the neural network regression solution on the training set.", new PercentValue(avgRelError)));
    +       Results.Add(new Result("Relative classification error", "The percentage of misclassified samples.", new PercentValue(relClassError)));
          }

    -     public static IRegressionSolution CreateNeuralNetworkRegressionSolution(IRegressionProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
    -       out double rmsError, out double avgRelError) {
    +     public static IClassificationSolution CreateNeuralNetworkClassificationSolution(IClassificationProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
    +       out double rmsError, out double avgRelError, out double relClassError) {
            Dataset dataset = problemData.Dataset;
            string targetVariable = problemData.TargetVariable;
    …
            double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
            if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    -         throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");
    -
    -       double targetMin = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Min();
    -       targetMin = targetMin - targetMin * 0.1; // -10%
    -       double targetMax = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Max();
    -       targetMax = targetMax + targetMax * 0.1; // + 10%
    +         throw new NotSupportedException("Neural network classification does not support NaN or infinity values in the input dataset.");

            alglib.multilayerperceptron multiLayerPerceptron = null;
    +       int numberOfClasses = problemData.ClassValues.Count();
            if (nLayers == 0) {
    -         alglib.mlpcreater0(allowedInputVariables.Count(), 1, targetMin, targetMax, out multiLayerPerceptron);
    +         alglib.mlpcreatec0(allowedInputVariables.Count(), numberOfClasses, out multiLayerPerceptron);
            } else if (nLayers == 1) {
    -         alglib.mlpcreater1(allowedInputVariables.Count(), nHiddenNodes1, 1, targetMin, targetMax, out multiLayerPerceptron);
    +         alglib.mlpcreatec1(allowedInputVariables.Count(), nHiddenNodes1, numberOfClasses, out multiLayerPerceptron);
            } else if (nLayers == 2) {
    -         alglib.mlpcreater2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, targetMin, targetMax, out multiLayerPerceptron);
    +         alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, numberOfClasses, out multiLayerPerceptron);
            } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
            alglib.mlpreport rep;
            int nRows = inputMatrix.GetLength(0);
    +       int nFeatures = inputMatrix.GetLength(1) - 1;
    +       double[] classValues = dataset.GetVariableValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
    +       int nClasses = classValues.Count();
    +       // map original class values to values [0..nClasses-1]
    +       Dictionary<double, double> classIndizes = new Dictionary<double, double>();
    +       for (int i = 0; i < nClasses; i++) {
    +         classIndizes[classValues[i]] = i;
    +       }
    +       for (int row = 0; row < nRows; row++) {
    +         inputMatrix[row, nFeatures] = classIndizes[inputMatrix[row, nFeatures]];
    +       }

            int info;
            // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
            alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
    -       if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution");
    +       if (info != 2) throw new ArgumentException("Error in calculation of neural network classification solution");

            rmsError = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);
            avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);
    +       relClassError = alglib.mlpclserror(multiLayerPerceptron, inputMatrix, nRows) / (double)nRows;

    -       return new NeuralNetworkRegressionSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));
    +       return new NeuralNetworkClassificationSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables, problemData.ClassValues.ToArray()));
          }
          #endregion
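For orientation, the substance of this copy-and-modify is threefold: the ALGLIB regression constructors (mlpcreater0/1/2), which take a target output range, are replaced by the classifier constructors (mlpcreatec0/1/2), which take the number of classes and produce a softmax output per class; the class labels in the last column of the training matrix are remapped to the zero-based indices ALGLIB expects; and the misclassification rate is reported via mlpclserror. Below is a minimal standalone sketch of that flow. It uses only the ALGLIB calls that appear in the diff, but the class and method names, the fixed hidden-layer size of 10, and the exception type are illustrative assumptions, not part of the changeset.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

public static class MlpClassificationSketch {
  // Trains an ALGLIB softmax classifier on a matrix whose last column holds
  // arbitrary (double-coded) class labels and returns the fraction of
  // misclassified training rows, mirroring the flow in the changeset.
  public static double TrainAndGetRelClassError(double[,] inputMatrix, double decay, int restarts) {
    int nRows = inputMatrix.GetLength(0);
    int nFeatures = inputMatrix.GetLength(1) - 1;

    // Map the distinct class labels to consecutive indices 0..nClasses-1;
    // ALGLIB classifier networks expect the class index in the last column.
    double[] classValues = Enumerable.Range(0, nRows)
      .Select(r => inputMatrix[r, nFeatures])
      .Distinct().OrderBy(x => x).ToArray();
    var classIndices = new Dictionary<double, double>();
    for (int i = 0; i < classValues.Length; i++) classIndices[classValues[i]] = i;
    for (int row = 0; row < nRows; row++)
      inputMatrix[row, nFeatures] = classIndices[inputMatrix[row, nFeatures]];

    // One hidden layer with 10 nodes (an arbitrary choice for this sketch);
    // mlpcreatec1 builds a network with one softmax output per class.
    alglib.multilayerperceptron net;
    alglib.mlpcreatec1(nFeatures, 10, classValues.Length, out net);

    // Levenberg-Marquardt training; ALGLIB signals success with info == 2.
    int info;
    alglib.mlpreport rep;
    alglib.mlptrainlm(net, inputMatrix, nRows, decay, restarts, out info, out rep);
    if (info != 2) throw new InvalidOperationException("MLP training failed, info = " + info);

    // mlpclserror counts misclassified rows; dividing by the row count gives
    // the "Relative classification error" result added in this changeset.
    return alglib.mlpclserror(net, inputMatrix, nRows) / (double)nRows;
  }
}
```

Note that the changeset also passes problemData.ClassValues to the NeuralNetworkModel when building the solution, presumably so that predicted class indices can be translated back to the original class values; the sketch above skips that step.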
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassificationSolution.cs
    (diff r6577 → r6579)

      namespace HeuristicLab.Algorithms.DataAnalysis {
        /// <summary>
    -   /// Represents a neural network solution for a regression problem which can be visualized in the GUI.
    +   /// Represents a neural network solution for a classification problem which can be visualized in the GUI.
        /// </summary>
    -   [Item("NeuralNetworkRegressionSolution", "Represents a neural network solution for a regression problem which can be visualized in the GUI.")]
    +   [Item("NeuralNetworkClassificationSolution", "Represents a neural network solution for a classification problem which can be visualized in the GUI.")]
        [StorableClass]
    -   public sealed class NeuralNetworkRegressionSolution : RegressionSolution, INeuralNetworkRegressionSolution {
    +   public sealed class NeuralNetworkClassificationSolution : ClassificationSolution, INeuralNetworkClassificationSolution {

          public new INeuralNetworkModel Model {
    …
          [StorableConstructor]
    -     private NeuralNetworkRegressionSolution(bool deserializing) : base(deserializing) { }
    -     private NeuralNetworkRegressionSolution(NeuralNetworkRegressionSolution original, Cloner cloner)
    +     private NeuralNetworkClassificationSolution(bool deserializing) : base(deserializing) { }
    +     private NeuralNetworkClassificationSolution(NeuralNetworkClassificationSolution original, Cloner cloner)
            : base(original, cloner) {
          }
    -     public NeuralNetworkRegressionSolution(IRegressionProblemData problemData, INeuralNetworkModel nnModel)
    +     public NeuralNetworkClassificationSolution(IClassificationProblemData problemData, INeuralNetworkModel nnModel)
            : base(nnModel, problemData) {
          }

          public override IDeepCloneable Clone(Cloner cloner) {
    -       return new NeuralNetworkRegressionSolution(this, cloner);
    +       return new NeuralNetworkClassificationSolution(this, cloner);
          }
        }
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs
    (diff r6578 → r6579; the only change is trailing whitespace removed from the avgRelError line)

            rmsError = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);
    -       avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows); 
    +       avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);

            return new NeuralNetworkRegressionSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));