Changeset 6580
- Timestamp:
- 07/21/11 13:37:43 (13 years ago)
- Location:
- trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
- Files:
-
- 3 edited
- 8 copied
Legend:
- Unmodified
- Added
- Removed
-
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj
r6579 r6580 112 112 <Compile Include="HeuristicLabAlgorithmsDataAnalysisPlugin.cs" /> 113 113 <Compile Include="FixedDataAnalysisAlgorithm.cs" /> 114 <Compile Include="Interfaces\INeuralNetworkEnsembleClassificationSolution.cs" /> 115 <Compile Include="Interfaces\INeuralNetworkEnsembleRegressionSolution.cs" /> 116 <Compile Include="Interfaces\INeuralNetworkEnsembleModel.cs" /> 114 117 <Compile Include="Interfaces\INeuralNetworkClassificationSolution.cs" /> 115 118 <Compile Include="Interfaces\INeuralNetworkRegressionSolution.cs" /> … … 135 138 <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" /> 136 139 <Compile Include="Linear\MultinomialLogitModel.cs" /> 140 <Compile Include="NeuralNetwork\NeuralNetworkEnsembleClassification.cs" /> 141 <Compile Include="NeuralNetwork\NeuralNetworkEnsembleClassificationSolution.cs" /> 142 <Compile Include="NeuralNetwork\NeuralNetworkEnsembleModel.cs" /> 143 <Compile Include="NeuralNetwork\NeuralNetworkEnsembleRegressionSolution.cs" /> 144 <Compile Include="NeuralNetwork\NeuralNetworkEnsembleRegression.cs" /> 137 145 <Compile Include="NeuralNetwork\NeuralNetworkClassification.cs" /> 138 146 <Compile Include="NeuralNetwork\NeuralNetworkClassificationSolution.cs" /> -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/INeuralNetworkEnsembleClassificationSolution.cs
r6577 r6580 26 26 namespace HeuristicLab.Algorithms.DataAnalysis { 27 27 /// <summary> 28 /// Interface to represent a neural network regression solution28 /// Interface to represent a neural network ensemble classification solution 29 29 /// </summary> 30 public interface INeuralNetwork RegressionSolution : IRegressionSolution {31 new INeuralNetwork Model Model { get; }30 public interface INeuralNetworkEnsembleClassificationSolution : IClassificationSolution { 31 new INeuralNetworkEnsembleModel Model { get; } 32 32 } 33 33 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/INeuralNetworkEnsembleModel.cs
r6577 r6580 27 27 namespace HeuristicLab.Algorithms.DataAnalysis { 28 28 /// <summary> 29 /// Interface to represent a neural network model for either regression or classification29 /// Interface to represent a neural network ensemble model for either regression or classification 30 30 /// </summary> 31 public interface INeuralNetwork Model : IRegressionModel, IClassificationModel {31 public interface INeuralNetworkEnsembleModel : IRegressionModel, IClassificationModel { 32 32 } 33 33 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/INeuralNetworkEnsembleRegressionSolution.cs
r6577 r6580 26 26 namespace HeuristicLab.Algorithms.DataAnalysis { 27 27 /// <summary> 28 /// Interface to represent a neural network regression solution28 /// Interface to represent a neural network ensemble regression solution 29 29 /// </summary> 30 public interface INeuralNetwork RegressionSolution : IRegressionSolution {31 new INeuralNetwork Model Model { get; }30 public interface INeuralNetworkEnsembleRegressionSolution : IRegressionSolution { 31 new INeuralNetworkEnsembleModel Model { get; } 32 32 } 33 33 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs
r6579 r6580 38 38 /// Neural network classification data analysis algorithm. 39 39 /// </summary> 40 [Item("Neural Network Classification", "Neural network classification data analysis algorithm (wrapper for ALGLIB). ")]40 [Item("Neural Network Classification", "Neural network classification data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/neuralnetworks.php")] 41 41 [Creatable("Data Analysis")] 42 42 [StorableClass] … … 156 156 throw new NotSupportedException("Neural network classification does not support NaN or infinity values in the input dataset."); 157 157 158 alglib.multilayerperceptron multiLayerPerceptron = null;159 int numberOfClasses = problemData.ClassValues.Count();160 if (nLayers == 0) {161 alglib.mlpcreatec0(allowedInputVariables.Count(), numberOfClasses, out multiLayerPerceptron);162 } else if (nLayers == 1) {163 alglib.mlpcreatec1(allowedInputVariables.Count(), nHiddenNodes1, numberOfClasses, out multiLayerPerceptron);164 } else if (nLayers == 2) {165 alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, numberOfClasses, out multiLayerPerceptron);166 } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");167 alglib.mlpreport rep;168 158 int nRows = inputMatrix.GetLength(0); 169 159 int nFeatures = inputMatrix.GetLength(1) - 1; … … 178 168 inputMatrix[row, nFeatures] = classIndizes[inputMatrix[row, nFeatures]]; 179 169 } 170 171 alglib.multilayerperceptron multiLayerPerceptron = null; 172 if (nLayers == 0) { 173 alglib.mlpcreatec0(allowedInputVariables.Count(), nClasses, out multiLayerPerceptron); 174 } else if (nLayers == 1) { 175 alglib.mlpcreatec1(allowedInputVariables.Count(), nHiddenNodes1, nClasses, out multiLayerPerceptron); 176 } else if (nLayers == 2) { 177 alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, out multiLayerPerceptron); 178 } else throw new ArgumentException("Number of 
layers must be zero, one, or two.", "nLayers"); 179 alglib.mlpreport rep; 180 180 181 181 int info; -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassification.cs
r6579 r6580 36 36 namespace HeuristicLab.Algorithms.DataAnalysis { 37 37 /// <summary> 38 /// Neural network regression data analysis algorithm.38 /// Neural network ensemble classification data analysis algorithm. 39 39 /// </summary> 40 [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB).")]40 [Item("Neural Network Ensemble Classification", "Neural network ensemble classification data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/mlpensembles.php")] 41 41 [Creatable("Data Analysis")] 42 42 [StorableClass] 43 public sealed class NeuralNetworkRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> { 43 public sealed class NeuralNetworkEnsembleClassification : FixedDataAnalysisAlgorithm<IClassificationProblem> { 44 private const string EnsembleSizeParameterName = "EnsembleSize"; 44 45 private const string DecayParameterName = "Decay"; 45 46 private const string HiddenLayersParameterName = "HiddenLayers"; … … 47 48 private const string NodesInSecondHiddenLayerParameterName = "NodesInSecondHiddenLayer"; 48 49 private const string RestartsParameterName = "Restarts"; 49 private const string NeuralNetwork RegressionModelResultName = "Neural network regression solution";50 private const string NeuralNetworkEnsembleClassificationModelResultName = "Neural network ensemble classification solution"; 50 51 51 52 #region parameter properties 53 public IFixedValueParameter<IntValue> EnsembleSizeParameter { 54 get { return (IFixedValueParameter<IntValue>)Parameters[EnsembleSizeParameterName]; } 55 } 52 56 public IFixedValueParameter<DoubleValue> DecayParameter { 53 57 get { return (IFixedValueParameter<DoubleValue>)Parameters[DecayParameterName]; } … … 68 72 69 73 #region properties 74 public int EnsembleSize { 75 get { return EnsembleSizeParameter.Value.Value; } 76 set { 77 if (value < 1) throw new ArgumentException("The number of models in the ensemble must be 
positive and at least one.", "EnsembleSize"); 78 EnsembleSizeParameter.Value.Value = value; 79 } 80 } 70 81 public double Decay { 71 82 get { return DecayParameter.Value.Value; } … … 110 121 111 122 [StorableConstructor] 112 private NeuralNetwork Regression(bool deserializing) : base(deserializing) { }113 private NeuralNetwork Regression(NeuralNetworkRegression original, Cloner cloner)123 private NeuralNetworkEnsembleClassification(bool deserializing) : base(deserializing) { } 124 private NeuralNetworkEnsembleClassification(NeuralNetworkEnsembleClassification original, Cloner cloner) 114 125 : base(original, cloner) { 115 126 } 116 public NeuralNetwork Regression()127 public NeuralNetworkEnsembleClassification() 117 128 : base() { 118 129 var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { new IntValue(0), new IntValue(1), new IntValue(2) }); … … 121 132 select v) 122 133 .Single(); 123 Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strengh of regularization and should be set to a value between 0.001 (weak regularization) to 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(1))); 134 Parameters.Add(new FixedValueParameter<IntValue>(EnsembleSizeParameterName, "The number of simple neural network models in the ensemble. A good value is 10.", new IntValue(10))); 135 Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strengh of regularization and should be set to a value between 0.001 (weak regularization) to 100 (very strong regularization). 
The correct value should be determined via cross-validation.", new DoubleValue(0.001))); 124 136 Parameters.Add(new ConstrainedValueParameter<IntValue>(HiddenLayersParameterName, "The number of hidden layers for the neural network (0, 1, or 2)", validHiddenLayerValues, selectedHiddenLayerValue)); 125 Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. Th is value is not used if the number of hidden layers is zero.", new IntValue(10)));126 Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(10 )));137 Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. The value should be rather large (30-100 nodes) in order to make the network highly flexible and run into the early stopping criterion). This value is not used if the number of hidden layers is zero.", new IntValue(100))); 138 Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. 
This value is not used if the number of hidden layers is zero or one.", new IntValue(100))); 127 139 Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of restarts for learning.", new IntValue(2))); 128 140 129 Problem = new RegressionProblem();141 Problem = new ClassificationProblem(); 130 142 } 131 143 [StorableHook(HookType.AfterDeserialization)] … … 133 145 134 146 public override IDeepCloneable Clone(Cloner cloner) { 135 return new NeuralNetwork Regression(this, cloner);136 } 137 138 #region neural network 147 return new NeuralNetworkEnsembleClassification(this, cloner); 148 } 149 150 #region neural network ensemble 139 151 protected override void Run() { 140 double rmsError, avgRelError; 141 var solution = CreateNeuralNetworkRegressionSolution(Problem.ProblemData, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError); 142 Results.Add(new Result(NeuralNetworkRegressionModelResultName, "The neural network regression solution.", solution)); 143 Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network regression solution on the training set.", new DoubleValue(rmsError))); 144 Results.Add(new Result("Average relative error", "The average of relative errors of the neural network regression solution on the training set.", new PercentValue(avgRelError))); 145 } 146 147 public static IRegressionSolution CreateNeuralNetworkRegressionSolution(IRegressionProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts, 148 out double rmsError, out double avgRelError) { 152 double rmsError, avgRelError, relClassError; 153 var solution = CreateNeuralNetworkEnsembleClassificationSolution(Problem.ProblemData, EnsembleSize, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError, out relClassError); 154 Results.Add(new 
Result(NeuralNetworkEnsembleClassificationModelResultName, "The neural network ensemble classification solution.", solution)); 155 Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network ensemble regression solution on the training set.", new DoubleValue(rmsError))); 156 Results.Add(new Result("Average relative error", "The average of relative errors of the neural network ensemble regression solution on the training set.", new PercentValue(avgRelError))); 157 Results.Add(new Result("Relative classification error", "The percentage of misclassified samples.", new PercentValue(relClassError))); 158 } 159 160 public static IClassificationSolution CreateNeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts, 161 out double rmsError, out double avgRelError, out double relClassError) { 149 162 Dataset dataset = problemData.Dataset; 150 163 string targetVariable = problemData.TargetVariable; … … 153 166 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 154 167 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 155 throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset."); 156 157 double targetMin = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Min(); 158 targetMin = targetMin - targetMin * 0.1; // -10% 159 double targetMax = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Max(); 160 targetMax = targetMax + targetMax * 0.1; // + 10% 161 162 alglib.multilayerperceptron multiLayerPerceptron = null; 168 throw new NotSupportedException("Neural network ensemble classification does not support NaN or infinity values in the input dataset."); 169 170 int nRows = inputMatrix.GetLength(0); 171 int 
nFeatures = inputMatrix.GetLength(1) - 1; 172 double[] classValues = dataset.GetVariableValues(targetVariable).Distinct().OrderBy(x => x).ToArray(); 173 int nClasses = classValues.Count(); 174 // map original class values to values [0..nClasses-1] 175 Dictionary<double, double> classIndizes = new Dictionary<double, double>(); 176 for (int i = 0; i < nClasses; i++) { 177 classIndizes[classValues[i]] = i; 178 } 179 for (int row = 0; row < nRows; row++) { 180 inputMatrix[row, nFeatures] = classIndizes[inputMatrix[row, nFeatures]]; 181 } 182 183 alglib.mlpensemble mlpEnsemble = null; 163 184 if (nLayers == 0) { 164 alglib.mlp creater0(allowedInputVariables.Count(), 1, targetMin, targetMax, out multiLayerPerceptron);185 alglib.mlpecreatec0(allowedInputVariables.Count(), nClasses, ensembleSize, out mlpEnsemble); 165 186 } else if (nLayers == 1) { 166 alglib.mlp creater1(allowedInputVariables.Count(), nHiddenNodes1, 1, targetMin, targetMax, out multiLayerPerceptron);187 alglib.mlpecreatec1(allowedInputVariables.Count(), nHiddenNodes1, nClasses, ensembleSize, out mlpEnsemble); 167 188 } else if (nLayers == 2) { 168 alglib.mlp creater2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, targetMin, targetMax, out multiLayerPerceptron);189 alglib.mlpecreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, ensembleSize, out mlpEnsemble); 169 190 } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers"); 170 191 alglib.mlpreport rep; 171 int nRows = inputMatrix.GetLength(0);172 192 173 193 int info; 174 // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary175 alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);176 if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution"); 177 178 rmsError = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);179 avgRelError = 
alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);180 181 return new NeuralNetwork RegressionSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));194 alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep); 195 if (info != 6) throw new ArgumentException("Error in calculation of neural network ensemble regression solution"); 196 197 rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows); 198 avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows); 199 relClassError = alglib.mlperelclserror(mlpEnsemble, inputMatrix, nRows); 200 201 return new NeuralNetworkEnsembleClassificationSolution(problemData, new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables, problemData.ClassValues.ToArray())); 182 202 } 183 203 #endregion -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassificationSolution.cs
r6577 r6580 31 31 namespace HeuristicLab.Algorithms.DataAnalysis { 32 32 /// <summary> 33 /// Represents a neural network solution for a regression problem which can be visualized in the GUI.33 /// Represents a neural network ensemble solution for a classification problem which can be visualized in the GUI. 34 34 /// </summary> 35 [Item("NeuralNetwork RegressionSolution", "Represents a neural network solution for a regression problem which can be visualized in the GUI.")]35 [Item("NeuralNetworkEnsembleClassificationSolution", "Represents a neural network ensemble solution for a classification problem which can be visualized in the GUI.")] 36 36 [StorableClass] 37 public sealed class NeuralNetwork RegressionSolution : RegressionSolution, INeuralNetworkRegressionSolution {37 public sealed class NeuralNetworkEnsembleClassificationSolution : ClassificationSolution, INeuralNetworkEnsembleClassificationSolution { 38 38 39 public new INeuralNetwork Model Model {40 get { return (INeuralNetwork Model)base.Model; }39 public new INeuralNetworkEnsembleModel Model { 40 get { return (INeuralNetworkEnsembleModel)base.Model; } 41 41 set { base.Model = value; } 42 42 } 43 43 44 44 [StorableConstructor] 45 private NeuralNetwork RegressionSolution(bool deserializing) : base(deserializing) { }46 private NeuralNetwork RegressionSolution(NeuralNetworkRegressionSolution original, Cloner cloner)45 private NeuralNetworkEnsembleClassificationSolution(bool deserializing) : base(deserializing) { } 46 private NeuralNetworkEnsembleClassificationSolution(NeuralNetworkEnsembleClassificationSolution original, Cloner cloner) 47 47 : base(original, cloner) { 48 48 } 49 public NeuralNetwork RegressionSolution(IRegressionProblemData problemData, INeuralNetworkModel nnModel)49 public NeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, INeuralNetworkEnsembleModel nnModel) 50 50 : base(nnModel, problemData) { 51 51 } 52 52 53 53 public override IDeepCloneable Clone(Cloner 
cloner) { 54 return new NeuralNetwork RegressionSolution(this, cloner);54 return new NeuralNetworkEnsembleClassificationSolution(this, cloner); 55 55 } 56 56 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs
r6577 r6580 33 33 namespace HeuristicLab.Algorithms.DataAnalysis { 34 34 /// <summary> 35 /// Represents a neural network model for regression and classification35 /// Represents a neural network ensembel model for regression and classification 36 36 /// </summary> 37 37 [StorableClass] 38 [Item("NeuralNetwork Model", "Represents a neural networkfor regression and classification.")]39 public sealed class NeuralNetwork Model : NamedItem, INeuralNetworkModel {40 41 private alglib.m ultilayerperceptron multiLayerPerceptron;42 public alglib.m ultilayerperceptron MultiLayerPerceptron{43 get { return m ultiLayerPerceptron; }44 set { 45 if (value != m ultiLayerPerceptron) {38 [Item("NeuralNetworkEnsembleModel", "Represents a neural network ensemble for regression and classification.")] 39 public sealed class NeuralNetworkEnsembleModel : NamedItem, INeuralNetworkEnsembleModel { 40 41 private alglib.mlpensemble mlpEnsemble; 42 public alglib.mlpensemble MultiLayerPerceptronEnsemble { 43 get { return mlpEnsemble; } 44 set { 45 if (value != mlpEnsemble) { 46 46 if (value == null) throw new ArgumentNullException(); 47 m ultiLayerPerceptron= value;47 mlpEnsemble = value; 48 48 OnChanged(EventArgs.Empty); 49 49 } … … 58 58 private double[] classValues; 59 59 [StorableConstructor] 60 private NeuralNetwork Model(bool deserializing)60 private NeuralNetworkEnsembleModel(bool deserializing) 61 61 : base(deserializing) { 62 62 if (deserializing) 63 m ultiLayerPerceptron = new alglib.multilayerperceptron();64 } 65 private NeuralNetwork Model(NeuralNetworkModel original, Cloner cloner)63 mlpEnsemble = new alglib.mlpensemble(); 64 } 65 private NeuralNetworkEnsembleModel(NeuralNetworkEnsembleModel original, Cloner cloner) 66 66 : base(original, cloner) { 67 multiLayerPerceptron = new alglib.multilayerperceptron(); 68 multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone(); 69 multiLayerPerceptron.innerobj.columnmeans = 
(double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone(); 70 multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone(); 71 multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone(); 72 multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone(); 73 multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone(); 74 multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone(); 75 multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone(); 76 multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone(); 77 multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone(); 78 multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone(); 67 mlpEnsemble = new alglib.mlpensemble(); 68 mlpEnsemble.innerobj.columnmeans = (double[])original.mlpEnsemble.innerobj.columnmeans.Clone(); 69 mlpEnsemble.innerobj.columnsigmas = (double[])original.mlpEnsemble.innerobj.columnsigmas.Clone(); 70 mlpEnsemble.innerobj.dfdnet = (double[])original.mlpEnsemble.innerobj.dfdnet.Clone(); 71 mlpEnsemble.innerobj.ensemblesize = original.mlpEnsemble.innerobj.ensemblesize; 72 mlpEnsemble.innerobj.issoftmax = original.mlpEnsemble.innerobj.issoftmax; 73 mlpEnsemble.innerobj.neurons = (double[])original.mlpEnsemble.innerobj.neurons.Clone(); 74 mlpEnsemble.innerobj.nin = original.mlpEnsemble.innerobj.nin; 75 mlpEnsemble.innerobj.nout = original.mlpEnsemble.innerobj.nout; 76 mlpEnsemble.innerobj.postprocessing = original.mlpEnsemble.innerobj.postprocessing; 77 mlpEnsemble.innerobj.serializedlen = original.mlpEnsemble.innerobj.serializedlen; 78 mlpEnsemble.innerobj.serializedmlp = 
(double[])original.mlpEnsemble.innerobj.serializedmlp.Clone(); 79 mlpEnsemble.innerobj.structinfo = (int[])original.mlpEnsemble.innerobj.structinfo.Clone(); 80 mlpEnsemble.innerobj.tmpmeans = (double[])original.mlpEnsemble.innerobj.tmpmeans.Clone(); 81 mlpEnsemble.innerobj.tmpsigmas = (double[])original.mlpEnsemble.innerobj.tmpsigmas.Clone(); 82 mlpEnsemble.innerobj.tmpweights = (double[])original.mlpEnsemble.innerobj.tmpweights.Clone(); 83 mlpEnsemble.innerobj.wcount = original.mlpEnsemble.innerobj.wcount; 84 mlpEnsemble.innerobj.weights = (double[])original.mlpEnsemble.innerobj.weights.Clone(); 85 mlpEnsemble.innerobj.y = (double[])original.mlpEnsemble.innerobj.y.Clone(); 79 86 targetVariable = original.targetVariable; 80 87 allowedInputVariables = (string[])original.allowedInputVariables.Clone(); … … 82 89 this.classValues = (double[])original.classValues.Clone(); 83 90 } 84 public NeuralNetwork Model(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)91 public NeuralNetworkEnsembleModel(alglib.mlpensemble mlpEnsemble, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) 85 92 : base() { 86 93 this.name = ItemName; 87 94 this.description = ItemDescription; 88 this.m ultiLayerPerceptron = multiLayerPerceptron;95 this.mlpEnsemble = mlpEnsemble; 89 96 this.targetVariable = targetVariable; 90 97 this.allowedInputVariables = allowedInputVariables.ToArray(); … … 94 101 95 102 public override IDeepCloneable Clone(Cloner cloner) { 96 return new NeuralNetwork Model(this, cloner);103 return new NeuralNetworkEnsembleModel(this, cloner); 97 104 } 98 105 … … 109 116 x[column] = inputData[row, column]; 110 117 } 111 alglib.mlp process(multiLayerPerceptron, x, ref y);118 alglib.mlpeprocess(mlpEnsemble, x, ref y); 112 119 yield return y[0]; 113 120 } … … 126 133 x[column] = inputData[row, column]; 127 134 } 128 alglib.mlp 
process(multiLayerPerceptron, x, ref y);135 alglib.mlpeprocess(mlpEnsemble, x, ref y); 129 136 // find class for with the largest probability value 130 137 int maxProbClassIndex = 0; … … 151 158 #region persistence 152 159 [Storable] 153 private double[,] MultiLayerPerceptronChunks { 154 get { 155 return multiLayerPerceptron.innerobj.chunks; 156 } 157 set { 158 multiLayerPerceptron.innerobj.chunks = value; 159 } 160 } 161 [Storable] 162 private double[] MultiLayerPerceptronColumnMeans { 163 get { 164 return multiLayerPerceptron.innerobj.columnmeans; 165 } 166 set { 167 multiLayerPerceptron.innerobj.columnmeans = value; 168 } 169 } 170 [Storable] 171 private double[] MultiLayerPerceptronColumnSigmas { 172 get { 173 return multiLayerPerceptron.innerobj.columnsigmas; 174 } 175 set { 176 multiLayerPerceptron.innerobj.columnsigmas = value; 177 } 178 } 179 [Storable] 180 private double[] MultiLayerPerceptronDError { 181 get { 182 return multiLayerPerceptron.innerobj.derror; 183 } 184 set { 185 multiLayerPerceptron.innerobj.derror = value; 186 } 187 } 188 [Storable] 189 private double[] MultiLayerPerceptronDfdnet { 190 get { 191 return multiLayerPerceptron.innerobj.dfdnet; 192 } 193 set { 194 multiLayerPerceptron.innerobj.dfdnet = value; 195 } 196 } 197 [Storable] 198 private double[] MultiLayerPerceptronNeurons { 199 get { 200 return multiLayerPerceptron.innerobj.neurons; 201 } 202 set { 203 multiLayerPerceptron.innerobj.neurons = value; 204 } 205 } 206 [Storable] 207 private double[] MultiLayerPerceptronNwbuf { 208 get { 209 return multiLayerPerceptron.innerobj.nwbuf; 210 } 211 set { 212 multiLayerPerceptron.innerobj.nwbuf = value; 160 private double[] MultiLayerPerceptronEnsembleColumnMeans { 161 get { 162 return mlpEnsemble.innerobj.columnmeans; 163 } 164 set { 165 mlpEnsemble.innerobj.columnmeans = value; 166 } 167 } 168 [Storable] 169 private double[] MultiLayerPerceptronEnsembleColumnSigmas { 170 get { 171 return mlpEnsemble.innerobj.columnsigmas; 172 } 173 set { 
174 mlpEnsemble.innerobj.columnsigmas = value; 175 } 176 } 177 [Storable] 178 private double[] MultiLayerPerceptronEnsembleDfdnet { 179 get { 180 return mlpEnsemble.innerobj.dfdnet; 181 } 182 set { 183 mlpEnsemble.innerobj.dfdnet = value; 184 } 185 } 186 [Storable] 187 private int MultiLayerPerceptronEnsembleSize { 188 get { 189 return mlpEnsemble.innerobj.ensemblesize; 190 } 191 set { 192 mlpEnsemble.innerobj.ensemblesize = value; 193 } 194 } 195 [Storable] 196 private bool MultiLayerPerceptronEnsembleIsSoftMax { 197 get { 198 return mlpEnsemble.innerobj.issoftmax; 199 } 200 set { 201 mlpEnsemble.innerobj.issoftmax = value; 202 } 203 } 204 [Storable] 205 private double[] MultiLayerPerceptronEnsembleNeurons { 206 get { 207 return mlpEnsemble.innerobj.neurons; 208 } 209 set { 210 mlpEnsemble.innerobj.neurons = value; 211 } 212 } 213 [Storable] 214 private int MultiLayerPerceptronEnsembleNin { 215 get { 216 return mlpEnsemble.innerobj.nin; 217 } 218 set { 219 mlpEnsemble.innerobj.nin = value; 220 } 221 } 222 [Storable] 223 private int MultiLayerPerceptronEnsembleNout { 224 get { 225 return mlpEnsemble.innerobj.nout; 226 } 227 set { 228 mlpEnsemble.innerobj.nout = value; 229 } 230 } 231 [Storable] 232 private bool MultiLayerPerceptronEnsemblePostprocessing { 233 get { 234 return mlpEnsemble.innerobj.postprocessing; 235 } 236 set { 237 mlpEnsemble.innerobj.postprocessing = value; 238 } 239 } 240 [Storable] 241 private int MultiLayerPerceptronEnsembleSerializedLen { 242 get { 243 return mlpEnsemble.innerobj.serializedlen; 244 } 245 set { 246 mlpEnsemble.innerobj.serializedlen = value; 247 } 248 } 249 [Storable] 250 private double[] MultiLayerPerceptronEnsembleSerializedMlp { 251 get { 252 return mlpEnsemble.innerobj.serializedmlp; 253 } 254 set { 255 mlpEnsemble.innerobj.serializedmlp = value; 213 256 } 214 257 } … … 216 259 private int[] MultiLayerPerceptronStuctinfo { 217 260 get { 218 return multiLayerPerceptron.innerobj.structinfo; 219 } 220 set { 221 
multiLayerPerceptron.innerobj.structinfo = value; 222 } 223 } 261 return mlpEnsemble.innerobj.structinfo; 262 } 263 set { 264 mlpEnsemble.innerobj.structinfo = value; 265 } 266 } 267 [Storable] 268 private double[] MultiLayerPerceptronEnsembleTmpMeans { 269 get { 270 return mlpEnsemble.innerobj.tmpmeans; 271 } 272 set { 273 mlpEnsemble.innerobj.tmpmeans = value; 274 } 275 } 276 [Storable] 277 private double[] MultiLayerPerceptronEnsembleTmpSigmas { 278 get { 279 return mlpEnsemble.innerobj.tmpsigmas; 280 } 281 set { 282 mlpEnsemble.innerobj.tmpsigmas = value; 283 } 284 } 285 [Storable] 286 private double[] MultiLayerPerceptronEnsembleTmpWeights { 287 get { 288 return mlpEnsemble.innerobj.tmpweights; 289 } 290 set { 291 mlpEnsemble.innerobj.tmpweights = value; 292 } 293 } 294 [Storable] 295 private int MultiLayerPerceptronEnsembleWCount { 296 get { 297 return mlpEnsemble.innerobj.wcount; 298 } 299 set { 300 mlpEnsemble.innerobj.wcount = value; 301 } 302 } 303 224 304 [Storable] 225 305 private double[] MultiLayerPerceptronWeights { 226 306 get { 227 return multiLayerPerceptron.innerobj.weights; 228 } 229 set { 230 multiLayerPerceptron.innerobj.weights = value; 231 } 232 } 233 [Storable] 234 private double[] MultiLayerPerceptronX { 235 get { 236 return multiLayerPerceptron.innerobj.x; 237 } 238 set { 239 multiLayerPerceptron.innerobj.x = value; 307 return mlpEnsemble.innerobj.weights; 308 } 309 set { 310 mlpEnsemble.innerobj.weights = value; 240 311 } 241 312 } … … 243 314 private double[] MultiLayerPerceptronY { 244 315 get { 245 return m ultiLayerPerceptron.innerobj.y;246 } 247 set { 248 m ultiLayerPerceptron.innerobj.y = value;316 return mlpEnsemble.innerobj.y; 317 } 318 set { 319 mlpEnsemble.innerobj.y = value; 249 320 } 250 321 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegression.cs
r6579 r6580 36 36 namespace HeuristicLab.Algorithms.DataAnalysis { 37 37 /// <summary> 38 /// Neural network regression data analysis algorithm.38 /// Neural network ensemble regression data analysis algorithm. 39 39 /// </summary> 40 [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB).")]40 [Item("Neural Network Ensemble Regression", "Neural network ensemble regression data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/mlpensembles.php")] 41 41 [Creatable("Data Analysis")] 42 42 [StorableClass] 43 public sealed class NeuralNetworkRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> { 43 public sealed class NeuralNetworkEnsembleRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> { 44 private const string EnsembleSizeParameterName = "EnsembleSize"; 44 45 private const string DecayParameterName = "Decay"; 45 46 private const string HiddenLayersParameterName = "HiddenLayers"; … … 47 48 private const string NodesInSecondHiddenLayerParameterName = "NodesInSecondHiddenLayer"; 48 49 private const string RestartsParameterName = "Restarts"; 49 private const string NeuralNetwork RegressionModelResultName = "Neural networkregression solution";50 private const string NeuralNetworkEnsembleRegressionModelResultName = "Neural network ensemble regression solution"; 50 51 51 52 #region parameter properties 53 public IFixedValueParameter<IntValue> EnsembleSizeParameter { 54 get { return (IFixedValueParameter<IntValue>)Parameters[EnsembleSizeParameterName]; } 55 } 52 56 public IFixedValueParameter<DoubleValue> DecayParameter { 53 57 get { return (IFixedValueParameter<DoubleValue>)Parameters[DecayParameterName]; } … … 68 72 69 73 #region properties 74 public int EnsembleSize { 75 get { return EnsembleSizeParameter.Value.Value; } 76 set { 77 if (value < 1) throw new ArgumentException("The number of models in the ensemble must be positive and at least one.", 
"EnsembleSize"); 78 EnsembleSizeParameter.Value.Value = value; 79 } 80 } 70 81 public double Decay { 71 82 get { return DecayParameter.Value.Value; } … … 110 121 111 122 [StorableConstructor] 112 private NeuralNetwork Regression(bool deserializing) : base(deserializing) { }113 private NeuralNetwork Regression(NeuralNetworkRegression original, Cloner cloner)123 private NeuralNetworkEnsembleRegression(bool deserializing) : base(deserializing) { } 124 private NeuralNetworkEnsembleRegression(NeuralNetworkEnsembleRegression original, Cloner cloner) 114 125 : base(original, cloner) { 115 126 } 116 public NeuralNetwork Regression()127 public NeuralNetworkEnsembleRegression() 117 128 : base() { 118 129 var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { new IntValue(0), new IntValue(1), new IntValue(2) }); … … 121 132 select v) 122 133 .Single(); 123 Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strengh of regularization and should be set to a value between 0.001 (weak regularization) to 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(1))); 134 Parameters.Add(new FixedValueParameter<IntValue>(EnsembleSizeParameterName, "The number of simple neural network models in the ensemble. A good value is 10.", new IntValue(10))); 135 Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strengh of regularization and should be set to a value between 0.001 (weak regularization) to 100 (very strong regularization). 
The correct value should be determined via cross-validation.", new DoubleValue(0.001))); 124 136 Parameters.Add(new ConstrainedValueParameter<IntValue>(HiddenLayersParameterName, "The number of hidden layers for the neural network (0, 1, or 2)", validHiddenLayerValues, selectedHiddenLayerValue)); 125 Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. Th is value is not used if the number of hidden layers is zero.", new IntValue(10)));126 Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(10 )));137 Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. The value should be rather large (30-100 nodes) in order to make the network highly flexible and run into the early stopping criterion). This value is not used if the number of hidden layers is zero.", new IntValue(100))); 138 Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. 
This value is not used if the number of hidden layers is zero or one.", new IntValue(100))); 127 139 Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of restarts for learning.", new IntValue(2))); 128 140 … … 133 145 134 146 public override IDeepCloneable Clone(Cloner cloner) { 135 return new NeuralNetwork Regression(this, cloner);147 return new NeuralNetworkEnsembleRegression(this, cloner); 136 148 } 137 149 138 #region neural network 150 #region neural network ensemble 139 151 protected override void Run() { 140 152 double rmsError, avgRelError; 141 var solution = CreateNeuralNetwork RegressionSolution(Problem.ProblemData, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError);142 Results.Add(new Result(NeuralNetwork RegressionModelResultName, "The neural networkregression solution.", solution));143 Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network regression solution on the training set.", new DoubleValue(rmsError)));144 Results.Add(new Result("Average relative error", "The average of relative errors of the neural network regression solution on the training set.", new PercentValue(avgRelError)));153 var solution = CreateNeuralNetworkEnsembleRegressionSolution(Problem.ProblemData, EnsembleSize, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError); 154 Results.Add(new Result(NeuralNetworkEnsembleRegressionModelResultName, "The neural network ensemble regression solution.", solution)); 155 Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network ensemble regression solution on the training set.", new DoubleValue(rmsError))); 156 Results.Add(new Result("Average relative error", "The average of relative errors of the neural network ensemble regression solution on the training set.", new 
PercentValue(avgRelError))); 145 157 } 146 158 147 public static IRegressionSolution CreateNeuralNetwork RegressionSolution(IRegressionProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,159 public static IRegressionSolution CreateNeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts, 148 160 out double rmsError, out double avgRelError) { 149 161 Dataset dataset = problemData.Dataset; … … 153 165 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 154 166 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 155 throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");167 throw new NotSupportedException("Neural network ensemble regression does not support NaN or infinity values in the input dataset."); 156 168 157 double targetMin = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Min(); 158 targetMin = targetMin - targetMin * 0.1; // -10% 159 double targetMax = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Max(); 160 targetMax = targetMax + targetMax * 0.1; // + 10% 161 162 alglib.multilayerperceptron multiLayerPerceptron = null; 169 alglib.mlpensemble mlpEnsemble = null; 163 170 if (nLayers == 0) { 164 alglib.mlp creater0(allowedInputVariables.Count(), 1, targetMin, targetMax, out multiLayerPerceptron);171 alglib.mlpecreate0(allowedInputVariables.Count(), 1, ensembleSize, out mlpEnsemble); 165 172 } else if (nLayers == 1) { 166 alglib.mlp creater1(allowedInputVariables.Count(), nHiddenNodes1, 1, targetMin, targetMax, out multiLayerPerceptron);173 alglib.mlpecreate1(allowedInputVariables.Count(), nHiddenNodes1, 1, ensembleSize, out mlpEnsemble); 167 174 } else if (nLayers == 2) { 168 alglib.mlp 
creater2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, targetMin, targetMax, out multiLayerPerceptron);175 alglib.mlpecreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, ensembleSize, out mlpEnsemble); 169 176 } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers"); 170 177 alglib.mlpreport rep; … … 172 179 173 180 int info; 174 // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary 175 alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep); 176 if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution"); 181 alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep); 182 if (info != 6) throw new ArgumentException("Error in calculation of neural network ensemble regression solution"); 177 183 178 rmsError = alglib.mlp rmserror(multiLayerPerceptron, inputMatrix, nRows);179 avgRelError = alglib.mlp avgrelerror(multiLayerPerceptron, inputMatrix, nRows);184 rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows); 185 avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows); 180 186 181 return new NeuralNetwork RegressionSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));187 return new NeuralNetworkEnsembleRegressionSolution(problemData, new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables)); 182 188 } 183 189 #endregion -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegressionSolution.cs
r6577 r6580 31 31 namespace HeuristicLab.Algorithms.DataAnalysis { 32 32 /// <summary> 33 /// Represents a neural network solution for a regression problem which can be visualized in the GUI.33 /// Represents a neural network ensemble solution for a regression problem which can be visualized in the GUI. 34 34 /// </summary> 35 [Item("NeuralNetwork RegressionSolution", "Represents a neural networksolution for a regression problem which can be visualized in the GUI.")]35 [Item("NeuralNetworkEnsembleRegressionSolution", "Represents a neural network ensemble solution for a regression problem which can be visualized in the GUI.")] 36 36 [StorableClass] 37 public sealed class NeuralNetwork RegressionSolution : RegressionSolution, INeuralNetworkRegressionSolution {37 public sealed class NeuralNetworkEnsembleRegressionSolution : RegressionSolution, INeuralNetworkEnsembleRegressionSolution { 38 38 39 public new INeuralNetwork Model Model {40 get { return (INeuralNetwork Model)base.Model; }39 public new INeuralNetworkEnsembleModel Model { 40 get { return (INeuralNetworkEnsembleModel)base.Model; } 41 41 set { base.Model = value; } 42 42 } 43 43 44 44 [StorableConstructor] 45 private NeuralNetwork RegressionSolution(bool deserializing) : base(deserializing) { }46 private NeuralNetwork RegressionSolution(NeuralNetworkRegressionSolution original, Cloner cloner)45 private NeuralNetworkEnsembleRegressionSolution(bool deserializing) : base(deserializing) { } 46 private NeuralNetworkEnsembleRegressionSolution(NeuralNetworkEnsembleRegressionSolution original, Cloner cloner) 47 47 : base(original, cloner) { 48 48 } 49 public NeuralNetwork RegressionSolution(IRegressionProblemData problemData, INeuralNetworkModel nnModel)49 public NeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, INeuralNetworkEnsembleModel nnModel) 50 50 : base(nnModel, problemData) { 51 51 } 52 52 53 53 public override IDeepCloneable Clone(Cloner cloner) { 54 return new NeuralNetwork 
RegressionSolution(this, cloner);54 return new NeuralNetworkEnsembleRegressionSolution(this, cloner); 55 55 } 56 56 } -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs
r6579 r6580 38 38 /// Neural network regression data analysis algorithm. 39 39 /// </summary> 40 [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB). ")]40 [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/neuralnetworks.php")] 41 41 [Creatable("Data Analysis")] 42 42 [StorableClass]
Note: See TracChangeset for help on using the changeset viewer.