- Timestamp:
- 04/09/21 19:41:33 (4 years ago)
- Location:
- trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork
- Files:
- 3 edited
- 1 copied
Legend:
- Unmodified
- Added
- Removed
-
trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs
r17180 r17931 209 209 alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, out multiLayerPerceptron); 210 210 } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers"); 211 211 212 alglib.mlpreport rep; 212 213 213 int info; 214 214 // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary -
trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs
r17180 r17931 20 20 #endregion 21 21 22 extern alias alglib_3_7; 22 23 using System; 23 24 using System.Collections.Generic; … … 32 33 /// Represents a neural network model for regression and classification 33 34 /// </summary> 34 [StorableType(" AEB9B960-FCA6-4A6D-BD5F-27BCE9CC5BEA")]35 [StorableType("DABDBD64-E93B-4F50-A343-C8A92C1C48A4")] 35 36 [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")] 36 37 public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel { 37 38 38 39 private object mlpLocker = new object(); 40 41 42 39 43 private alglib.multilayerperceptron multiLayerPerceptron; 44 [Storable] 45 private string SerializedMultiLayerPerceptron { 46 get { alglib.mlpserialize(multiLayerPerceptron, out var ser); return ser; } 47 set { if (value != null) alglib.mlpunserialize(value, out multiLayerPerceptron); } 48 } 40 49 41 50 public override IEnumerable<string> VariablesUsedForPrediction { … … 48 57 private double[] classValues; 49 58 [StorableConstructor] 50 private NeuralNetworkModel(StorableConstructorFlag _) : base(_) { 51 multiLayerPerceptron = new alglib.multilayerperceptron(); 52 } 59 private NeuralNetworkModel(StorableConstructorFlag _) : base(_) { } 53 60 private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner) 54 61 : base(original, cloner) { 55 multiLayerPerceptron = new alglib.multilayerperceptron(); 56 multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone(); 57 multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone(); 58 multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone(); 59 multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone(); 60 multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone(); 61 
multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone(); 62 multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone(); 63 multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone(); 64 multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone(); 65 multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone(); 66 multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone(); 62 if (original.multiLayerPerceptron != null) 63 multiLayerPerceptron = (alglib.multilayerperceptron)original.multiLayerPerceptron.make_copy(); 67 64 allowedInputVariables = (string[])original.allowedInputVariables.Clone(); 68 65 if (original.classValues != null) … … 73 70 this.name = ItemName; 74 71 this.description = ItemDescription; 75 this.multiLayerPerceptron = multiLayerPerceptron;72 this.multiLayerPerceptron = (alglib.multilayerperceptron)multiLayerPerceptron.make_copy(); 76 73 this.allowedInputVariables = allowedInputVariables.ToArray(); 77 74 if (classValues != null) … … 95 92 x[column] = inputData[row, column]; 96 93 } 97 // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-sa ve!94 // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe! 98 95 lock (mlpLocker) { 99 96 alglib.mlpprocess(multiLayerPerceptron, x, ref y); … … 115 112 x[column] = inputData[row, column]; 116 113 } 117 // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-sa ve!114 // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe! 
118 115 lock (mlpLocker) { 119 116 alglib.mlpprocess(multiLayerPerceptron, x, ref y); … … 156 153 return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData)); 157 154 } 158 159 #region persistence160 [Storable]161 private double[,] MultiLayerPerceptronChunks {162 get {163 return multiLayerPerceptron.innerobj.chunks;164 }165 set {166 multiLayerPerceptron.innerobj.chunks = value;167 }168 }169 [Storable]170 private double[] MultiLayerPerceptronColumnMeans {171 get {172 return multiLayerPerceptron.innerobj.columnmeans;173 }174 set {175 multiLayerPerceptron.innerobj.columnmeans = value;176 }177 }178 [Storable]179 private double[] MultiLayerPerceptronColumnSigmas {180 get {181 return multiLayerPerceptron.innerobj.columnsigmas;182 }183 set {184 multiLayerPerceptron.innerobj.columnsigmas = value;185 }186 }187 [Storable]188 private double[] MultiLayerPerceptronDError {189 get {190 return multiLayerPerceptron.innerobj.derror;191 }192 set {193 multiLayerPerceptron.innerobj.derror = value;194 }195 }196 [Storable]197 private double[] MultiLayerPerceptronDfdnet {198 get {199 return multiLayerPerceptron.innerobj.dfdnet;200 }201 set {202 multiLayerPerceptron.innerobj.dfdnet = value;203 }204 }205 [Storable]206 private double[] MultiLayerPerceptronNeurons {207 get {208 return multiLayerPerceptron.innerobj.neurons;209 }210 set {211 multiLayerPerceptron.innerobj.neurons = value;212 }213 }214 [Storable]215 private double[] MultiLayerPerceptronNwbuf {216 get {217 return multiLayerPerceptron.innerobj.nwbuf;218 }219 set {220 multiLayerPerceptron.innerobj.nwbuf = value;221 }222 }223 [Storable]224 private int[] MultiLayerPerceptronStuctinfo {225 get {226 return multiLayerPerceptron.innerobj.structinfo;227 }228 set {229 multiLayerPerceptron.innerobj.structinfo = value;230 }231 }232 [Storable]233 private double[] MultiLayerPerceptronWeights {234 get {235 return multiLayerPerceptron.innerobj.weights;236 }237 set {238 multiLayerPerceptron.innerobj.weights = 
value;239 }240 }241 [Storable]242 private double[] MultiLayerPerceptronX {243 get {244 return multiLayerPerceptron.innerobj.x;245 }246 set {247 multiLayerPerceptron.innerobj.x = value;248 }249 }250 [Storable]251 private double[] MultiLayerPerceptronY {252 get {253 return multiLayerPerceptron.innerobj.y;254 }255 set {256 multiLayerPerceptron.innerobj.y = value;257 }258 }259 #endregion260 155 } 261 156 } -
trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModelAlglib_3_7.cs
r17926 r17931 20 20 #endregion 21 21 22 extern alias alglib_3_7; 22 23 using System; 23 24 using System.Collections.Generic; … … 34 35 [StorableType("AEB9B960-FCA6-4A6D-BD5F-27BCE9CC5BEA")] 35 36 [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")] 36 public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel { 37 [Obsolete("This version uses alglib version 3.7. Use NeuralNetworkModel instead.")] 38 public sealed class NeuralNetworkModelAlglib_3_7 : ClassificationModel, INeuralNetworkModel { 37 39 38 40 private object mlpLocker = new object(); 39 private alglib .multilayerperceptron multiLayerPerceptron;41 private alglib_3_7.alglib.multilayerperceptron multiLayerPerceptron; 40 42 41 43 public override IEnumerable<string> VariablesUsedForPrediction { … … 48 50 private double[] classValues; 49 51 [StorableConstructor] 50 private NeuralNetworkModel (StorableConstructorFlag _) : base(_) {51 multiLayerPerceptron = new alglib .multilayerperceptron();52 } 53 private NeuralNetworkModel (NeuralNetworkModeloriginal, Cloner cloner)52 private NeuralNetworkModelAlglib_3_7(StorableConstructorFlag _) : base(_) { 53 multiLayerPerceptron = new alglib_3_7.alglib.multilayerperceptron(); 54 } 55 private NeuralNetworkModelAlglib_3_7(NeuralNetworkModelAlglib_3_7 original, Cloner cloner) 54 56 : base(original, cloner) { 55 multiLayerPerceptron = new alglib .multilayerperceptron();57 multiLayerPerceptron = new alglib_3_7.alglib.multilayerperceptron(); 56 58 multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone(); 57 59 multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone(); … … 69 71 this.classValues = (double[])original.classValues.Clone(); 70 72 } 71 public NeuralNetworkModel (alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = 
null)73 public NeuralNetworkModelAlglib_3_7(alglib_3_7.alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null) 72 74 : base(targetVariable) { 73 75 this.name = ItemName; … … 80 82 81 83 public override IDeepCloneable Clone(Cloner cloner) { 82 return new NeuralNetworkModel (this, cloner);84 return new NeuralNetworkModelAlglib_3_7(this, cloner); 83 85 } 84 86 … … 97 99 // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save! 98 100 lock (mlpLocker) { 99 alglib .mlpprocess(multiLayerPerceptron, x, ref y);101 alglib_3_7.alglib.mlpprocess(multiLayerPerceptron, x, ref y); 100 102 } 101 103 yield return y[0]; … … 117 119 // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save! 118 120 lock (mlpLocker) { 119 alglib .mlpprocess(multiLayerPerceptron, x, ref y);121 alglib_3_7.alglib.mlpprocess(multiLayerPerceptron, x, ref y); 120 122 } 121 123 // find class for with the largest probability value -
trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs
r17180 r17931 186 186 IEnumerable<int> rows = problemData.TrainingIndices; 187 187 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows); 188 int nRows = inputMatrix.GetLength(0); 188 189 if (inputMatrix.ContainsNanOrInfinity()) 189 190 throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset."); … … 191 192 alglib.multilayerperceptron multiLayerPerceptron = null; 192 193 if (nLayers == 0) { 193 alglib.mlpcreate0(allowedInputVariables.Count(), 1, out multiLayerPerceptron);194 alglib.mlpcreate0(allowedInputVariables.Count(), nout: 1, out multiLayerPerceptron); 194 195 } else if (nLayers == 1) { 195 alglib.mlpcreate1(allowedInputVariables.Count(), nHiddenNodes1, 1, out multiLayerPerceptron);196 alglib.mlpcreate1(allowedInputVariables.Count(), nHiddenNodes1, nout: 1, out multiLayerPerceptron); 196 197 } else if (nLayers == 2) { 197 alglib.mlpcreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, out multiLayerPerceptron);198 alglib.mlpcreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nout: 1, out multiLayerPerceptron); 198 199 } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers"); 199 alglib.mlpreport rep;200 int nRows = inputMatrix.GetLength(0);201 200 202 201 int info; 203 202 // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary 204 alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);203 alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out _); 205 204 if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution"); 206 205
Note: See TracChangeset for help on using the changeset viewer.