
Timestamp:
11/19/21 16:07:45 (3 years ago)
Author:
mkommend
Message:

#2521: Merged trunk changes into branch.

Location:
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork
Files:
3 edited
1 copied

Legend:

  (no prefix)  Unmodified context line
  +            Added in r18086
  -            Removed (present only in r17226)
  • branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs

    r17226 → r18086

    @@ -209,6 +209,6 @@
             alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, out multiLayerPerceptron);
           } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    +
           alglib.mlpreport rep;
    -
           int info;
           // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
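
    The only change in this hunk is a relocated blank line, but it shows the ALGLIB calls HeuristicLab uses for classification: mlpcreatec1/mlpcreatec2 build a classifier network and mlptrainlm trains it from a matrix whose last column holds the class index. A minimal, self-contained sketch of that call pattern follows; it assumes the ALGLIB .NET package is referenced, and the MlpClassificationDemo class, data set, node count, decay, and restart values are illustrative, not taken from the changeset.

    using System;

    public static class MlpClassificationDemo {
      public static void Main() {
        // Two inputs per sample; the last column holds the class index (0 or 1),
        // which is the layout mlptrainlm expects for classifier networks.
        double[,] inputMatrix = {
          { 0.0, 0.0, 0 },
          { 0.0, 1.0, 1 },
          { 1.0, 0.0, 1 },
          { 1.0, 1.0, 0 },
        };
        int nRows = inputMatrix.GetLength(0);
        int nClasses = 2;

        // One hidden layer with 5 nodes; mlpcreatec2 would add a second hidden layer.
        alglib.multilayerperceptron multiLayerPerceptron;
        alglib.mlpcreatec1(2, 5, nClasses, out multiLayerPerceptron);

        int info;
        alglib.mlpreport rep;
        // Levenberg-Marquardt training, as in HeuristicLab; decay and restarts are example values.
        alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, 0.001, 2, out info, out rep);
        if (info != 2) throw new ArgumentException("Neural network training failed, info = " + info);

        // For classifier networks mlpprocess returns class membership probabilities.
        double[] x = { 1.0, 0.0 };
        double[] y = new double[nClasses];
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        Console.WriteLine($"P(class 0) = {y[0]:F3}, P(class 1) = {y[1]:F3}");
      }
    }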
  • branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs

    r17226 → r18086

    @@ -20,4 +20,5 @@
     #endregion
     
    +extern alias alglib_3_7;
     using System;
     using System.Collections.Generic;

    @@ -32,10 +33,18 @@
       /// Represents a neural network model for regression and classification
       /// </summary>
    -  [StorableType("AEB9B960-FCA6-4A6D-BD5F-27BCE9CC5BEA")]
    +  [StorableType("DABDBD64-E93B-4F50-A343-C8A92C1C48A4")]
       [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
       public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {
     
         private object mlpLocker = new object();
    +
    +
    +
         private alglib.multilayerperceptron multiLayerPerceptron;
    +    [Storable]
    +    private string SerializedMultiLayerPerceptron {
    +      get { alglib.mlpserialize(multiLayerPerceptron, out var ser); return ser; }
    +      set { if (value != null) alglib.mlpunserialize(value, out multiLayerPerceptron); }
    +    }
     
         public override IEnumerable<string> VariablesUsedForPrediction {

    @@ -48,21 +57,9 @@
         private double[] classValues;
         [StorableConstructor]
    -    private NeuralNetworkModel(StorableConstructorFlag _) : base(_) {
    -      multiLayerPerceptron = new alglib.multilayerperceptron();
    -    }
    +    private NeuralNetworkModel(StorableConstructorFlag _) : base(_) { }
         private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
           : base(original, cloner) {
    -      multiLayerPerceptron = new alglib.multilayerperceptron();
    -      multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
    -      multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
    -      multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
    -      multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
    -      multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
    -      multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
    -      multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
    -      multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
    -      multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
    -      multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
    -      multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
    +      if (original.multiLayerPerceptron != null)
    +        multiLayerPerceptron = (alglib.multilayerperceptron)original.multiLayerPerceptron.make_copy();
           allowedInputVariables = (string[])original.allowedInputVariables.Clone();
           if (original.classValues != null)

    @@ -73,5 +70,5 @@
           this.name = ItemName;
           this.description = ItemDescription;
    -      this.multiLayerPerceptron = multiLayerPerceptron;
    +      this.multiLayerPerceptron = (alglib.multilayerperceptron)multiLayerPerceptron.make_copy();
           this.allowedInputVariables = allowedInputVariables.ToArray();
           if (classValues != null)

    @@ -95,5 +92,5 @@
              x[column] = inputData[row, column];
            }
    -        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save!
    +        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
            lock (mlpLocker) {
              alglib.mlpprocess(multiLayerPerceptron, x, ref y);

    @@ -115,5 +112,5 @@
              x[column] = inputData[row, column];
            }
    -        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save!
    +        // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
            lock (mlpLocker) {
              alglib.mlpprocess(multiLayerPerceptron, x, ref y);

    @@ -156,106 +153,4 @@
           return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
         }
    -
    -    #region persistence
    -    [Storable]
    -    private double[,] MultiLayerPerceptronChunks {
    -      get {
    -        return multiLayerPerceptron.innerobj.chunks;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.chunks = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronColumnMeans {
    -      get {
    -        return multiLayerPerceptron.innerobj.columnmeans;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.columnmeans = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronColumnSigmas {
    -      get {
    -        return multiLayerPerceptron.innerobj.columnsigmas;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.columnsigmas = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronDError {
    -      get {
    -        return multiLayerPerceptron.innerobj.derror;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.derror = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronDfdnet {
    -      get {
    -        return multiLayerPerceptron.innerobj.dfdnet;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.dfdnet = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronNeurons {
    -      get {
    -        return multiLayerPerceptron.innerobj.neurons;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.neurons = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronNwbuf {
    -      get {
    -        return multiLayerPerceptron.innerobj.nwbuf;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.nwbuf = value;
    -      }
    -    }
    -    [Storable]
    -    private int[] MultiLayerPerceptronStuctinfo {
    -      get {
    -        return multiLayerPerceptron.innerobj.structinfo;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.structinfo = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronWeights {
    -      get {
    -        return multiLayerPerceptron.innerobj.weights;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.weights = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronX {
    -      get {
    -        return multiLayerPerceptron.innerobj.x;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.x = value;
    -      }
    -    }
    -    [Storable]
    -    private double[] MultiLayerPerceptronY {
    -      get {
    -        return multiLayerPerceptron.innerobj.y;
    -      }
    -      set {
    -        multiLayerPerceptron.innerobj.y = value;
    -      }
    -    }
    -    #endregion
       }
     }
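
    The substance of this file's diff is the switch from per-field [Storable] properties and manual copying of multiLayerPerceptron.innerobj arrays to ALGLIB's built-in support: make_copy() for cloning and mlpserialize/mlpunserialize for persistence via the new SerializedMultiLayerPerceptron string property. A minimal sketch of that round trip follows; it assumes the ALGLIB .NET package is referenced, and the MlpSerializationDemo class and the tiny untrained network are illustrative only.

    using System;

    public static class MlpSerializationDemo {
      public static void Main() {
        // Create a tiny network: 2 inputs, no hidden layer, 1 output.
        alglib.multilayerperceptron multiLayerPerceptron;
        alglib.mlpcreate0(2, 1, out multiLayerPerceptron);

        // Serialize to a string, as the [Storable] getter does on save.
        string serialized;
        alglib.mlpserialize(multiLayerPerceptron, out serialized);

        // Deserialize into a fresh instance, as the setter does on load.
        alglib.multilayerperceptron restored;
        alglib.mlpunserialize(serialized, out restored);

        // Clone via make_copy(), as the copy constructor now does.
        var cloned = (alglib.multilayerperceptron)multiLayerPerceptron.make_copy();

        // All three networks should produce identical outputs for the same input.
        double[] x = { 1.0, 2.0 };
        double[] y1 = new double[1], y2 = new double[1], y3 = new double[1];
        alglib.mlpprocess(multiLayerPerceptron, x, ref y1);
        alglib.mlpprocess(restored, x, ref y2);
        alglib.mlpprocess(cloned, x, ref y3);
        Console.WriteLine($"original: {y1[0]}, deserialized: {y2[0]}, cloned: {y3[0]}");
      }
    }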
  • branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs

    r17226 → r18086

    @@ -186,4 +186,5 @@
           IEnumerable<int> rows = problemData.TrainingIndices;
           double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    +      int nRows = inputMatrix.GetLength(0);
           if (inputMatrix.ContainsNanOrInfinity())
             throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");

    @@ -197,10 +198,8 @@
             alglib.mlpcreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, out multiLayerPerceptron);
           } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    -      alglib.mlpreport rep;
    -      int nRows = inputMatrix.GetLength(0);
     
           int info;
           // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
    -      alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
    +      alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out _);
           if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution");
     
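
    The regression changes mirror the classification file: nRows is now computed right after the input matrix is built, and the unused alglib.mlpreport is discarded with the C# 7 `out _` discard instead of a dedicated variable. A minimal sketch of the resulting training call follows; it assumes the ALGLIB .NET package is referenced, and the MlpRegressionDemo class, sample data, hidden-layer size, decay, and restarts are illustrative values, not HeuristicLab defaults.

    using System;

    public static class MlpRegressionDemo {
      public static void Main() {
        // One input column plus the target value in the last column,
        // the layout mlptrainlm expects for regression networks.
        double[,] inputMatrix = {
          { 0.0, 0.0 },
          { 1.0, 1.0 },
          { 2.0, 4.0 },
          { 3.0, 9.0 },
        };
        int nRows = inputMatrix.GetLength(0);

        // One hidden layer with 5 nodes, one output (mlpcreate2 would add a second hidden layer).
        alglib.multilayerperceptron multiLayerPerceptron;
        alglib.mlpcreate1(1, 5, 1, out multiLayerPerceptron);

        int info;
        // Levenberg-Marquardt training; the report is discarded with out _, as in r18086.
        alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, 0.001, 2, out info, out _);
        if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution");

        double[] x = { 1.5 };
        double[] y = new double[1];
        alglib.mlpprocess(multiLayerPerceptron, x, ref y);
        Console.WriteLine($"prediction for x = 1.5: {y[0]:F3}");
      }
    }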