Timestamp:
04/09/21 19:41:33 (3 years ago)
Author:
gkronber
Message:

#3117: update alglib to version 3.17

Location:
trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork
Files:
3 edited
1 copied

Legend:

    +  line added in this changeset
    -  line removed in this changeset
       (lines without a marker are unchanged context)
  • trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs

    r17180 → r17931
              alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, out multiLayerPerceptron);
            } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    +
            alglib.mlpreport rep;
    -
            int info;
            // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
  • trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs

    r17180 → r17931
      #endregion

    + extern alias alglib_3_7;
      using System;
      using System.Collections.Generic;
    ...
        /// Represents a neural network model for regression and classification
        /// </summary>
    -   [StorableType("AEB9B960-FCA6-4A6D-BD5F-27BCE9CC5BEA")]
    +   [StorableType("DABDBD64-E93B-4F50-A343-C8A92C1C48A4")]
        [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
        public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {

          private object mlpLocker = new object();
    +
    +
    +
          private alglib.multilayerperceptron multiLayerPerceptron;
    +     [Storable]
    +     private string SerializedMultiLayerPerceptron {
    +       get { alglib.mlpserialize(multiLayerPerceptron, out var ser); return ser; }
    +       set { if (value != null) alglib.mlpunserialize(value, out multiLayerPerceptron); }
    +     }

          public override IEnumerable<string> VariablesUsedForPrediction {
    ...
          private double[] classValues;
          [StorableConstructor]
    -     private NeuralNetworkModel(StorableConstructorFlag _) : base(_) {
    -       multiLayerPerceptron = new alglib.multilayerperceptron();
    -     }
    +     private NeuralNetworkModel(StorableConstructorFlag _) : base(_) { }
          private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
            : base(original, cloner) {
    -       multiLayerPerceptron = new alglib.multilayerperceptron();
    -       multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
    -       multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
    -       multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
    -       multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
    -       multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
    -       multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
    -       multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
    -       multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
    -       multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
    -       multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
    -       multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
    +       if (original.multiLayerPerceptron != null)
    +         multiLayerPerceptron = (alglib.multilayerperceptron)original.multiLayerPerceptron.make_copy();
            allowedInputVariables = (string[])original.allowedInputVariables.Clone();
            if (original.classValues != null)
    ...
            this.name = ItemName;
            this.description = ItemDescription;
    -       this.multiLayerPerceptron = multiLayerPerceptron;
    +       this.multiLayerPerceptron = (alglib.multilayerperceptron)multiLayerPerceptron.make_copy();
            this.allowedInputVariables = allowedInputVariables.ToArray();
            if (classValues != null)
    ...
              x[column] = inputData[row, column];
            }
    -       // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save!
    +       // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
            lock (mlpLocker) {
              alglib.mlpprocess(multiLayerPerceptron, x, ref y);
    ...
              x[column] = inputData[row, column];
            }
    -       // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save!
    +       // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-safe!
            lock (mlpLocker) {
              alglib.mlpprocess(multiLayerPerceptron, x, ref y);
    ...
            return new NeuralNetworkClassificationSolution(this, new ClassificationProblemData(problemData));
          }
    -
    -     #region persistence
    -     [Storable]
    -     private double[,] MultiLayerPerceptronChunks {
    -       get {
    -         return multiLayerPerceptron.innerobj.chunks;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.chunks = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronColumnMeans {
    -       get {
    -         return multiLayerPerceptron.innerobj.columnmeans;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.columnmeans = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronColumnSigmas {
    -       get {
    -         return multiLayerPerceptron.innerobj.columnsigmas;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.columnsigmas = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronDError {
    -       get {
    -         return multiLayerPerceptron.innerobj.derror;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.derror = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronDfdnet {
    -       get {
    -         return multiLayerPerceptron.innerobj.dfdnet;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.dfdnet = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronNeurons {
    -       get {
    -         return multiLayerPerceptron.innerobj.neurons;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.neurons = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronNwbuf {
    -       get {
    -         return multiLayerPerceptron.innerobj.nwbuf;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.nwbuf = value;
    -       }
    -     }
    -     [Storable]
    -     private int[] MultiLayerPerceptronStuctinfo {
    -       get {
    -         return multiLayerPerceptron.innerobj.structinfo;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.structinfo = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronWeights {
    -       get {
    -         return multiLayerPerceptron.innerobj.weights;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.weights = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronX {
    -       get {
    -         return multiLayerPerceptron.innerobj.x;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.x = value;
    -       }
    -     }
    -     [Storable]
    -     private double[] MultiLayerPerceptronY {
    -       get {
    -         return multiLayerPerceptron.innerobj.y;
    -       }
    -       set {
    -         multiLayerPerceptron.innerobj.y = value;
    -       }
    -     }
    -     #endregion
        }
      }
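
    The change above drops the per-field [Storable] properties and the manual innerobj cloning in favor of alglib 3.17's own support: mlpserialize/mlpunserialize round-trip the network through a string, and make_copy() produces a deep copy. A minimal sketch of that pattern, assuming a reference to the ALGLIB 3.17 assembly; the class and variable names below are illustrative, and only the alglib calls themselves are taken from the diff:

        using System;

        public static class MlpPersistenceSketch {
          public static void Main() {
            // Build a small network: 2 inputs, 5 hidden nodes, 1 output.
            alglib.multilayerperceptron mlp;
            alglib.mlpcreate1(2, 5, 1, out mlp);

            // Serialize to a string (what the SerializedMultiLayerPerceptron getter stores).
            alglib.mlpserialize(mlp, out var serialized);

            // Restore from the string (what the property setter does on deserialization).
            alglib.mlpunserialize(serialized, out alglib.multilayerperceptron restored);

            // Deep copy via alglib's own copy support (what the cloning constructor now uses).
            var copy = (alglib.multilayerperceptron)restored.make_copy();

            // mlpprocess mutates internal buffers, hence the lock around it in the model code.
            double[] x = { 1.0, 2.0 };
            double[] y = new double[1];
            alglib.mlpprocess(copy, x, ref y);
            Console.WriteLine(y[0]);
          }
        }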
  • trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModelAlglib_3_7.cs

    r17926 → r17931
      #endregion

    + extern alias alglib_3_7;
      using System;
      using System.Collections.Generic;
    ...
        [StorableType("AEB9B960-FCA6-4A6D-BD5F-27BCE9CC5BEA")]
        [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
    -   public sealed class NeuralNetworkModel : ClassificationModel, INeuralNetworkModel {
    +   [Obsolete("This version uses alglib version 3.7. Use NeuralNetworkModel instead.")]
    +   public sealed class NeuralNetworkModelAlglib_3_7 : ClassificationModel, INeuralNetworkModel {

          private object mlpLocker = new object();
    -     private alglib.multilayerperceptron multiLayerPerceptron;
    +     private alglib_3_7.alglib.multilayerperceptron multiLayerPerceptron;

          public override IEnumerable<string> VariablesUsedForPrediction {
    ...
          private double[] classValues;
          [StorableConstructor]
    -     private NeuralNetworkModel(StorableConstructorFlag _) : base(_) {
    -       multiLayerPerceptron = new alglib.multilayerperceptron();
    -     }
    -     private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
    +     private NeuralNetworkModelAlglib_3_7(StorableConstructorFlag _) : base(_) {
    +       multiLayerPerceptron = new alglib_3_7.alglib.multilayerperceptron();
    +     }
    +     private NeuralNetworkModelAlglib_3_7(NeuralNetworkModelAlglib_3_7 original, Cloner cloner)
            : base(original, cloner) {
    -       multiLayerPerceptron = new alglib.multilayerperceptron();
    +       multiLayerPerceptron = new alglib_3_7.alglib.multilayerperceptron();
            multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
            multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
    ...
              this.classValues = (double[])original.classValues.Clone();
          }
    -     public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    +     public NeuralNetworkModelAlglib_3_7(alglib_3_7.alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
            : base(targetVariable) {
            this.name = ItemName;
    ...

          public override IDeepCloneable Clone(Cloner cloner) {
    -       return new NeuralNetworkModel(this, cloner);
    +       return new NeuralNetworkModelAlglib_3_7(this, cloner);
          }

    ...
            // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save!
            lock (mlpLocker) {
    -         alglib.mlpprocess(multiLayerPerceptron, x, ref y);
    +         alglib_3_7.alglib.mlpprocess(multiLayerPerceptron, x, ref y);
            }
            yield return y[0];
    ...
            // NOTE: mlpprocess changes data in multiLayerPerceptron and is therefore not thread-save!
            lock (mlpLocker) {
    -         alglib.mlpprocess(multiLayerPerceptron, x, ref y);
    +         alglib_3_7.alglib.mlpprocess(multiLayerPerceptron, x, ref y);
            }
            // find class for with the largest probability value
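
    This copied file keeps the old implementation compiled against ALGLIB 3.7 by pulling that assembly in under an extern alias, so both versions of the alglib namespace can coexist in one project. A minimal sketch of the mechanism; the assembly file names in the comments are illustrative, and only the alias name alglib_3_7 and the aliased type usages come from the diff:

        // The alias is assigned on the assembly reference, for example
        //   csc /reference:alglib_3_7=ALGLIB-3.7.0.dll /reference:ALGLIB-3.17.0.dll Sketch.cs
        // or in the project file:
        //   <Reference Include="ALGLIB-3.7.0"><Aliases>alglib_3_7</Aliases></Reference>
        extern alias alglib_3_7;   // must precede all using directives

        using System;

        public static class ExternAliasSketch {
          public static void Main() {
            // Type from the default (global) alglib reference:
            alglib.multilayerperceptron current;
            alglib.mlpcreate0(2, 1, out current);

            // Same type name resolved through the aliased 3.7 assembly:
            var legacy = new alglib_3_7.alglib.multilayerperceptron();

            Console.WriteLine(current.GetType().Assembly.FullName);
            Console.WriteLine(legacy.GetType().Assembly.FullName);
          }
        }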
  • trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs

    r17180 → r17931
            IEnumerable<int> rows = problemData.TrainingIndices;
            double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    +       int nRows = inputMatrix.GetLength(0);
            if (inputMatrix.ContainsNanOrInfinity())
              throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");
    ...
            alglib.multilayerperceptron multiLayerPerceptron = null;
            if (nLayers == 0) {
    -         alglib.mlpcreate0(allowedInputVariables.Count(), 1, out multiLayerPerceptron);
    +         alglib.mlpcreate0(allowedInputVariables.Count(), nout: 1, out multiLayerPerceptron);
            } else if (nLayers == 1) {
    -         alglib.mlpcreate1(allowedInputVariables.Count(), nHiddenNodes1, 1, out multiLayerPerceptron);
    +         alglib.mlpcreate1(allowedInputVariables.Count(), nHiddenNodes1, nout: 1, out multiLayerPerceptron);
            } else if (nLayers == 2) {
    -         alglib.mlpcreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, out multiLayerPerceptron);
    +         alglib.mlpcreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nout: 1, out multiLayerPerceptron);
            } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    -       alglib.mlpreport rep;
    -       int nRows = inputMatrix.GetLength(0);

            int info;
            // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
    -       alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
    +       alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out _);
            if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution");

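
    After this change the regression trainer computes nRows before the NaN check, passes the output count as a named argument, and discards the training report with out _. A rough sketch of the resulting call sequence; the data, layer sizes, decay, and restart values are invented for illustration, while the alglib calls and the info == 2 success check mirror the diff:

        using System;

        public static class MlpTrainingSketch {
          public static void Main() {
            // One row per training sample: input columns first, target variable in the last column,
            // matching the layout produced by dataset.ToArray(inputs + target) above.
            double[,] xy = {
              { 0.0, 0.0, 0.0 },
              { 0.0, 1.0, 1.0 },
              { 1.0, 0.0, 1.0 },
              { 1.0, 1.0, 0.0 },
            };
            int nRows = xy.GetLength(0);

            alglib.multilayerperceptron mlp;
            alglib.mlpcreate1(2, 5, nout: 1, out mlp);   // 2 inputs, 5 hidden nodes, 1 output

            // Levenberg-Marquardt training; the mlpreport is discarded like "out _" in the diff.
            int info;
            alglib.mlptrainlm(mlp, xy, nRows, 0.001, 2, out info, out _);
            if (info != 2) throw new InvalidOperationException("Training failed, info = " + info);

            double[] x = { 1.0, 0.0 };
            double[] y = new double[1];
            alglib.mlpprocess(mlp, x, ref y);
            Console.WriteLine(y[0]);
          }
        }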