Changeset 6580


Ignore:
Timestamp:
07/21/11 13:37:43 (8 years ago)
Author:
gkronber
Message:

#1474: added implementations for regression and classification with neural network ensembles (wrappers for alglib).

Location:
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
Files:
3 edited
8 copied

Legend:

Unmodified
Added
Removed
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r6579 r6580  
    112112    <Compile Include="HeuristicLabAlgorithmsDataAnalysisPlugin.cs" />
    113113    <Compile Include="FixedDataAnalysisAlgorithm.cs" />
     114    <Compile Include="Interfaces\INeuralNetworkEnsembleClassificationSolution.cs" />
     115    <Compile Include="Interfaces\INeuralNetworkEnsembleRegressionSolution.cs" />
     116    <Compile Include="Interfaces\INeuralNetworkEnsembleModel.cs" />
    114117    <Compile Include="Interfaces\INeuralNetworkClassificationSolution.cs" />
    115118    <Compile Include="Interfaces\INeuralNetworkRegressionSolution.cs" />
     
    135138    <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" />
    136139    <Compile Include="Linear\MultinomialLogitModel.cs" />
     140    <Compile Include="NeuralNetwork\NeuralNetworkEnsembleClassification.cs" />
     141    <Compile Include="NeuralNetwork\NeuralNetworkEnsembleClassificationSolution.cs" />
     142    <Compile Include="NeuralNetwork\NeuralNetworkEnsembleModel.cs" />
     143    <Compile Include="NeuralNetwork\NeuralNetworkEnsembleRegressionSolution.cs" />
     144    <Compile Include="NeuralNetwork\NeuralNetworkEnsembleRegression.cs" />
    137145    <Compile Include="NeuralNetwork\NeuralNetworkClassification.cs" />
    138146    <Compile Include="NeuralNetwork\NeuralNetworkClassificationSolution.cs" />
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/INeuralNetworkEnsembleClassificationSolution.cs

    r6577 r6580  
    2626namespace HeuristicLab.Algorithms.DataAnalysis {
    2727  /// <summary>
    28   /// Interface to represent a neural network regression solution
     28  /// Interface to represent a neural network ensemble classification solution
    2929  /// </summary>
    30   public interface INeuralNetworkRegressionSolution : IRegressionSolution {
    31     new INeuralNetworkModel Model { get; }
     30  public interface INeuralNetworkEnsembleClassificationSolution : IClassificationSolution {
     31    new INeuralNetworkEnsembleModel Model { get; }
    3232  }
    3333}
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/INeuralNetworkEnsembleModel.cs

    r6577 r6580  
    2727namespace HeuristicLab.Algorithms.DataAnalysis {
    2828  /// <summary>
    29   /// Interface to represent a neural network model for either regression or classification
     29  /// Interface to represent a neural network ensemble model for either regression or classification
    3030  /// </summary>
    31   public interface INeuralNetworkModel : IRegressionModel, IClassificationModel {
     31  public interface INeuralNetworkEnsembleModel : IRegressionModel, IClassificationModel {
    3232  }
    3333}
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Interfaces/INeuralNetworkEnsembleRegressionSolution.cs

    r6577 r6580  
    2626namespace HeuristicLab.Algorithms.DataAnalysis {
    2727  /// <summary>
    28   /// Interface to represent a neural network regression solution
     28  /// Interface to represent a neural network ensemble regression solution
    2929  /// </summary>
    30   public interface INeuralNetworkRegressionSolution : IRegressionSolution {
    31     new INeuralNetworkModel Model { get; }
     30  public interface INeuralNetworkEnsembleRegressionSolution : IRegressionSolution {
     31    new INeuralNetworkEnsembleModel Model { get; }
    3232  }
    3333}
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs

    r6579 r6580  
    3838  /// Neural network classification data analysis algorithm.
    3939  /// </summary>
    40   [Item("Neural Network Classification", "Neural network classification data analysis algorithm (wrapper for ALGLIB).")]
     40  [Item("Neural Network Classification", "Neural network classification data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/neuralnetworks.php")]
    4141  [Creatable("Data Analysis")]
    4242  [StorableClass]
     
    156156        throw new NotSupportedException("Neural network classification does not support NaN or infinity values in the input dataset.");
    157157
    158       alglib.multilayerperceptron multiLayerPerceptron = null;
    159       int numberOfClasses = problemData.ClassValues.Count();
    160       if (nLayers == 0) {
    161         alglib.mlpcreatec0(allowedInputVariables.Count(), numberOfClasses, out multiLayerPerceptron);
    162       } else if (nLayers == 1) {
    163         alglib.mlpcreatec1(allowedInputVariables.Count(), nHiddenNodes1, numberOfClasses, out multiLayerPerceptron);
    164       } else if (nLayers == 2) {
    165         alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, numberOfClasses, out multiLayerPerceptron);
    166       } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    167       alglib.mlpreport rep;
    168158      int nRows = inputMatrix.GetLength(0);
    169159      int nFeatures = inputMatrix.GetLength(1) - 1;
     
    178168        inputMatrix[row, nFeatures] = classIndizes[inputMatrix[row, nFeatures]];
    179169      }
     170
     171      alglib.multilayerperceptron multiLayerPerceptron = null;
     172      if (nLayers == 0) {
     173        alglib.mlpcreatec0(allowedInputVariables.Count(), nClasses, out multiLayerPerceptron);
     174      } else if (nLayers == 1) {
     175        alglib.mlpcreatec1(allowedInputVariables.Count(), nHiddenNodes1, nClasses, out multiLayerPerceptron);
     176      } else if (nLayers == 2) {
     177        alglib.mlpcreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, out multiLayerPerceptron);
     178      } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
     179      alglib.mlpreport rep;
    180180
    181181      int info;
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassification.cs

    r6579 r6580  
    3636namespace HeuristicLab.Algorithms.DataAnalysis {
    3737  /// <summary>
    38   /// Neural network regression data analysis algorithm.
     38  /// Neural network ensemble classification data analysis algorithm.
    3939  /// </summary>
    40   [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB).")]
     40  [Item("Neural Network Ensemble Classification", "Neural network ensemble classification data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/mlpensembles.php")]
    4141  [Creatable("Data Analysis")]
    4242  [StorableClass]
    43   public sealed class NeuralNetworkRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
     43  public sealed class NeuralNetworkEnsembleClassification : FixedDataAnalysisAlgorithm<IClassificationProblem> {
     44    private const string EnsembleSizeParameterName = "EnsembleSize";
    4445    private const string DecayParameterName = "Decay";
    4546    private const string HiddenLayersParameterName = "HiddenLayers";
     
    4748    private const string NodesInSecondHiddenLayerParameterName = "NodesInSecondHiddenLayer";
    4849    private const string RestartsParameterName = "Restarts";
    49     private const string NeuralNetworkRegressionModelResultName = "Neural network regression solution";
     50    private const string NeuralNetworkEnsembleClassificationModelResultName = "Neural network ensemble classification solution";
    5051
    5152    #region parameter properties
     53    public IFixedValueParameter<IntValue> EnsembleSizeParameter {
     54      get { return (IFixedValueParameter<IntValue>)Parameters[EnsembleSizeParameterName]; }
     55    }
    5256    public IFixedValueParameter<DoubleValue> DecayParameter {
    5357      get { return (IFixedValueParameter<DoubleValue>)Parameters[DecayParameterName]; }
     
    6872
    6973    #region properties
     74    public int EnsembleSize {
     75      get { return EnsembleSizeParameter.Value.Value; }
     76      set {
     77        if (value < 1) throw new ArgumentException("The number of models in the ensemble must be positive and at least one.", "EnsembleSize");
     78        EnsembleSizeParameter.Value.Value = value;
     79      }
     80    }
    7081    public double Decay {
    7182      get { return DecayParameter.Value.Value; }
     
    110121
    111122    [StorableConstructor]
    112     private NeuralNetworkRegression(bool deserializing) : base(deserializing) { }
    113     private NeuralNetworkRegression(NeuralNetworkRegression original, Cloner cloner)
     123    private NeuralNetworkEnsembleClassification(bool deserializing) : base(deserializing) { }
     124    private NeuralNetworkEnsembleClassification(NeuralNetworkEnsembleClassification original, Cloner cloner)
    114125      : base(original, cloner) {
    115126    }
    116     public NeuralNetworkRegression()
     127    public NeuralNetworkEnsembleClassification()
    117128      : base() {
    118129      var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { new IntValue(0), new IntValue(1), new IntValue(2) });
     
    121132                                      select v)
    122133                                     .Single();
    123       Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strength of regularization and should be set to a value between 0.001 (weak regularization) and 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(1)));
     134      Parameters.Add(new FixedValueParameter<IntValue>(EnsembleSizeParameterName, "The number of simple neural network models in the ensemble. A good value is 10.", new IntValue(10)));
     135      Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strength of regularization and should be set to a value between 0.001 (weak regularization) and 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(0.001)));
    124136      Parameters.Add(new ConstrainedValueParameter<IntValue>(HiddenLayersParameterName, "The number of hidden layers for the neural network (0, 1, or 2)", validHiddenLayerValues, selectedHiddenLayerValue));
    125       Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. This value is not used if the number of hidden layers is zero.", new IntValue(10)));
    126       Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(10)));
     137      Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. The value should be rather large (30-100 nodes) in order to make the network highly flexible and run into the early stopping criterion. This value is not used if the number of hidden layers is zero.", new IntValue(100)));
     138      Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(100)));
    127139      Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of restarts for learning.", new IntValue(2)));
    128140
    129       Problem = new RegressionProblem();
     141      Problem = new ClassificationProblem();
    130142    }
    131143    [StorableHook(HookType.AfterDeserialization)]
     
    133145
    134146    public override IDeepCloneable Clone(Cloner cloner) {
    135       return new NeuralNetworkRegression(this, cloner);
    136     }
    137 
    138     #region neural network
     147      return new NeuralNetworkEnsembleClassification(this, cloner);
     148    }
     149
     150    #region neural network ensemble
    139151    protected override void Run() {
    140       double rmsError, avgRelError;
    141       var solution = CreateNeuralNetworkRegressionSolution(Problem.ProblemData, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError);
    142       Results.Add(new Result(NeuralNetworkRegressionModelResultName, "The neural network regression solution.", solution));
    143       Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network regression solution on the training set.", new DoubleValue(rmsError)));
    144       Results.Add(new Result("Average relative error", "The average of relative errors of the neural network regression solution on the training set.", new PercentValue(avgRelError)));
    145     }
    146 
    147     public static IRegressionSolution CreateNeuralNetworkRegressionSolution(IRegressionProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
    148       out double rmsError, out double avgRelError) {
     152      double rmsError, avgRelError, relClassError;
     153      var solution = CreateNeuralNetworkEnsembleClassificationSolution(Problem.ProblemData, EnsembleSize, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError, out relClassError);
     154      Results.Add(new Result(NeuralNetworkEnsembleClassificationModelResultName, "The neural network ensemble classification solution.", solution));
      155      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network ensemble classification solution on the training set.", new DoubleValue(rmsError)));
      156      Results.Add(new Result("Average relative error", "The average of relative errors of the neural network ensemble classification solution on the training set.", new PercentValue(avgRelError)));
     157      Results.Add(new Result("Relative classification error", "The percentage of misclassified samples.", new PercentValue(relClassError)));
     158    }
     159
     160    public static IClassificationSolution CreateNeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
     161      out double rmsError, out double avgRelError, out double relClassError) {
    149162      Dataset dataset = problemData.Dataset;
    150163      string targetVariable = problemData.TargetVariable;
     
    153166      double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    154167      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    155         throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");
    156 
    157       double targetMin = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Min();
    158       targetMin = targetMin - targetMin * 0.1; // -10%
    159       double targetMax = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Max();
    160       targetMax = targetMax + targetMax * 0.1; // + 10%
    161 
    162       alglib.multilayerperceptron multiLayerPerceptron = null;
     168        throw new NotSupportedException("Neural network ensemble classification does not support NaN or infinity values in the input dataset.");
     169
     170      int nRows = inputMatrix.GetLength(0);
     171      int nFeatures = inputMatrix.GetLength(1) - 1;
     172      double[] classValues = dataset.GetVariableValues(targetVariable).Distinct().OrderBy(x => x).ToArray();
     173      int nClasses = classValues.Count();
     174      // map original class values to values [0..nClasses-1]
     175      Dictionary<double, double> classIndizes = new Dictionary<double, double>();
     176      for (int i = 0; i < nClasses; i++) {
     177        classIndizes[classValues[i]] = i;
     178      }
     179      for (int row = 0; row < nRows; row++) {
     180        inputMatrix[row, nFeatures] = classIndizes[inputMatrix[row, nFeatures]];
     181      }
     182
     183      alglib.mlpensemble mlpEnsemble = null;
    163184      if (nLayers == 0) {
    164         alglib.mlpcreater0(allowedInputVariables.Count(), 1, targetMin, targetMax, out multiLayerPerceptron);
     185        alglib.mlpecreatec0(allowedInputVariables.Count(), nClasses, ensembleSize, out mlpEnsemble);
    165186      } else if (nLayers == 1) {
    166         alglib.mlpcreater1(allowedInputVariables.Count(), nHiddenNodes1, 1, targetMin, targetMax, out multiLayerPerceptron);
     187        alglib.mlpecreatec1(allowedInputVariables.Count(), nHiddenNodes1, nClasses, ensembleSize, out mlpEnsemble);
    167188      } else if (nLayers == 2) {
    168         alglib.mlpcreater2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, targetMin, targetMax, out multiLayerPerceptron);
     189        alglib.mlpecreatec2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, nClasses, ensembleSize, out mlpEnsemble);
    169190      } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    170191      alglib.mlpreport rep;
    171       int nRows = inputMatrix.GetLength(0);
    172192
    173193      int info;
    174       // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
    175       alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
    176       if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution");
    177 
    178       rmsError = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);
    179       avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);     
    180 
    181       return new NeuralNetworkRegressionSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));
     194      alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep);
      195      if (info != 6) throw new ArgumentException("Error in calculation of neural network ensemble classification solution");
     196
     197      rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows);
     198      avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows);
     199      relClassError = alglib.mlperelclserror(mlpEnsemble, inputMatrix, nRows);
     200
     201      return new NeuralNetworkEnsembleClassificationSolution(problemData, new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables, problemData.ClassValues.ToArray()));
    182202    }
    183203    #endregion
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassificationSolution.cs

    r6577 r6580  
    3131namespace HeuristicLab.Algorithms.DataAnalysis {
    3232  /// <summary>
    33   /// Represents a neural network solution for a regression problem which can be visualized in the GUI.
     33  /// Represents a neural network ensemble solution for a classification problem which can be visualized in the GUI.
    3434  /// </summary>
    35   [Item("NeuralNetworkRegressionSolution", "Represents a neural network solution for a regression problem which can be visualized in the GUI.")]
     35  [Item("NeuralNetworkEnsembleClassificationSolution", "Represents a neural network ensemble solution for a classification problem which can be visualized in the GUI.")]
    3636  [StorableClass]
    37   public sealed class NeuralNetworkRegressionSolution : RegressionSolution, INeuralNetworkRegressionSolution {
     37  public sealed class NeuralNetworkEnsembleClassificationSolution : ClassificationSolution, INeuralNetworkEnsembleClassificationSolution {
    3838
    39     public new INeuralNetworkModel Model {
    40       get { return (INeuralNetworkModel)base.Model; }
     39    public new INeuralNetworkEnsembleModel Model {
     40      get { return (INeuralNetworkEnsembleModel)base.Model; }
    4141      set { base.Model = value; }
    4242    }
    4343
    4444    [StorableConstructor]
    45     private NeuralNetworkRegressionSolution(bool deserializing) : base(deserializing) { }
    46     private NeuralNetworkRegressionSolution(NeuralNetworkRegressionSolution original, Cloner cloner)
     45    private NeuralNetworkEnsembleClassificationSolution(bool deserializing) : base(deserializing) { }
     46    private NeuralNetworkEnsembleClassificationSolution(NeuralNetworkEnsembleClassificationSolution original, Cloner cloner)
    4747      : base(original, cloner) {
    4848    }
    49     public NeuralNetworkRegressionSolution(IRegressionProblemData problemData, INeuralNetworkModel nnModel)
     49    public NeuralNetworkEnsembleClassificationSolution(IClassificationProblemData problemData, INeuralNetworkEnsembleModel nnModel)
    5050      : base(nnModel, problemData) {
    5151    }
    5252
    5353    public override IDeepCloneable Clone(Cloner cloner) {
    54       return new NeuralNetworkRegressionSolution(this, cloner);
     54      return new NeuralNetworkEnsembleClassificationSolution(this, cloner);
    5555    }
    5656  }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs

    r6577 r6580  
    3333namespace HeuristicLab.Algorithms.DataAnalysis {
    3434  /// <summary>
    35   /// Represents a neural network model for regression and classification
      35  /// Represents a neural network ensemble model for regression and classification
    3636  /// </summary>
    3737  [StorableClass]
    38   [Item("NeuralNetworkModel", "Represents a neural network for regression and classification.")]
    39   public sealed class NeuralNetworkModel : NamedItem, INeuralNetworkModel {
    40 
    41     private alglib.multilayerperceptron multiLayerPerceptron;
    42     public alglib.multilayerperceptron MultiLayerPerceptron {
    43       get { return multiLayerPerceptron; }
    44       set {
    45         if (value != multiLayerPerceptron) {
     38  [Item("NeuralNetworkEnsembleModel", "Represents a neural network ensemble for regression and classification.")]
     39  public sealed class NeuralNetworkEnsembleModel : NamedItem, INeuralNetworkEnsembleModel {
     40
     41    private alglib.mlpensemble mlpEnsemble;
     42    public alglib.mlpensemble MultiLayerPerceptronEnsemble {
     43      get { return mlpEnsemble; }
     44      set {
     45        if (value != mlpEnsemble) {
    4646          if (value == null) throw new ArgumentNullException();
    47           multiLayerPerceptron = value;
     47          mlpEnsemble = value;
    4848          OnChanged(EventArgs.Empty);
    4949        }
     
    5858    private double[] classValues;
    5959    [StorableConstructor]
    60     private NeuralNetworkModel(bool deserializing)
     60    private NeuralNetworkEnsembleModel(bool deserializing)
    6161      : base(deserializing) {
    6262      if (deserializing)
    63         multiLayerPerceptron = new alglib.multilayerperceptron();
    64     }
    65     private NeuralNetworkModel(NeuralNetworkModel original, Cloner cloner)
     63        mlpEnsemble = new alglib.mlpensemble();
     64    }
     65    private NeuralNetworkEnsembleModel(NeuralNetworkEnsembleModel original, Cloner cloner)
    6666      : base(original, cloner) {
    67       multiLayerPerceptron = new alglib.multilayerperceptron();
    68       multiLayerPerceptron.innerobj.chunks = (double[,])original.multiLayerPerceptron.innerobj.chunks.Clone();
    69       multiLayerPerceptron.innerobj.columnmeans = (double[])original.multiLayerPerceptron.innerobj.columnmeans.Clone();
    70       multiLayerPerceptron.innerobj.columnsigmas = (double[])original.multiLayerPerceptron.innerobj.columnsigmas.Clone();
    71       multiLayerPerceptron.innerobj.derror = (double[])original.multiLayerPerceptron.innerobj.derror.Clone();
    72       multiLayerPerceptron.innerobj.dfdnet = (double[])original.multiLayerPerceptron.innerobj.dfdnet.Clone();
    73       multiLayerPerceptron.innerobj.neurons = (double[])original.multiLayerPerceptron.innerobj.neurons.Clone();
    74       multiLayerPerceptron.innerobj.nwbuf = (double[])original.multiLayerPerceptron.innerobj.nwbuf.Clone();
    75       multiLayerPerceptron.innerobj.structinfo = (int[])original.multiLayerPerceptron.innerobj.structinfo.Clone();
    76       multiLayerPerceptron.innerobj.weights = (double[])original.multiLayerPerceptron.innerobj.weights.Clone();
    77       multiLayerPerceptron.innerobj.x = (double[])original.multiLayerPerceptron.innerobj.x.Clone();
    78       multiLayerPerceptron.innerobj.y = (double[])original.multiLayerPerceptron.innerobj.y.Clone();
     67      mlpEnsemble = new alglib.mlpensemble();
     68      mlpEnsemble.innerobj.columnmeans = (double[])original.mlpEnsemble.innerobj.columnmeans.Clone();
     69      mlpEnsemble.innerobj.columnsigmas = (double[])original.mlpEnsemble.innerobj.columnsigmas.Clone();
     70      mlpEnsemble.innerobj.dfdnet = (double[])original.mlpEnsemble.innerobj.dfdnet.Clone();
     71      mlpEnsemble.innerobj.ensemblesize = original.mlpEnsemble.innerobj.ensemblesize;
     72      mlpEnsemble.innerobj.issoftmax = original.mlpEnsemble.innerobj.issoftmax;
     73      mlpEnsemble.innerobj.neurons = (double[])original.mlpEnsemble.innerobj.neurons.Clone();
     74      mlpEnsemble.innerobj.nin = original.mlpEnsemble.innerobj.nin;
     75      mlpEnsemble.innerobj.nout = original.mlpEnsemble.innerobj.nout;
     76      mlpEnsemble.innerobj.postprocessing = original.mlpEnsemble.innerobj.postprocessing;
     77      mlpEnsemble.innerobj.serializedlen = original.mlpEnsemble.innerobj.serializedlen;
     78      mlpEnsemble.innerobj.serializedmlp = (double[])original.mlpEnsemble.innerobj.serializedmlp.Clone();
     79      mlpEnsemble.innerobj.structinfo = (int[])original.mlpEnsemble.innerobj.structinfo.Clone();
     80      mlpEnsemble.innerobj.tmpmeans = (double[])original.mlpEnsemble.innerobj.tmpmeans.Clone();
     81      mlpEnsemble.innerobj.tmpsigmas = (double[])original.mlpEnsemble.innerobj.tmpsigmas.Clone();
     82      mlpEnsemble.innerobj.tmpweights = (double[])original.mlpEnsemble.innerobj.tmpweights.Clone();
     83      mlpEnsemble.innerobj.wcount = original.mlpEnsemble.innerobj.wcount;
     84      mlpEnsemble.innerobj.weights = (double[])original.mlpEnsemble.innerobj.weights.Clone();
     85      mlpEnsemble.innerobj.y = (double[])original.mlpEnsemble.innerobj.y.Clone();
    7986      targetVariable = original.targetVariable;
    8087      allowedInputVariables = (string[])original.allowedInputVariables.Clone();
     
    8289        this.classValues = (double[])original.classValues.Clone();
    8390    }
    84     public NeuralNetworkModel(alglib.multilayerperceptron multiLayerPerceptron, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
     91    public NeuralNetworkEnsembleModel(alglib.mlpensemble mlpEnsemble, string targetVariable, IEnumerable<string> allowedInputVariables, double[] classValues = null)
    8592      : base() {
    8693      this.name = ItemName;
    8794      this.description = ItemDescription;
    88       this.multiLayerPerceptron = multiLayerPerceptron;
     95      this.mlpEnsemble = mlpEnsemble;
    8996      this.targetVariable = targetVariable;
    9097      this.allowedInputVariables = allowedInputVariables.ToArray();
     
    94101
    95102    public override IDeepCloneable Clone(Cloner cloner) {
    96       return new NeuralNetworkModel(this, cloner);
     103      return new NeuralNetworkEnsembleModel(this, cloner);
    97104    }
    98105
     
    109116          x[column] = inputData[row, column];
    110117        }
    111         alglib.mlpprocess(multiLayerPerceptron, x, ref y);
     118        alglib.mlpeprocess(mlpEnsemble, x, ref y);
    112119        yield return y[0];
    113120      }
     
    126133          x[column] = inputData[row, column];
    127134        }
    128         alglib.mlpprocess(multiLayerPerceptron, x, ref y);
     135        alglib.mlpeprocess(mlpEnsemble, x, ref y);
    129136        // find class for with the largest probability value
    130137        int maxProbClassIndex = 0;
     
    151158    #region persistence
    152159    [Storable]
    153     private double[,] MultiLayerPerceptronChunks {
    154       get {
    155         return multiLayerPerceptron.innerobj.chunks;
    156       }
    157       set {
    158         multiLayerPerceptron.innerobj.chunks = value;
    159       }
    160     }
    161     [Storable]
    162     private double[] MultiLayerPerceptronColumnMeans {
    163       get {
    164         return multiLayerPerceptron.innerobj.columnmeans;
    165       }
    166       set {
    167         multiLayerPerceptron.innerobj.columnmeans = value;
    168       }
    169     }
    170     [Storable]
    171     private double[] MultiLayerPerceptronColumnSigmas {
    172       get {
    173         return multiLayerPerceptron.innerobj.columnsigmas;
    174       }
    175       set {
    176         multiLayerPerceptron.innerobj.columnsigmas = value;
    177       }
    178     }
    179     [Storable]
    180     private double[] MultiLayerPerceptronDError {
    181       get {
    182         return multiLayerPerceptron.innerobj.derror;
    183       }
    184       set {
    185         multiLayerPerceptron.innerobj.derror = value;
    186       }
    187     }
    188     [Storable]
    189     private double[] MultiLayerPerceptronDfdnet {
    190       get {
    191         return multiLayerPerceptron.innerobj.dfdnet;
    192       }
    193       set {
    194         multiLayerPerceptron.innerobj.dfdnet = value;
    195       }
    196     }
    197     [Storable]
    198     private double[] MultiLayerPerceptronNeurons {
    199       get {
    200         return multiLayerPerceptron.innerobj.neurons;
    201       }
    202       set {
    203         multiLayerPerceptron.innerobj.neurons = value;
    204       }
    205     }
    206     [Storable]
    207     private double[] MultiLayerPerceptronNwbuf {
    208       get {
    209         return multiLayerPerceptron.innerobj.nwbuf;
    210       }
    211       set {
    212         multiLayerPerceptron.innerobj.nwbuf = value;
     160    private double[] MultiLayerPerceptronEnsembleColumnMeans {
     161      get {
     162        return mlpEnsemble.innerobj.columnmeans;
     163      }
     164      set {
     165        mlpEnsemble.innerobj.columnmeans = value;
     166      }
     167    }
     168    [Storable]
     169    private double[] MultiLayerPerceptronEnsembleColumnSigmas {
     170      get {
     171        return mlpEnsemble.innerobj.columnsigmas;
     172      }
     173      set {
     174        mlpEnsemble.innerobj.columnsigmas = value;
     175      }
     176    }
     177    [Storable]
     178    private double[] MultiLayerPerceptronEnsembleDfdnet {
     179      get {
     180        return mlpEnsemble.innerobj.dfdnet;
     181      }
     182      set {
     183        mlpEnsemble.innerobj.dfdnet = value;
     184      }
     185    }
     186    [Storable]
     187    private int MultiLayerPerceptronEnsembleSize {
     188      get {
     189        return mlpEnsemble.innerobj.ensemblesize;
     190      }
     191      set {
     192        mlpEnsemble.innerobj.ensemblesize = value;
     193      }
     194    }
     195    [Storable]
     196    private bool MultiLayerPerceptronEnsembleIsSoftMax {
     197      get {
     198        return mlpEnsemble.innerobj.issoftmax;
     199      }
     200      set {
     201        mlpEnsemble.innerobj.issoftmax = value;
     202      }
     203    }
     204    [Storable]
     205    private double[] MultiLayerPerceptronEnsembleNeurons {
     206      get {
     207        return mlpEnsemble.innerobj.neurons;
     208      }
     209      set {
     210        mlpEnsemble.innerobj.neurons = value;
     211      }
     212    }
     213    [Storable]
     214    private int MultiLayerPerceptronEnsembleNin {
     215      get {
     216        return mlpEnsemble.innerobj.nin;
     217      }
     218      set {
     219        mlpEnsemble.innerobj.nin = value;
     220      }
     221    }
     222    [Storable]
     223    private int MultiLayerPerceptronEnsembleNout {
     224      get {
     225        return mlpEnsemble.innerobj.nout;
     226      }
     227      set {
     228        mlpEnsemble.innerobj.nout = value;
     229      }
     230    }
     231    [Storable]
     232    private bool MultiLayerPerceptronEnsemblePostprocessing {
     233      get {
     234        return mlpEnsemble.innerobj.postprocessing;
     235      }
     236      set {
     237        mlpEnsemble.innerobj.postprocessing = value;
     238      }
     239    }
     240    [Storable]
     241    private int MultiLayerPerceptronEnsembleSerializedLen {
     242      get {
     243        return mlpEnsemble.innerobj.serializedlen;
     244      }
     245      set {
     246        mlpEnsemble.innerobj.serializedlen = value;
     247      }
     248    }
     249    [Storable]
     250    private double[] MultiLayerPerceptronEnsembleSerializedMlp {
     251      get {
     252        return mlpEnsemble.innerobj.serializedmlp;
     253      }
     254      set {
     255        mlpEnsemble.innerobj.serializedmlp = value;
    213256      }
    214257    }
     
    216259    private int[] MultiLayerPerceptronStuctinfo {
    217260      get {
    218         return multiLayerPerceptron.innerobj.structinfo;
    219       }
    220       set {
    221         multiLayerPerceptron.innerobj.structinfo = value;
    222       }
    223     }
     261        return mlpEnsemble.innerobj.structinfo;
     262      }
     263      set {
     264        mlpEnsemble.innerobj.structinfo = value;
     265      }
     266    }
     267    [Storable]
     268    private double[] MultiLayerPerceptronEnsembleTmpMeans {
     269      get {
     270        return mlpEnsemble.innerobj.tmpmeans;
     271      }
     272      set {
     273        mlpEnsemble.innerobj.tmpmeans = value;
     274      }
     275    }
     276    [Storable]
     277    private double[] MultiLayerPerceptronEnsembleTmpSigmas {
     278      get {
     279        return mlpEnsemble.innerobj.tmpsigmas;
     280      }
     281      set {
     282        mlpEnsemble.innerobj.tmpsigmas = value;
     283      }
     284    }
     285    [Storable]
     286    private double[] MultiLayerPerceptronEnsembleTmpWeights {
     287      get {
     288        return mlpEnsemble.innerobj.tmpweights;
     289      }
     290      set {
     291        mlpEnsemble.innerobj.tmpweights = value;
     292      }
     293    }
     294    [Storable]
     295    private int MultiLayerPerceptronEnsembleWCount {
     296      get {
     297        return mlpEnsemble.innerobj.wcount;
     298      }
     299      set {
     300        mlpEnsemble.innerobj.wcount = value;
     301      }
     302    }
     303
    224304    [Storable]
    225305    private double[] MultiLayerPerceptronWeights {
    226306      get {
    227         return multiLayerPerceptron.innerobj.weights;
    228       }
    229       set {
    230         multiLayerPerceptron.innerobj.weights = value;
    231       }
    232     }
    233     [Storable]
    234     private double[] MultiLayerPerceptronX {
    235       get {
    236         return multiLayerPerceptron.innerobj.x;
    237       }
    238       set {
    239         multiLayerPerceptron.innerobj.x = value;
     307        return mlpEnsemble.innerobj.weights;
     308      }
     309      set {
     310        mlpEnsemble.innerobj.weights = value;
    240311      }
    241312    }
     
    243314    private double[] MultiLayerPerceptronY {
    244315      get {
    245         return multiLayerPerceptron.innerobj.y;
    246       }
    247       set {
    248         multiLayerPerceptron.innerobj.y = value;
     316        return mlpEnsemble.innerobj.y;
     317      }
     318      set {
     319        mlpEnsemble.innerobj.y = value;
    249320      }
    250321    }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegression.cs

    r6579 r6580  
    3636namespace HeuristicLab.Algorithms.DataAnalysis {
    3737  /// <summary>
    38   /// Neural network regression data analysis algorithm.
     38  /// Neural network ensemble regression data analysis algorithm.
    3939  /// </summary>
    40   [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB).")]
     40  [Item("Neural Network Ensemble Regression", "Neural network ensemble regression data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/mlpensembles.php")]
    4141  [Creatable("Data Analysis")]
    4242  [StorableClass]
    43   public sealed class NeuralNetworkRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
     43  public sealed class NeuralNetworkEnsembleRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
     44    private const string EnsembleSizeParameterName = "EnsembleSize";
    4445    private const string DecayParameterName = "Decay";
    4546    private const string HiddenLayersParameterName = "HiddenLayers";
     
    4748    private const string NodesInSecondHiddenLayerParameterName = "NodesInSecondHiddenLayer";
    4849    private const string RestartsParameterName = "Restarts";
    49     private const string NeuralNetworkRegressionModelResultName = "Neural network regression solution";
     50    private const string NeuralNetworkEnsembleRegressionModelResultName = "Neural network ensemble regression solution";
    5051
    5152    #region parameter properties
     53    public IFixedValueParameter<IntValue> EnsembleSizeParameter {
     54      get { return (IFixedValueParameter<IntValue>)Parameters[EnsembleSizeParameterName]; }
     55    }
    5256    public IFixedValueParameter<DoubleValue> DecayParameter {
    5357      get { return (IFixedValueParameter<DoubleValue>)Parameters[DecayParameterName]; }
     
    6872
    6973    #region properties
     74    public int EnsembleSize {
     75      get { return EnsembleSizeParameter.Value.Value; }
     76      set {
     77        if (value < 1) throw new ArgumentException("The number of models in the ensemble must be positive and at least one.", "EnsembleSize");
     78        EnsembleSizeParameter.Value.Value = value;
     79      }
     80    }
    7081    public double Decay {
    7182      get { return DecayParameter.Value.Value; }
     
    110121
    111122    [StorableConstructor]
    112     private NeuralNetworkRegression(bool deserializing) : base(deserializing) { }
    113     private NeuralNetworkRegression(NeuralNetworkRegression original, Cloner cloner)
     123    private NeuralNetworkEnsembleRegression(bool deserializing) : base(deserializing) { }
     124    private NeuralNetworkEnsembleRegression(NeuralNetworkEnsembleRegression original, Cloner cloner)
    114125      : base(original, cloner) {
    115126    }
    116     public NeuralNetworkRegression()
     127    public NeuralNetworkEnsembleRegression()
    117128      : base() {
    118129      var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { new IntValue(0), new IntValue(1), new IntValue(2) });
     
    121132                                      select v)
    122133                                     .Single();
    123       Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strengh of regularization and should be set to a value between 0.001 (weak regularization) to 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(1)));
     134      Parameters.Add(new FixedValueParameter<IntValue>(EnsembleSizeParameterName, "The number of simple neural network models in the ensemble. A good value is 10.", new IntValue(10)));
     135      Parameters.Add(new FixedValueParameter<DoubleValue>(DecayParameterName, "The decay parameter for the training phase of the neural network. This parameter determines the strength of regularization and should be set to a value between 0.001 (weak regularization) to 100 (very strong regularization). The correct value should be determined via cross-validation.", new DoubleValue(0.001)));
    124136      Parameters.Add(new ConstrainedValueParameter<IntValue>(HiddenLayersParameterName, "The number of hidden layers for the neural network (0, 1, or 2)", validHiddenLayerValues, selectedHiddenLayerValue));
    125       Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. This value is not used if the number of hidden layers is zero.", new IntValue(10)));
    126       Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(10)));
     137      Parameters.Add(new FixedValueParameter<IntValue>(NodesInFirstHiddenLayerParameterName, "The number of nodes in the first hidden layer. The value should be rather large (30-100 nodes) in order to make the network highly flexible and run into the early stopping criterion. This value is not used if the number of hidden layers is zero.", new IntValue(100)));
     138      Parameters.Add(new FixedValueParameter<IntValue>(NodesInSecondHiddenLayerParameterName, "The number of nodes in the second hidden layer. This value is not used if the number of hidden layers is zero or one.", new IntValue(100)));
    127139      Parameters.Add(new FixedValueParameter<IntValue>(RestartsParameterName, "The number of restarts for learning.", new IntValue(2)));
    128140
     
    133145
    134146    public override IDeepCloneable Clone(Cloner cloner) {
    135       return new NeuralNetworkRegression(this, cloner);
     147      return new NeuralNetworkEnsembleRegression(this, cloner);
    136148    }
    137149
    138     #region neural network
     150    #region neural network ensemble
    139151    protected override void Run() {
    140152      double rmsError, avgRelError;
    141       var solution = CreateNeuralNetworkRegressionSolution(Problem.ProblemData, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError);
    142       Results.Add(new Result(NeuralNetworkRegressionModelResultName, "The neural network regression solution.", solution));
    143       Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network regression solution on the training set.", new DoubleValue(rmsError)));
    144       Results.Add(new Result("Average relative error", "The average of relative errors of the neural network regression solution on the training set.", new PercentValue(avgRelError)));
     153      var solution = CreateNeuralNetworkEnsembleRegressionSolution(Problem.ProblemData, EnsembleSize, HiddenLayers, NodesInFirstHiddenLayer, NodesInSecondHiddenLayer, Decay, Restarts, out rmsError, out avgRelError);
     154      Results.Add(new Result(NeuralNetworkEnsembleRegressionModelResultName, "The neural network ensemble regression solution.", solution));
     155      Results.Add(new Result("Root mean square error", "The root of the mean of squared errors of the neural network ensemble regression solution on the training set.", new DoubleValue(rmsError)));
     156      Results.Add(new Result("Average relative error", "The average of relative errors of the neural network ensemble regression solution on the training set.", new PercentValue(avgRelError)));
    145157    }
    146158
    147     public static IRegressionSolution CreateNeuralNetworkRegressionSolution(IRegressionProblemData problemData, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
     159    public static IRegressionSolution CreateNeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, int ensembleSize, int nLayers, int nHiddenNodes1, int nHiddenNodes2, double decay, int restarts,
    148160      out double rmsError, out double avgRelError) {
    149161      Dataset dataset = problemData.Dataset;
     
    153165      double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
    154166      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
    155         throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");
     167        throw new NotSupportedException("Neural network ensemble regression does not support NaN or infinity values in the input dataset.");
    156168
    157       double targetMin = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Min();
    158       targetMin = targetMin - targetMin * 0.1; // -10%
    159       double targetMax = problemData.Dataset.GetEnumeratedVariableValues(targetVariable).Max();
    160       targetMax = targetMax + targetMax * 0.1; // + 10%
    161 
    162       alglib.multilayerperceptron multiLayerPerceptron = null;
     169      alglib.mlpensemble mlpEnsemble = null;
    163170      if (nLayers == 0) {
    164         alglib.mlpcreater0(allowedInputVariables.Count(), 1, targetMin, targetMax, out multiLayerPerceptron);
     171        alglib.mlpecreate0(allowedInputVariables.Count(), 1, ensembleSize, out mlpEnsemble);
    165172      } else if (nLayers == 1) {
    166         alglib.mlpcreater1(allowedInputVariables.Count(), nHiddenNodes1, 1, targetMin, targetMax, out multiLayerPerceptron);
     173        alglib.mlpecreate1(allowedInputVariables.Count(), nHiddenNodes1, 1, ensembleSize, out mlpEnsemble);
    167174      } else if (nLayers == 2) {
    168         alglib.mlpcreater2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, targetMin, targetMax, out multiLayerPerceptron);
     175        alglib.mlpecreate2(allowedInputVariables.Count(), nHiddenNodes1, nHiddenNodes2, 1, ensembleSize, out mlpEnsemble);
    169176      } else throw new ArgumentException("Number of layers must be zero, one, or two.", "nLayers");
    170177      alglib.mlpreport rep;
     
    172179
    173180      int info;
    174       // using mlptrainlm instead of mlptraines or mlptrainbfgs because only one parameter is necessary
    175       alglib.mlptrainlm(multiLayerPerceptron, inputMatrix, nRows, decay, restarts, out info, out rep);
    176       if (info != 2) throw new ArgumentException("Error in calculation of neural network regression solution");
     181      alglib.mlpetraines(mlpEnsemble, inputMatrix, nRows, decay, restarts, out info, out rep);
     182      if (info != 6) throw new ArgumentException("Error in calculation of neural network ensemble regression solution");
    177183
    178       rmsError = alglib.mlprmserror(multiLayerPerceptron, inputMatrix, nRows);
    179       avgRelError = alglib.mlpavgrelerror(multiLayerPerceptron, inputMatrix, nRows);     
     184      rmsError = alglib.mlpermserror(mlpEnsemble, inputMatrix, nRows);
     185      avgRelError = alglib.mlpeavgrelerror(mlpEnsemble, inputMatrix, nRows);
    180186
    181       return new NeuralNetworkRegressionSolution(problemData, new NeuralNetworkModel(multiLayerPerceptron, targetVariable, allowedInputVariables));
     187      return new NeuralNetworkEnsembleRegressionSolution(problemData, new NeuralNetworkEnsembleModel(mlpEnsemble, targetVariable, allowedInputVariables));
    182188    }
    183189    #endregion
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegressionSolution.cs

    r6577 r6580  
    3131namespace HeuristicLab.Algorithms.DataAnalysis {
    3232  /// <summary>
    33   /// Represents a neural network solution for a regression problem which can be visualized in the GUI.
     33  /// Represents a neural network ensemble solution for a regression problem which can be visualized in the GUI.
    3434  /// </summary>
    35   [Item("NeuralNetworkRegressionSolution", "Represents a neural network solution for a regression problem which can be visualized in the GUI.")]
     35  [Item("NeuralNetworkEnsembleRegressionSolution", "Represents a neural network ensemble solution for a regression problem which can be visualized in the GUI.")]
    3636  [StorableClass]
    37   public sealed class NeuralNetworkRegressionSolution : RegressionSolution, INeuralNetworkRegressionSolution {
     37  public sealed class NeuralNetworkEnsembleRegressionSolution : RegressionSolution, INeuralNetworkEnsembleRegressionSolution {
    3838
    39     public new INeuralNetworkModel Model {
    40       get { return (INeuralNetworkModel)base.Model; }
     39    public new INeuralNetworkEnsembleModel Model {
     40      get { return (INeuralNetworkEnsembleModel)base.Model; }
    4141      set { base.Model = value; }
    4242    }
    4343
    4444    [StorableConstructor]
    45     private NeuralNetworkRegressionSolution(bool deserializing) : base(deserializing) { }
    46     private NeuralNetworkRegressionSolution(NeuralNetworkRegressionSolution original, Cloner cloner)
     45    private NeuralNetworkEnsembleRegressionSolution(bool deserializing) : base(deserializing) { }
     46    private NeuralNetworkEnsembleRegressionSolution(NeuralNetworkEnsembleRegressionSolution original, Cloner cloner)
    4747      : base(original, cloner) {
    4848    }
    49     public NeuralNetworkRegressionSolution(IRegressionProblemData problemData, INeuralNetworkModel nnModel)
     49    public NeuralNetworkEnsembleRegressionSolution(IRegressionProblemData problemData, INeuralNetworkEnsembleModel nnModel)
    5050      : base(nnModel, problemData) {
    5151    }
    5252
    5353    public override IDeepCloneable Clone(Cloner cloner) {
    54       return new NeuralNetworkRegressionSolution(this, cloner);
     54      return new NeuralNetworkEnsembleRegressionSolution(this, cloner);
    5555    }
    5656  }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs

    r6579 r6580  
    3838  /// Neural network regression data analysis algorithm.
    3939  /// </summary>
    40   [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB).")]
     40  [Item("Neural Network Regression", "Neural network regression data analysis algorithm (wrapper for ALGLIB). Further documentation: http://www.alglib.net/dataanalysis/neuralnetworks.php")]
    4141  [Creatable("Data Analysis")]
    4242  [StorableClass]
Note: See TracChangeset for help on using the changeset viewer.