#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2017 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion
using System.Linq;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Algorithms.OffspringSelectionGeneticAlgorithm;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Core.Networks;
using HeuristicLab.Data;
using HeuristicLab.Encodings.BinaryVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Networks.IntegratedOptimization.MachineLearning {
  [Item("Feature Selection Network 2", "")]
  [Creatable("Optimization Networks")]
  [StorableClass]
  public sealed class FeatureSelectionNetwork : Network {
    [StorableConstructor]
    private FeatureSelectionNetwork(bool deserializing) : base(deserializing) { }

    private FeatureSelectionNetwork(FeatureSelectionNetwork original, Cloner cloner)
      : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new FeatureSelectionNetwork(this, cloner);
    }
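
    // The two algorithm nodes and the orchestrator are marked storable so the network
    // structure survives (de)serialization.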
    [Storable]
    private readonly OrchestratedAlgorithmNode FeatureSelectionAlgorithmNode;
    [Storable]
    private readonly OrchestratedAlgorithmNode RegressionAlgorithmNode;
    [Storable]
    private readonly FeatureSelectionOrchestrator Orchestrator;
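
    // Wires up the network: the orchestrator node, a feature selection node whose algorithm
    // is an offspring selection GA over an orchestrated binary problem, and a regression node
    // running linear regression. The feature selection node's evaluation port is connected to
    // the orchestrator, which in turn drives the regression node through its orchestration port.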
    public FeatureSelectionNetwork()
      : base() {
      Orchestrator = new FeatureSelectionOrchestrator(this);
      Nodes.Add(Orchestrator);

      var featureSelectionAlgorithm = CreateFeatureSelectionAlgorithm();
      //TODO configure FeatureSelectionProblem
      FeatureSelectionAlgorithmNode = new OrchestratedAlgorithmNode("Feature Selection");
      FeatureSelectionAlgorithmNode.Algorithm = featureSelectionAlgorithm;
      FeatureSelectionAlgorithmNode.EvaluationPort.ConnectedPort = Orchestrator.FeatureSelectionEvaluationPort;
      FeatureSelectionAlgorithmNode.EvaluationPort.CloneParametersFromPort(Orchestrator.FeatureSelectionEvaluationPort);
      Nodes.Add(FeatureSelectionAlgorithmNode);

      var regressionAlgorithm = CreateRegressionAlgorithm();
      RegressionAlgorithmNode = new OrchestratedAlgorithmNode("Regression");
      RegressionAlgorithmNode.Algorithm = regressionAlgorithm;
      Orchestrator.RegressionOrchestrationPort.ConnectedPort = RegressionAlgorithmNode.OrchestrationPort;
      RegressionAlgorithmNode.OrchestrationPort.CloneParametersFromPort(Orchestrator.RegressionOrchestrationPort);
      Nodes.Add(RegressionAlgorithmNode);
    }
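
    // Feature selection search: candidate solutions are binary vectors over the input variables,
    // created sparsely (each bit set with probability 0.2). The offspring selection GA is
    // configured with strict offspring selection (comparison factor bounds and success ratio
    // of 1), a population of 100 and a mutation probability of 0.15.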
    private IAlgorithm CreateFeatureSelectionAlgorithm() {
      var problem = new OrchestratedBinaryProblem(Orchestrator, 0, false);
      problem.Encoding.SolutionCreator = new RandomBinaryVectorCreator() { TrueProbability = new DoubleValue(0.2) };

      var osga = new OffspringSelectionGeneticAlgorithm();
      osga.Problem = problem;
      osga.PopulationSize.Value = 100;
      osga.ComparisonFactorLowerBound.Value = 1;
      osga.ComparisonFactorUpperBound.Value = 1;
      osga.SuccessRatio.Value = 1.0;
      osga.MutationProbability.Value = 0.15;
      // bit-flip manipulator for binary vectors (assumed mutator choice)
      osga.Mutator = osga.MutatorParameter.ValidValues.OfType<SomePositionsBitflipManipulator>().First();
      return osga;
    }
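
    // The regression algorithm that the orchestrator presumably runs for candidate feature
    // subsets; plain (ordinary least squares) linear regression is used here.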
    private static IAlgorithm CreateRegressionAlgorithm() {
      var linreg = new LinearRegression();
      return linreg;
    }
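
    // Sends a Prepare message (optionally combined with ClearRuns) to the feature selection
    // node's orchestration port.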
    public void Prepare() { Prepare(false); }
    public void Prepare(bool clearRuns) {
      var msg = FeatureSelectionAlgorithmNode.OrchestrationPort.PrepareMessage();
      if (clearRuns)
        msg["OrchestrationMessage"] = new EnumValue<OrchestrationMessage>(OrchestrationMessage.Prepare | OrchestrationMessage.ClearRuns);
      else
        msg["OrchestrationMessage"] = new EnumValue<OrchestrationMessage>(OrchestrationMessage.Prepare);
      FeatureSelectionAlgorithmNode.OrchestrationPort.ReceiveMessage(msg, new CancellationToken());
    }
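
    // Sets the length of the binary encoding to the number of allowed input variables of the
    // regression problem data, then sends a Start message to the feature selection node.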
    public void Start() {
      var problem = (OrchestratedBinaryProblem)FeatureSelectionAlgorithmNode.Algorithm.Problem;
      problem.Encoding.Length = Orchestrator.RegressionProblemData.AllowedInputVariables.Count();

      var msg = FeatureSelectionAlgorithmNode.OrchestrationPort.PrepareMessage();
      msg["OrchestrationMessage"] = new EnumValue<OrchestrationMessage>(OrchestrationMessage.Start);
      FeatureSelectionAlgorithmNode.OrchestrationPort.ReceiveMessage(msg, new CancellationToken());
    }
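
    // Sends a Pause message to the feature selection node.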
    public void Pause() {
      var msg = FeatureSelectionAlgorithmNode.OrchestrationPort.PrepareMessage();
      msg["OrchestrationMessage"] = new EnumValue<OrchestrationMessage>(OrchestrationMessage.Pause);
      FeatureSelectionAlgorithmNode.OrchestrationPort.ReceiveMessage(msg, new CancellationToken());
    }
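
    // Sends a Stop message to the feature selection node and clears the runs accumulated by
    // the regression algorithm.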
    public void Stop() {
      var msg = FeatureSelectionAlgorithmNode.OrchestrationPort.PrepareMessage();
      msg["OrchestrationMessage"] = new EnumValue<OrchestrationMessage>(OrchestrationMessage.Stop);
      FeatureSelectionAlgorithmNode.OrchestrationPort.ReceiveMessage(msg, new CancellationToken());

      RegressionAlgorithmNode.Algorithm.Runs.Clear();
    }
  }
}