#region License Information
/* HeuristicLab
* Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Algorithms.OffspringSelectionGeneticAlgorithm;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.IntegerVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.Instances;
using HeuristicLab.Random;
namespace HeuristicLab.Algorithms.EGO {
[StorableClass]
[Creatable(CreatableAttribute.Categories.Algorithms, Priority = 95)]
[Item("DiscreteEfficientGlobalOptimizationAlgorithm", "Solves a problem by sequentially learning a model, solving a subproblem on the model and evaluating the best found solution for this subproblem.")]
public class DiscreteEfficientGlobalOptimizationAlgorithm : BasicAlgorithm, ISurrogateAlgorithm<IntegerVector> {
#region Basic-Alg-Essentials
public override bool SupportsPause => true;
public override Type ProblemType => typeof(SingleObjectiveBasicProblem<IEncoding>);
public new SingleObjectiveBasicProblem<IEncoding> Problem {
get { return (SingleObjectiveBasicProblem<IEncoding>)base.Problem; }
set { base.Problem = value; }
}
#endregion
#region ParameterNames
private const string GenerationSizeParameterName = "GenerationSize";
private const string InfillCriterionParameterName = "InfillCriterion";
private const string InfillOptimizationAlgorithmParameterName = "InfillOptimizationAlgorithm";
private const string InfillOptimizationRestartsParameterName = "InfillOptimizationRestarts";
private const string InitialEvaluationsParameterName = "Initial Evaluations";
private const string MaximumEvaluationsParameterName = "Maximum Evaluations";
private const string MaximumRuntimeParameterName = "Maximum Runtime";
private const string RegressionAlgorithmParameterName = "RegressionAlgorithm";
private const string SeedParameterName = "Seed";
private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
private const string MaximalDataSetSizeParameterName = "MaximalDataSetSize";
private const string RemoveDuplicatesParameterName = "RemoveDuplicates";
private const string InitialSamplesParameterName = "InitialSamplesFile";
private const string BaselineVectorParameterName = "BaselineVector";
private const string InitialSamplingPlanParameterName = "InitialSamplingPlan";
#endregion
#region ResultNames
private const string BestQualityResultName = "Best Quality";
private const string BestSolutionResultName = "Best Solution";
private const string EvaluatedSolutionsResultName = "EvaluatedSolutions";
private const string IterationsResultName = "Iterations";
private const string RegressionSolutionResultName = "Model";
private const string QualitiesChartResultName = "Qualities";
private const string BestQualitiesRowResultName = "Best Quality";
private const string CurrentQualitiesRowResultName = "Current Quality";
private const string WorstQualitiesRowResultName = "Worst Quality";
#endregion
#region ParameterProperties
public IFixedValueParameter<IntValue> GenerationSizeParameter => Parameters[GenerationSizeParameterName] as IFixedValueParameter<IntValue>;
public IConstrainedValueParameter<IInfillCriterion> InfillCriterionParameter => Parameters[InfillCriterionParameterName] as IConstrainedValueParameter<IInfillCriterion>;
public IValueParameter<Algorithm> InfillOptimizationAlgorithmParameter => Parameters[InfillOptimizationAlgorithmParameterName] as IValueParameter<Algorithm>;
public IFixedValueParameter<IntValue> InfillOptimizationRestartsParameter => Parameters[InfillOptimizationRestartsParameterName] as IFixedValueParameter<IntValue>;
public IFixedValueParameter<IntValue> InitialEvaluationsParameter => Parameters[InitialEvaluationsParameterName] as IFixedValueParameter<IntValue>;
public IFixedValueParameter<IntValue> MaximumEvaluationsParameter => Parameters[MaximumEvaluationsParameterName] as IFixedValueParameter<IntValue>;
public IFixedValueParameter<IntValue> MaximumRuntimeParameter => Parameters[MaximumRuntimeParameterName] as IFixedValueParameter<IntValue>;
public IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>> RegressionAlgorithmParameter => Parameters[RegressionAlgorithmParameterName] as IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>;
public IFixedValueParameter<IntValue> SeedParameter => Parameters[SeedParameterName] as IFixedValueParameter<IntValue>;
public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter => Parameters[SetSeedRandomlyParameterName] as IFixedValueParameter<BoolValue>;
public IFixedValueParameter<IntValue> MaximalDataSetSizeParameter => Parameters[MaximalDataSetSizeParameterName] as IFixedValueParameter<IntValue>;
public IFixedValueParameter<BoolValue> RemoveDuplicatesParameter => Parameters[RemoveDuplicatesParameterName] as IFixedValueParameter<BoolValue>;
public IFixedValueParameter<FileValue> InitialSamplesParameter => Parameters[InitialSamplesParameterName] as IFixedValueParameter<FileValue>;
public IValueParameter<IntegerVector> BaselineVectorParameter => Parameters[BaselineVectorParameterName] as IValueParameter<IntegerVector>;
public IConstrainedValueParameter<IInitialSampling<IntegerVector>> InitialSamplingPlanParameter => Parameters[InitialSamplingPlanParameterName] as IConstrainedValueParameter<IInitialSampling<IntegerVector>>;
#endregion
#region Properties
public int GenerationSize => GenerationSizeParameter.Value.Value;
public IInfillCriterion InfillCriterion => InfillCriterionParameter.Value;
public Algorithm InfillOptimizationAlgorithm => InfillOptimizationAlgorithmParameter.Value;
public int InfillOptimizationRestarts => InfillOptimizationRestartsParameter.Value.Value;
public int InitialEvaluations => InitialEvaluationsParameter.Value.Value;
public int MaximumEvaluations => MaximumEvaluationsParameter.Value.Value;
public int MaximumRuntime => MaximumRuntimeParameter.Value.Value;
public IDataAnalysisAlgorithm<IRegressionProblem> RegressionAlgorithm => RegressionAlgorithmParameter.Value;
public int Seed => SeedParameter.Value.Value;
public bool SetSeedRandomly => SetSeedRandomlyParameter.Value.Value;
public int MaximalDatasetSize => MaximalDataSetSizeParameter.Value.Value;
private IEnumerable<Tuple<IntegerVector, double>> DataSamples => Samples.Count > MaximalDatasetSize && MaximalDatasetSize > 0
? Samples.Skip(Samples.Count - MaximalDatasetSize)
: Samples;
private bool RemoveDuplicates => RemoveDuplicatesParameter.Value.Value;
private IntegerVector BaselineVector => BaselineVectorParameter.Value;
private IInitialSampling<IntegerVector> InitialSamplingPlan => InitialSamplingPlanParameter.Value;
#endregion
#region StorableProperties
[Storable]
private IRandom Random = new MersenneTwister();
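//Samples contains all evaluated points as (solution vector, quality) pairs;
//InitialSamples holds samples provided up front (e.g. via SetInitialSamples) that seed the Samples list on initialization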
[Storable]
private List<Tuple<IntegerVector, double>> Samples;
[Storable]
private List<Tuple<IntegerVector, double>> InitialSamples;
#endregion
#region ResultsProperties
private double ResultsBestQuality {
get { return ((DoubleValue)Results[BestQualityResultName].Value).Value; }
set { ((DoubleValue)Results[BestQualityResultName].Value).Value = value; }
}
private IntegerVector ResultsBestSolution {
get { return (IntegerVector)Results[BestSolutionResultName].Value; }
set { Results[BestSolutionResultName].Value = value; }
}
private int ResultsEvaluations {
get { return ((IntValue)Results[EvaluatedSolutionsResultName].Value).Value; }
set { ((IntValue)Results[EvaluatedSolutionsResultName].Value).Value = value; }
}
private int ResultsIterations {
get { return ((IntValue)Results[IterationsResultName].Value).Value; }
set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
}
private DataTable ResultsQualities => (DataTable)Results[QualitiesChartResultName].Value;
private DataRow ResultsQualitiesBest => ResultsQualities.Rows[BestQualitiesRowResultName];
private DataRow ResultsQualitiesWorst => ResultsQualities.Rows[WorstQualitiesRowResultName];
private DataRow ResultsQualitiesIteration => ResultsQualities.Rows[CurrentQualitiesRowResultName];
private IRegressionSolution ResultsModel {
get { return (IRegressionSolution)Results[RegressionSolutionResultName].Value; }
set { Results[RegressionSolutionResultName].Value = value; }
}
#endregion
#region HLConstructors
[StorableConstructor]
protected DiscreteEfficientGlobalOptimizationAlgorithm(bool deserializing) : base(deserializing) { }
[StorableHook(HookType.AfterDeserialization)]
protected void AfterDeserialization() {
RegisterEventhandlers();
}
protected DiscreteEfficientGlobalOptimizationAlgorithm(DiscreteEfficientGlobalOptimizationAlgorithm original, Cloner cloner) : base(original, cloner) {
Random = cloner.Clone(Random);
if (original.Samples != null) Samples = original.Samples.Select(x => new Tuple<IntegerVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
if (original.InitialSamples != null) InitialSamples = original.InitialSamples.Select(x => new Tuple<IntegerVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
RegisterEventhandlers();
}
public override IDeepCloneable Clone(Cloner cloner) { return new DiscreteEfficientGlobalOptimizationAlgorithm(this, cloner); }
public DiscreteEfficientGlobalOptimizationAlgorithm() {
IProblemInstanceExporter dummy = new RegressionProblem(); //this variable is irrelevant
//the dummy variable enforces a using-Statement for HeuristicLab.Problems.Instances
//"new ValueParameter>" requires no using-Statement, but nontheless it requires HeuristicLab.Problems.Instances to be referenced
//Having HeuristicLab.Problems.Instances referenced but not used, causes the Essential-Unit-tests to fail.
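//the default infill optimizer is an offspring selection GA working on the infill subproblem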
var osga = new OffspringSelectionGeneticAlgorithm.OffspringSelectionGeneticAlgorithm() {
MaximumGenerations = new IntValue(300),
PopulationSize = new IntValue(50)
};
var model = new GaussianProcessRegression {
Problem = new RegressionProblem()
};
model.CovarianceFunctionParameter.Value = new CovarianceRationalQuadraticIso();
Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluationsParameterName, "", new IntValue(int.MaxValue)));
Parameters.Add(new FixedValueParameter<IntValue>(InitialEvaluationsParameterName, "", new IntValue(10)));
Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeParameterName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime.", new IntValue(-1)));
Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The regression algorithm used to build the surrogate model of the problem.", model));
Parameters.Add(new ValueParameter<Algorithm>(InfillOptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem.", osga));
Parameters.Add(new FixedValueParameter<IntValue>(InfillOptimizationRestartsParameterName, "Number of restarts of the infill optimization algorithm to avoid local optima.", new IntValue(1)));
Parameters.Add(new FixedValueParameter<IntValue>(GenerationSizeParameterName, "Number of points that are sampled in every iteration (standard EGO: 1).", new IntValue(1)));
Parameters.Add(new FixedValueParameter<IntValue>(MaximalDataSetSizeParameterName, "The maximum number of sample points used to build the model. Set to 0 or less to always use all samples.", new IntValue(-1)));
Parameters.Add(new FixedValueParameter<BoolValue>(RemoveDuplicatesParameterName, "Whether duplicate samples should be replaced by a single sample with an averaged quality. This GREATLY decreases the chance of ill-conditioned (unbuildable) models but is not theoretically sound, as the model ignores the increasing certainty in this region."));
Parameters.Add(new FixedValueParameter<FileValue>(InitialSamplesParameterName, "A file specifying initial samples used to jump-start the algorithm. These samples are not counted as evaluations. If InitialEvaluations is larger than the number of samples in the file, the remaining points are generated uniformly at random and evaluated.", new FileValue()));
Parameters.Add(new ValueParameter<IntegerVector>(BaselineVectorParameterName, "A vector used as a baseline. It is evaluated once and is not part of the modeling process (it has no influence on algorithm performance)."));
var eqi = new ExpectedQuantileImprovement();
eqi.MaxEvaluationsParameter.Value = MaximumEvaluationsParameter.Value;
var criteria = new ItemSet<IInfillCriterion> { new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuality(), eqi, new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(InfillCriterionParameterName, "The criterion used to decide which solution should be sampled next.", criteria, criteria.First()));
var initialSamplingPlans = new ItemSet<IInitialSampling<IntegerVector>> { new UniformRandomDiscreteSampling() };
Parameters.Add(new ConstrainedValueParameter<IInitialSampling<IntegerVector>>(InitialSamplingPlanParameterName, "Determines the initial samples from which the first model can be built.", initialSamplingPlans, initialSamplingPlans.First()));
SetInfillProblem();
RegisterEventhandlers();
}
#endregion
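//ISurrogateAlgorithm<IntegerVector> member: provides already evaluated samples that seed the Samples list on initialization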
public void SetInitialSamples(IntegerVector[] individuals, double[] qualities) {
InitialSamples = individuals.Zip(qualities, (individual, d) => new Tuple<IntegerVector, double>(individual, d)).ToList();
}
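//sets up the encoding check, the random number generator, the sample list and all result entries before the first run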
protected override void Initialize(CancellationToken cancellationToken) {
base.Initialize(cancellationToken);
//encoding
var enc = Problem.Encoding as IntegerVectorEncoding;
if (enc == null) throw new ArgumentException("The EGO algorithm can only be applied to IntegerVectorEncodings");
var infillProblem = InfillOptimizationAlgorithm.Problem as DiscreteInfillProblem;
if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no DiscreteInfillProblem. Troubles with Eventhandling?");
//random
if (SetSeedRandomly) SeedParameter.Value.Value = new System.Random().Next();
Random.Reset(Seed);
Samples = InitialSamples?.ToList() ?? new List<Tuple<IntegerVector, double>>();
//results
Results.Add(new Result(IterationsResultName, new IntValue(0)));
Results.Add(new Result(EvaluatedSolutionsResultName, new IntValue(Samples.Count)));
Results.Add(new Result(BestSolutionResultName, new IntegerVector(1)));
Results.Add(new Result(BestQualityResultName, new DoubleValue(Problem.Maximization ? double.MinValue : double.MaxValue)));
Results.Add(new Result(RegressionSolutionResultName, typeof(IRegressionSolution)));
var table = new DataTable(QualitiesChartResultName);
table.Rows.Add(new DataRow(BestQualitiesRowResultName));
table.Rows.Add(new DataRow(WorstQualitiesRowResultName));
table.Rows.Add(new DataRow(CurrentQualitiesRowResultName));
Results.Add(new Result(QualitiesChartResultName, table));
if (BaselineVector != null && BaselineVector.Length == enc.Length) Results.Add(new Result("BaselineValue", new DoubleValue(Evaluate(BaselineVector).Item2)));
}
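//main EGO loop: evaluate the initial sampling plan first, then iterate:
//build a regression model from all samples, optimize the infill criterion on it and evaluate the proposed point(s) on the expensive problem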
protected override void Run(CancellationToken cancellationToken) {
//initial samples
if (Samples.Count < InitialEvaluations) {
var points = InitialSamplingPlan.GetSamples(InitialEvaluations - Samples.Count, Samples.Select(x => x.Item1).ToArray(), (IntegerVectorEncoding)Problem.Encoding, Random);
foreach (var t in points) {
try {
Samples.Add(Evaluate(t));
cancellationToken.ThrowIfCancellationRequested();
}
finally {
Analyze();
}
}
}
//adaptive samples
for (ResultsIterations = 0; ResultsEvaluations < MaximumEvaluations; ResultsIterations++) {
try {
ResultsModel = BuildModel(cancellationToken);
if (ResultsModel == null) break;
cancellationToken.ThrowIfCancellationRequested();
for (var i = 0; i < GenerationSize; i++) {
var samplepoint = OptimizeInfillProblem(cancellationToken);
if (RemoveDuplicates) {
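//duplicate handling (e.g. merging repeated sample points into a single averaged sample) is not implemented in this discrete variant yet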
}
var sample = Evaluate(samplepoint);
Samples.Add(sample);
cancellationToken.ThrowIfCancellationRequested();
}
}
finally {
Analyze();
}
}
}
#region Eventhandling
private void RegisterEventhandlers() {
DeregisterEventhandlers();
RegressionAlgorithmParameter.ValueChanged += OnModelAlgorithmChanged;
InfillOptimizationAlgorithmParameter.ValueChanged += OnInfillOptimizationAlgorithmChanged;
InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
InfillCriterionParameter.ValueChanged += InfillCriterionChanged;
InitialSamplesParameter.ToStringChanged += OnInitialSamplesChanged;
}
private void DeregisterEventhandlers() {
RegressionAlgorithmParameter.ValueChanged -= OnModelAlgorithmChanged;
InfillOptimizationAlgorithmParameter.ValueChanged -= OnInfillOptimizationAlgorithmChanged;
InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
InfillCriterionParameter.ValueChanged -= InfillCriterionChanged;
InitialSamplesParameter.ToStringChanged -= OnInitialSamplesChanged;
}
private void OnInfillOptimizationAlgorithmChanged(object sender, EventArgs args) {
SetInfillProblem();
InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
}
private void InfillOptimizationProblemChanged(object sender, EventArgs e) {
InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
SetInfillProblem();
InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
}
private void InfillCriterionChanged(object sender, EventArgs e) {
var infillProblem = InfillOptimizationAlgorithm.Problem as DiscreteInfillProblem;
if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no DiscreteInfillProblem. Troubles with Eventhandling?");
infillProblem.InfillCriterion = InfillCriterion;
}
private void OnModelAlgorithmChanged(object sender, EventArgs args) {
RegressionAlgorithm.Problem = new RegressionProblem();
}
private void OnInitialSamplesChanged(object sender, EventArgs args) { }
protected override void OnExecutionTimeChanged() {
base.OnExecutionTimeChanged();
if (CancellationTokenSource == null) return;
if (MaximumRuntime == -1) return;
if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
}
public override void Pause() {
if (InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Started || InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Paused) InfillOptimizationAlgorithm.Stop();
if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
base.Pause();
}
public override void Stop() {
if (InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Started || InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Paused) InfillOptimizationAlgorithm.Stop();
if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
base.Stop();
}
#endregion
#region helpers
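//builds a regression model from the current samples; reruns the regression algorithm (up to 100 times) until it returns a solution
//and, for Gaussian process models, additionally tries to reuse the previous hyperparameters on the new dataset, keeping the better of the two solutions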
private IRegressionSolution BuildModel(CancellationToken cancellationToken) {
var dataset = EgoUtilities.GetDataSet(DataSamples.ToList());
var problemdata = new RegressionProblemData(dataset, dataset.VariableNames.Where(x => !x.Equals("output")), "output");
problemdata.TrainingPartition.Start = 0;
problemdata.TrainingPartition.End = dataset.Rows;
problemdata.TestPartition.Start = dataset.Rows;
problemdata.TestPartition.End = dataset.Rows;
//train
var problem = (RegressionProblem)RegressionAlgorithm.Problem;
problem.ProblemDataParameter.Value = problemdata;
var i = 0;
IRegressionSolution solution = null;
while (solution == null && i++ < 100) {
var results = EgoUtilities.SyncRunSubAlgorithm(RegressionAlgorithm, Random.Next(int.MaxValue), cancellationToken);
solution = results.Select(x => x.Value).OfType<IRegressionSolution>().SingleOrDefault();
cancellationToken.ThrowIfCancellationRequested();
}
//try creating a model with old hyperparameters and new dataset;
var gp = RegressionAlgorithm as GaussianProcessRegression;
var oldmodel = ResultsModel as GaussianProcessRegressionSolution;
if (gp != null && oldmodel != null) {
var n = Samples.First().Item1.Length;
var mean = (IMeanFunction)oldmodel.Model.MeanFunction.Clone();
var cov = (ICovarianceFunction)oldmodel.Model.CovarianceFunction.Clone();
if (mean.GetNumberOfParameters(n) != 0 || cov.GetNumberOfParameters(n) != 0) throw new ArgumentException("DEBUG: assumption about fixed parameters is wrong");
var noise = 0.0;
double[] hyp = { noise };
try {
var model = new GaussianProcessModel(problemdata.Dataset, problemdata.TargetVariable,
problemdata.AllowedInputVariables, problemdata.TrainingIndices, hyp, mean, cov);
model.FixParameters();
var sol = new GaussianProcessRegressionSolution(model, problemdata);
if (solution == null || solution.TrainingMeanSquaredError > sol.TrainingMeanSquaredError) {
solution = sol;
}
}
catch (ArgumentException) { }
}
if (!ResultsQualities.Rows.ContainsKey("DEBUG: Degenerates")) ResultsQualities.Rows.Add(new DataRow("DEBUG: Degenerates"));
var row = ResultsQualities.Rows["DEBUG: Degenerates"];
row.Values.Add(i - 1);
if (solution == null) Results.Add(new Result("Status", new StringValue("The Algorithm did not return a Model")));
else {
if (!ResultsQualities.Rows.ContainsKey("DEBUG: RMSE")) ResultsQualities.Rows.Add(new DataRow("DEBUG: RMSE"));
row = ResultsQualities.Rows["DEBUG: RMSE"];
row.Values.Add(Math.Sqrt(solution.TrainingMeanSquaredError));
}
RegressionAlgorithm.Runs.Clear();
return solution;
}
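//optimizes the infill criterion on the current model using the configured sub-algorithm and returns the best vector found over all restarts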
private IntegerVector OptimizeInfillProblem(CancellationToken cancellationToken) {
//parameterize and check InfillProblem
var infillProblem = InfillOptimizationAlgorithm.Problem as DiscreteInfillProblem;
if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
if (infillProblem.InfillCriterion != InfillCriterion) throw new ArgumentException("The InfillCriterion for the problem is not correctly set.");
var enc = Problem.Encoding as IntegerVectorEncoding;
infillProblem.Encoding.Bounds = enc.Bounds;
infillProblem.Encoding.Length = enc.Length;
infillProblem.Initialize(ResultsModel, Problem.Maximization);
IntegerVector bestVector = null;
var bestValue = infillProblem.Maximization ? double.NegativeInfinity : double.PositiveInfinity;
for (var i = 0; i < InfillOptimizationRestarts; i++) {
//optimize
var res = EgoUtilities.SyncRunSubAlgorithm(InfillOptimizationAlgorithm, Random.Next(int.MaxValue), cancellationToken);
cancellationToken.ThrowIfCancellationRequested();
//extract results
if (!res.ContainsKey(DiscreteInfillProblem.BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
var v = res[DiscreteInfillProblem.BestInfillSolutionResultName].Value as IntegerVector;
if (!res.ContainsKey(DiscreteInfillProblem.BestInfillQualityResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best quality");
var d = res[DiscreteInfillProblem.BestInfillQualityResultName].Value as DoubleValue;
if (d == null || v == null) throw new ArgumentException("The InfillOptimizationAlgorithm did not return the expected result types");
//check for improvement
if (infillProblem.Maximization != (d.Value > bestValue)) continue;
bestValue = d.Value;
bestVector = v;
}
InfillOptimizationAlgorithm.Runs.Clear();
return bestVector;
}
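//updates the result entries (evaluations, best solution, best/current/worst quality charts) and calls the problem-specific analyzers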
private void Analyze() {
ResultsEvaluations = Samples.Count;
var max = Samples.ArgMax(x => x.Item2);
var min = Samples.ArgMin(x => x.Item2);
var best = Samples[Problem.Maximization ? max : min];
ResultsBestQuality = best.Item2;
ResultsBestSolution = best.Item1;
ResultsQualitiesBest.Values.Add(ResultsBestQuality);
ResultsQualitiesIteration.Values.Add(Samples[Samples.Count - 1].Item2);
ResultsQualitiesWorst.Values.Add(Samples[Problem.Maximization ? min : max].Item2);
Problem.Analyze(Samples.Select(x => GetIndividual(x.Item1)).ToArray(), Samples.Select(x => x.Item2).ToArray(), Results, Random);
if (Samples.Count != 0 && Samples[0].Item1.Length == 2) AnalyzeSampleDistribution();
AnalyzePredictionCorrelation();
}
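//DEBUG chart: scatter plot of all sampled points, only produced for two-dimensional encodings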
private void AnalyzeSampleDistribution() {
const string plotname = "DEBUG:Sample Distribution";
const string rowInit = "Initial Samples";
const string rowAll = "All Samples";
if (!Results.ContainsKey(plotname)) Results.Add(new Result(plotname, new ScatterPlot()));
var plot = (ScatterPlot)Results[plotname].Value;
if (!plot.Rows.ContainsKey(rowInit) && InitialSamples != null && InitialSamples.Count > 0)
plot.Rows.Add(new ScatterPlotDataRow(rowInit, "samples from the initial file (already evaluated)", InitialSamples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1]))));
if (!plot.Rows.ContainsKey(rowAll)) plot.Rows.Add(new ScatterPlotDataRow(rowAll, "All samples", new Point2D<double>[0]));
else { plot.Rows[rowAll].Points.Clear(); }
plot.Rows[rowAll].Points.AddRange(Samples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1])));
}
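//plots the predicted objective value of each newly evaluated sample against its true objective value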
private void AnalyzePredictionCorrelation() {
const string plotName = "Prediction";
const string rowName = "Samples";
const string lastrowName = "Last Sample";
if (!Results.ContainsKey(plotName)) Results.Add(new Result(plotName, new ScatterPlot()));
var plot = (ScatterPlot)Results[plotName].Value;
if (!plot.Rows.ContainsKey(rowName)) plot.Rows.Add(new ScatterPlotDataRow(rowName, rowName, new List<Point2D<double>>()));
if (!plot.Rows.ContainsKey(lastrowName)) plot.Rows.Add(new ScatterPlotDataRow(lastrowName, lastrowName, new List<Point2D<double>>()));
var p = Samples[Samples.Count - 1];
if (ResultsModel != null) plot.Rows[rowName].Points.Add(new Point2D<double>(ResultsModel.Model.GetEstimation(p.Item1), p.Item2, p.Item1));
plot.VisualProperties.YAxisTitle = "True Objective Value";
plot.VisualProperties.XAxisTitle = "Predicted Objective Value";
}
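//wraps an integer vector in a scope so that it can be handed to Problem.Evaluate and Problem.Analyze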
private Individual GetIndividual(IntegerVector r) {
var scope = new Scope();
scope.Variables.Add(new Variable(Problem.Encoding.Name, r));
return new SingleEncodingIndividual(Problem.Encoding, scope);
}
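//evaluates a point on the expensive problem and returns it together with the obtained quality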
private Tuple<IntegerVector, double> Evaluate(IntegerVector point) {
return new Tuple<IntegerVector, double>(point, Problem.Evaluate(GetIndividual(point), Random));
}
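//attaches a fresh DiscreteInfillProblem with the currently selected infill criterion to the infill optimization algorithm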
private void SetInfillProblem() {
InfillOptimizationAlgorithm.Problem = new DiscreteInfillProblem { InfillCriterion = InfillCriterion };
}
#endregion
}
}