#region License Information
/* HeuristicLab
* Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Algorithms.EGO;
using HeuristicLab.Algorithms.SAPBA.Operators;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
namespace HeuristicLab.Algorithms.SAPBA {
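//Surrogate-assisted infill strategy with Lamarckian (memetic) learning: periodically the best individuals
//are refined on a locally built regression model and, if the refined point evaluates better on the
//expensive problem, it is written back into the individual's genotype.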
[StorableClass]
public class LamarckianStrategy : InfillStrategy {
#region Parameternames
public const string NoTrainingPointsParameterName = "Number of Trainingpoints";
public const string LocalInfillCriterionParameterName = "LocalInfillCriterion";
public const string OptimizationAlgorithmParameterName = "Optimization Algorithm";
public const string RegressionAlgorithmParameterName = "Regression Algorithm";
#endregion
#region Parameters
public IFixedValueParameter<IntValue> NoTrainingPointsParameter => Parameters[NoTrainingPointsParameterName] as IFixedValueParameter<IntValue>;
public IValueParameter<IAlgorithm> OptimizationAlgorithmParameter => Parameters[OptimizationAlgorithmParameterName] as IValueParameter<IAlgorithm>;
public IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>> RegressionAlgorithmParameter => Parameters[RegressionAlgorithmParameterName] as IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>;
public IConstrainedValueParameter<IInfillCriterion> LocalInfillCriterionParameter => Parameters[LocalInfillCriterionParameterName] as IConstrainedValueParameter<IInfillCriterion>;
#endregion
#region Properties
public IntValue NoTrainingPoints => NoTrainingPointsParameter.Value;
public IAlgorithm OptimizationAlgorithm => OptimizationAlgorithmParameter.Value;
public IDataAnalysisAlgorithm<IRegressionProblem> RegressionAlgorithm => RegressionAlgorithmParameter.Value;
public IInfillCriterion LocalInfillCriterion => LocalInfillCriterionParameter.Value;
#endregion
#region Constructors
[StorableConstructor]
protected LamarckianStrategy(bool deserializing) : base(deserializing) { }
[StorableHook(HookType.AfterDeserialization)]
private void AfterDeserialization() {
RegisterParameterEvents();
}
protected LamarckianStrategy(LamarckianStrategy original, Cloner cloner) : base(original, cloner) {
RegisterParameterEvents();
}
public LamarckianStrategy() {
var localCriteria = new ItemSet<IInfillCriterion> { new ExpectedQuality(), new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuantileImprovement(), new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
var osEs = new OffspringSelectionEvolutionStrategy.OffspringSelectionEvolutionStrategy {
Problem = new InfillProblem(),
ComparisonFactor = { Value = 1.0 },
MaximumGenerations = { Value = 1000 },
MaximumEvaluatedSolutions = { Value = 100000 },
PlusSelection = { Value = true },
PopulationSize = { Value = 1 }
};
osEs.MutatorParameter.Value = osEs.MutatorParameter.ValidValues.OfType<NormalAllPositionsManipulator>().First(); //assumption: Gaussian mutation of all positions is the intended default mutator
Parameters.Add(new FixedValueParameter<IntValue>(NoTrainingPointsParameterName, "The number of sample points used to create a local model", new IntValue(50)));
Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(LocalInfillCriterionParameterName, "The infill criterion used to cheaply evaluate points.", localCriteria, localCriteria.First()));
Parameters.Add(new ValueParameter<IAlgorithm>(OptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem", osEs));
Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The model used to approximate the problem", new GaussianProcessRegression { Problem = new RegressionProblem() }));
RegisterParameterEvents();
}
public override IDeepCloneable Clone(Cloner cloner) {
return new LamarckianStrategy(this, cloner);
}
#endregion
//Short-lived stores for analysis
private readonly List<double> LamarckValues = new List<double>();
private readonly List<double> SampleValues = new List<double>();
protected override void Initialize() {
base.Initialize();
var infillProblem = OptimizationAlgorithm.Problem as InfillProblem;
if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
infillProblem.InfillCriterion = LocalInfillCriterion;
}
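//Adds the qualities collected during ProcessPopulation to the "Lamarck Comparison" plot and tracks the
//(log) hypervolume of the population's bounding box next to the local-sample subspace of each candidate.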
protected override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, ResultCollection globalResults, IRandom random) {
base.Analyze(individuals, qualities, results, globalResults, random);
const string plotName = "Lamarck Comparison";
const string lamarckRow = "Lamarck Values";
const string samplesRow = "Original Values";
if (!globalResults.ContainsKey(plotName))
globalResults.Add(new Result(plotName, new DataTable(plotName)));
var plot = (DataTable)globalResults[plotName].Value;
if (!plot.Rows.ContainsKey(lamarckRow)) plot.Rows.Add(new DataRow(lamarckRow));
if (!plot.Rows.ContainsKey(samplesRow)) plot.Rows.Add(new DataRow(samplesRow));
plot.Rows[lamarckRow].Values.AddRange(LamarckValues);
plot.Rows[samplesRow].Values.AddRange(SampleValues);
LamarckValues.Clear();
SampleValues.Clear();
//analyze Hypervolumes
const string volPlotName = "Hypervolumes Comparison";
const string mainRowName = "Population Volume (log)";
const string subspaceRowName = "Subspace Volume (log) for Lamarck Candidate ";
if (!globalResults.ContainsKey(volPlotName))
globalResults.Add(new Result(volPlotName, new DataTable(volPlotName)));
plot = (DataTable)globalResults[volPlotName].Value;
if (!plot.Rows.ContainsKey(mainRowName)) plot.Rows.Add(new DataRow(mainRowName));
var v = Math.Log(GetStableVolume(GetBoundingBox(individuals.Select(x => x.RealVector()))));
plot.Rows[mainRowName].Values.Add(v);
var indis = individuals
.Zip(qualities, (individual, d) => new Tuple<Individual, double>(individual, d))
.OrderBy(t => SapbaAlgorithm.Problem.Maximization ? -t.Item2 : t.Item2)
.Take(NoIndividuals.Value)
.Select(t => t.Item1).ToArray();
for (var i = 0; i < indis.Length; i++) {
var samples = GetNearestSamples(NoTrainingPoints.Value, indis[i].RealVector());
var d = Math.Log(GetStableVolume(GetBoundingBox(samples.Select(x => x.Item1))));
if (!plot.Rows.ContainsKey(subspaceRowName + i)) plot.Rows.Add(new DataRow(subspaceRowName + i));
plot.Rows[subspaceRowName + i].Values.Add(d);
}
}
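//Every NoGenerations generations the NoIndividuals best individuals are expensively evaluated, locally
//refined on the surrogate, replaced if the refined point is better, and the regression solution is rebuilt.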
protected override void ProcessPopulation(Individual[] individuals, double[] qualities, IRandom random) {
if (RegressionSolution == null) return;
if (Generations < NoGenerations.Value) Generations++;
else {
//Select best Individuals
var indis = individuals
.Zip(qualities, (individual, d) => new Tuple<Individual, double>(individual, d))
.OrderBy(t => Problem.Maximization ? -t.Item2 : t.Item2)
.Take(NoIndividuals.Value)
.Select(t => t.Item1).ToArray();
//Evaluate individuals
foreach (var individual in indis)
SampleValues.Add(EvaluateSample(individual.RealVector(), random).Item2);
//Perform memetic replacement for all points
for (var i = 0; i < indis.Length; i++) {
var vector = indis[i].RealVector();
var altVector = OptimizeInfillProblem(vector, random);
LamarckValues.Add(EvaluateSample(altVector, random).Item2);
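//keep the original genotype if the Lamarckian candidate did not improve on the original evaluation
//(the comparison respects whether the problem is maximized or minimized)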
if (LamarckValues[i] < SampleValues[i] == Problem.Maximization) continue;
for (var j = 0; j < vector.Length; j++) vector[j] = altVector[j];
}
BuildRegressionSolution(random);
Generations = 0;
}
}
#region Events
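//These handlers keep the infill subproblem and its criterion consistent whenever the optimization
//algorithm, its problem, or the local infill criterion parameter is exchanged.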
private void RegisterParameterEvents() {
OptimizationAlgorithmParameter.ValueChanged += OnInfillAlgorithmChanged;
OptimizationAlgorithm.ProblemChanged += OnInfillProblemChanged;
LocalInfillCriterionParameter.ValueChanged += OnInfillCriterionChanged;
}
private void OnInfillCriterionChanged(object sender, EventArgs e) {
((InfillProblem)OptimizationAlgorithm.Problem).InfillCriterion = LocalInfillCriterion;
}
private void OnInfillAlgorithmChanged(object sender, EventArgs e) {
OptimizationAlgorithm.Problem = new InfillProblem { InfillCriterion = LocalInfillCriterion };
OptimizationAlgorithm.ProblemChanged -= OnInfillProblemChanged; //avoid double attaching
OptimizationAlgorithm.ProblemChanged += OnInfillProblemChanged;
}
private void OnInfillProblemChanged(object sender, EventArgs e) {
OptimizationAlgorithm.ProblemChanged -= OnInfillProblemChanged;
OptimizationAlgorithm.Problem = new InfillProblem { InfillCriterion = LocalInfillCriterion };
OptimizationAlgorithm.ProblemChanged += OnInfillProblemChanged;
}
#endregion
#region helpers
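//Trains a local regression model on the nearest samples, constrains the infill subproblem to their
//bounding box, starts the search from the given point, and returns the best solution of the sub-algorithm.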
private RealVector OptimizeInfillProblem(RealVector point, IRandom random) {
var infillProblem = OptimizationAlgorithm.Problem as InfillProblem;
if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
if (infillProblem.InfillCriterion != LocalInfillCriterion) throw new ArgumentException("InfillCiriterion for Problem is not correctly set.");
var points = Math.Min(NoTrainingPoints.Value, Samples.Count);
var samples = GetNearestSamples(points, point);
var regression = SapbaUtilities.BuildModel(samples, RegressionAlgorithm, random);
var box = GetBoundingBox(samples.Select(x => x.Item1));
infillProblem.Encoding.Length = ((RealVectorEncoding)Problem.Encoding).Length;
infillProblem.Encoding.Bounds = box;
infillProblem.Encoding.SolutionCreator = new FixedRealVectorCreator(point);
infillProblem.Initialize(regression, Problem.Maximization);
var res = SapbaUtilities.SyncRunSubAlgorithm(OptimizationAlgorithm, random.Next(int.MaxValue));
if (!res.ContainsKey(InfillProblem.BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
var v = res[InfillProblem.BestInfillSolutionResultName].Value as RealVector;
if (v == null) throw new ArgumentException("The InfillOptimizationAlgorithm did not return the expected result types");
if (!InBounds(v, box)) throw new ArgumentException("Vector not in bounds");
OptimizationAlgorithm.Runs.Clear();
return v;
}
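//Returns the noSamples stored samples that are closest to the given point (squared Euclidean distance).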
private Tuple<RealVector, double>[] GetNearestSamples(int noSamples, RealVector point) {
return Samples.Select(sample => Tuple.Create(SquaredEuclidean(sample.Item1, point), sample)).OrderBy(x => x.Item1).Take(noSamples).Select(x => x.Item2).ToArray();
}
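//Component-wise minima and maxima of the given vectors as an n-by-2 matrix ([i, 0] = lower bound, [i, 1] = upper bound).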
private static DoubleMatrix GetBoundingBox(IEnumerable<RealVector> samples) {
DoubleMatrix m = null;
foreach (var sample in samples)
if (m == null) {
m = new DoubleMatrix(sample.Length, 2);
for (var i = 0; i < sample.Length; i++) m[i, 0] = m[i, 1] = sample[i];
} else
for (var i = 0; i < sample.Length; i++) {
m[i, 0] = Math.Min(m[i, 0], sample[i]);
m[i, 1] = Math.Max(m[i, 1], sample[i]);
}
return m;
}
//the volume of a bounding box with slightly increased side lengths (the volume can never reach 0)
private static double GetStableVolume(DoubleMatrix bounds) {
var res = 1.0;
for (var i = 0; i < bounds.Rows; i++) res *= bounds[i, 1] - bounds[i, 0] + 0.1;
return res;
}
private static bool InBounds(RealVector r, DoubleMatrix bounds) {
return !r.Where((t, i) => t < bounds[i, 0] || t > bounds[i, 1]).Any();
}
private static double SquaredEuclidean(RealVector a, RealVector b) {
return a.Select((t, i) => t - b[i]).Sum(d => d * d);
}
#endregion
}
}