Changeset 15064


Ignore:
Timestamp:
06/26/17 09:10:56 (4 years ago)
Author:
bwerth
Message:

#2745 implemented EGO as EngineAlgorithm + some simplifications in the IInfillCriterion interface

Location:
branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO
Files:
14 added
1 deleted
14 edited

Legend:

Unmodified
Added
Removed
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/EfficientGlobalOptimizationAlgorithm.cs

    r14833 r15064  
    2424using System.Linq;
    2525using System.Threading;
    26 using System.Windows.Forms;
    2726using HeuristicLab.Algorithms.DataAnalysis;
    2827using HeuristicLab.Analysis;
     
    3534using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3635using HeuristicLab.Problems.DataAnalysis;
    37 using HeuristicLab.Problems.Instances.DataAnalysis;
    38 using HeuristicLab.Problems.Instances.DataAnalysis.Views;
     36using HeuristicLab.Problems.Instances;
    3937using HeuristicLab.Random;
    4038
     
    4240  [StorableClass]
    4341  [Creatable(CreatableAttribute.Categories.Algorithms, Priority = 95)]
    44   [Item("EfficientGlobalOptimizationAlgortihm", "Solves a problem by sequentially learning a model, solving a subproblem on the model and evaluating the best found solution for this subproblem.")]
     42  [Item("EfficientGlobalOptimizationAlgorithm", "Solves a problem by sequentially learning a model, solving a subproblem on the model and evaluating the best found solution for this subproblem.")]
    4543  public class EfficientGlobalOptimizationAlgorithm : BasicAlgorithm, ISurrogateAlgorithm<RealVector> {
    4644    #region Basic-Alg-Essentials
     
    8482    #endregion
    8583
    86     #region TransmissionResultNames
    87     public const string BestInfillSolutionResultName = "BestInfillSolution";
    88     public const string BestInfillQualityResultName = "BestInfillQuality";
    89     #endregion
    90 
    9184    #region ParameterProperties
    9285    public IFixedValueParameter<IntValue> GenerationSizeParemeter => Parameters[GenerationSizeParameterName] as IFixedValueParameter<IntValue>;
     
    108101
    109102    #region Properties
    110 
    111103    public int GenerationSize => GenerationSizeParemeter.Value.Value;
    112104    public IInfillCriterion InfillCriterion => InfillCriterionParameter.Value;
     
    123115      ? Samples.Skip(Samples.Count - MaximalDatasetSize)
    124116      : Samples;
    125 
    126117    private bool RemoveDuplicates => RemoveDuplicatesParameter.Value.Value;
    127118    private RealVector BaselineVector => BaselineVectorParameter.Value;
     
    161152    private DataTable ResultsQualities => (DataTable)Results[QualitiesChartResultName].Value;
    162153    private DataRow ResultsQualitiesBest => ResultsQualities.Rows[BestQualitiesRowResultName];
    163 
    164154    private DataRow ResultsQualitiesWorst => ResultsQualities.Rows[WorstQualitiesRowResultName];
    165 
    166155    private DataRow ResultsQualitiesIteration => ResultsQualities.Rows[CurrentQualitiesRowResultName];
    167 
    168156    private IRegressionSolution ResultsModel
    169157    {
     
    177165    protected EfficientGlobalOptimizationAlgorithm(bool deserializing) : base(deserializing) { }
    178166    [StorableHook(HookType.AfterDeserialization)]
    179     private void AfterDeseialization() {
     167    protected void AfterDeseialization() {
    180168      RegisterEventhandlers();
    181169    }
    182     protected EfficientGlobalOptimizationAlgorithm(EfficientGlobalOptimizationAlgorithm original, Cloner cloner)
    183       : base(original, cloner) {
     170    protected EfficientGlobalOptimizationAlgorithm(EfficientGlobalOptimizationAlgorithm original, Cloner cloner) : base(original, cloner) {
    184171      Random = cloner.Clone(Random);
    185172      if (original.Samples != null) Samples = original.Samples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
    186       if (original.InitialSamples != null) Samples = original.InitialSamples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
     173      if (original.InitialSamples != null) InitialSamples = original.InitialSamples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
    187174      RegisterEventhandlers();
    188175    }
    189176    public override IDeepCloneable Clone(Cloner cloner) { return new EfficientGlobalOptimizationAlgorithm(this, cloner); }
    190177    public EfficientGlobalOptimizationAlgorithm() {
     178      IProblemInstanceExporter dummy = new RegressionProblem(); //this variable is irrelevant
     179      //the dummy variable enforces a using-Statement for HeuristicLab.Problems.Instances
     180      //"new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>" requires no using using-Statement, but nontheless it requires HeuristicLab.Problems.Instances to be referenced 
     181      //Having HeuristicLab.Problems.Instances referenced but not used, causes the Essential-Unit-tests to fail.
     182
    191183      var cmaes = new CMAEvolutionStrategy.CMAEvolutionStrategy {
    192184        MaximumGenerations = 300,
     
    206198      Parameters.Add(new FixedValueParameter<IntValue>(InfillOptimizationRestartsParameterName, "Number of restarts of the SubAlgortihm to avoid local optima", new IntValue(1)));
    207199      Parameters.Add(new FixedValueParameter<IntValue>(GenerationSizeParameterName, "Number points that are sampled every iteration (stadard EGO: 1)", new IntValue(1)));
    208       Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(InfillCriterionParameterName, "Decision what value should decide the next sample"));
    209       InfillCriterionParameter.ValidValues.Add(new ExpectedImprovement());
    210       InfillCriterionParameter.ValidValues.Add(new AugmentedExpectedImprovement());
    211       InfillCriterionParameter.ValidValues.Add(new ExpectedQuality());
    212       var eqi = new ExpectedQuantileImprovement();
    213       InfillCriterionParameter.ValidValues.Add(eqi);
    214       eqi.MaxEvaluationsParameter.Value = MaximumEvaluationsParameter.Value;
    215       InfillCriterionParameter.ValidValues.Add(new MinimalQuantileCriterium());
    216       InfillCriterionParameter.ValidValues.Add(new RobustImprovement());
    217       InfillCriterionParameter.ValidValues.Add(new PluginExpectedImprovement());
    218200      Parameters.Add(new FixedValueParameter<IntValue>(MaximalDataSetSizeParameterName, "The maximum number of sample points used to generate the model. Set 0 or less to use always all samples ", new IntValue(-1)));
    219201      Parameters.Add(new FixedValueParameter<BoolValue>(RemoveDuplicatesParamterName, "Wether duplicate samples should be replaced by a single sample with an averaged quality. This GREATLY decreases the chance of ill conditioned models (unbuildable models) but is not theoretically sound as the model ignores the increasing certainty in this region"));
    220202      Parameters.Add(new FixedValueParameter<FileValue>(InitialSamplesParameterName, "The file specifying some initial samples used to jump start the algorithm. These samples are not counted as evaluations. If InitialEvaluations is more than the samples specified in the file, the rest is uniformly random generated and evaluated.", new FileValue()));
    221203      Parameters.Add(new ValueParameter<RealVector>(BaselineVectorParameterName, "A vector used to create a baseline, this vector is evaluated once and is not part of the modeling process (has no influence on algorithm performance)"));
    222       var intialSamplingPlans = new ItemSet<IInitialSampling> { new UniformRandomSampling(), new LatinHyperCubeDesign() };
    223       Parameters.Add(new ConstrainedValueParameter<IInitialSampling>(InitialSamplingPlanParamterName, intialSamplingPlans, intialSamplingPlans.First()));
    224 
     204      var eqi = new ExpectedQuantileImprovement();
     205      eqi.MaxEvaluationsParameter.Value = MaximumEvaluationsParameter.Value;
     206      var criteria = new ItemSet<IInfillCriterion> { new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuality(), eqi, new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
     207      Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(InfillCriterionParameterName, "Decision what value should decide the next sample", criteria, criteria.First()));
     208      var intialSamplingPlans = new ItemSet<IInitialSampling> { new UniformRandomSampling(), new LatinHyperCubeDesignCreator() };
     209      Parameters.Add(new ConstrainedValueParameter<IInitialSampling>(InitialSamplingPlanParamterName, "Determies the initial samples from which the first model can be built.", intialSamplingPlans, intialSamplingPlans.First()));
    225210      SetInfillProblem();
    226211      RegisterEventhandlers();
    227212    }
    228213    #endregion
    229 
     214    public void SetInitialSamples(RealVector[] individuals, double[] qualities) {
     215      InitialSamples = individuals.Zip(qualities, (individual, d) => new Tuple<RealVector, double>(individual, d)).ToList();
     216    }
    230217    protected override void Initialize(CancellationToken cancellationToken) {
    231218      base.Initialize(cancellationToken);
     
    235222      var infillProblem = InfillOptimizationAlgorithm.Problem as InfillProblem;
    236223      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no InfillProblem. Troubles with Eventhandling?");
    237       infillProblem.Problem = Problem;
    238 
    239224
    240225      //random
     
    259244
    260245    }
    261 
    262246    protected override void Run(CancellationToken cancellationToken) {
    263247      //initial samples
     
    294278    }
    295279
    296     public void SetInitialSamples(RealVector[] individuals, double[] qualities) {
    297       InitialSamples = individuals.Zip(qualities, (individual, d) => new Tuple<RealVector, double>(individual, d)).ToList();
    298     }
    299 
    300280    #region Eventhandling
    301281    private void RegisterEventhandlers() {
     
    318298    private void OnInfillOptimizationAlgorithmChanged(object sender, EventArgs args) {
    319299      SetInfillProblem();
    320       InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
    321300      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
    322301    }
     
    334313      RegressionAlgorithm.Problem = new RegressionProblem();
    335314    }
    336     private void OnInitialSamplesChanged(object sender, EventArgs args) {
    337       IRegressionProblemData samplesData = null;
    338       using (var importTypeDialog = new RegressionImportTypeDialog()) {
    339         if (importTypeDialog.ShowDialog() != DialogResult.OK) return;
    340         samplesData = new RegressionCSVInstanceProvider().ImportData(importTypeDialog.Path, importTypeDialog.ImportType, importTypeDialog.CSVFormat);
    341         InitialSamplesParameter.ToStringChanged -= OnInitialSamplesChanged;
    342         InitialSamplesParameter.Value.Value = importTypeDialog.Path;
    343         InitialSamplesParameter.ToStringChanged -= OnInitialSamplesChanged;
    344 
    345       }
    346 
    347 
    348 
    349       var solutions = new RealVector[samplesData.Dataset.Rows];
    350       var qualities = new double[samplesData.Dataset.Rows];
    351       var inputVariables = samplesData.InputVariables.CheckedItems.ToArray();
    352       for (var i = 0; i < solutions.Length; i++) {
    353         qualities[i] = samplesData.Dataset.GetDoubleValue(samplesData.TargetVariable, i);
    354         solutions[i] = new RealVector(inputVariables.Length);
    355         for (var j = 0; j < inputVariables.Length; j++) solutions[i][j] = samplesData.Dataset.GetDoubleValue(inputVariables[j].Value.Value, i);
    356       }
    357 
    358       SetInitialSamples(solutions, qualities);
    359 
    360     }
    361 
     315    private void OnInitialSamplesChanged(object sender, EventArgs args) { }
    362316    protected override void OnExecutionTimeChanged() {
    363317      base.OnExecutionTimeChanged();
     
    376330      base.Stop();
    377331    }
    378     protected override void OnProblemChanged() {
    379       base.OnProblemChanged();
    380       var infillProblem = InfillOptimizationAlgorithm.Problem as InfillProblem;
    381       if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no InfillProblem. Troubles with Eventhandling?");
    382       infillProblem.Problem = Problem;
    383     }
    384332    #endregion
    385333
    386334    #region helpers
    387     private void SetInfillProblem() {
    388       var infillProblem = new InfillProblem {
    389         InfillCriterion = InfillCriterion,
    390         Problem = Problem
    391       };
    392       InfillOptimizationAlgorithm.Problem = infillProblem;
    393     }
    394335    private IRegressionSolution BuildModel(CancellationToken cancellationToken) {
    395336      var dataset = EgoUtilities.GetDataSet(DataSamples.ToList(), RemoveDuplicates);
     
    406347      IRegressionSolution solution = null;
    407348
    408       while (solution == null && i++ < 100) {  //TODO: Question: Why does GP degenerate to NaN so often? Answer: There is not even the slightest mitigation strategy for "almost duplicates" that ill-condition the covariance matrix.
     349      while (solution == null && i++ < 100) {
    409350        var results = EgoUtilities.SyncRunSubAlgorithm(RegressionAlgorithm, Random.Next(int.MaxValue));
    410351        solution = results.Select(x => x.Value).OfType<IRegressionSolution>().SingleOrDefault();
     
    451392      //parameterize and check InfillProblem
    452393      var infillProblem = InfillOptimizationAlgorithm.Problem as InfillProblem;
    453       if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have InfillProblem. Problem with Eventhandling?");
    454       if (infillProblem.InfillCriterion != InfillCriterion) throw new ArgumentException("InfillCiriterion for Problem is not correct. Problem with Eventhandling?");
    455       if (infillProblem.Problem != Problem) throw new ArgumentException("Expensive real problem is not correctly set in InfillProblem. Problem with Eventhandling?");
    456       InfillCriterion.Initialize(ResultsModel, Problem.Maximization, infillProblem.Encoding);
     394      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
     395      if (infillProblem.InfillCriterion != InfillCriterion) throw new ArgumentException("InfillCiriterion for Problem is not correctly set.");
     396      var enc = Problem.Encoding as RealVectorEncoding;
     397      infillProblem.Encoding.Bounds = enc.Bounds;
     398      infillProblem.Encoding.Length = enc.Length;
     399      infillProblem.Initialize(ResultsModel, Problem.Maximization);
     400
     401
    457402
    458403      RealVector bestVector = null;
    459404      var bestValue = infillProblem.Maximization ? double.NegativeInfinity : double.PositiveInfinity;
    460 
    461405      for (var i = 0; i < InfillOptimizationRestarts; i++) {
    462406        //optimize
     
    464408        cancellationToken.ThrowIfCancellationRequested();
    465409        //extract results
    466         if (!res.ContainsKey(BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
    467         var v = res[BestInfillSolutionResultName].Value as RealVector;
    468         if (!res.ContainsKey(BestInfillQualityResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best quality");
    469         var d = res[BestInfillQualityResultName].Value as DoubleValue;
     410        if (!res.ContainsKey(InfillProblem.BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
     411        var v = res[InfillProblem.BestInfillSolutionResultName].Value as RealVector;
     412        if (!res.ContainsKey(InfillProblem.BestInfillQualityResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best quality");
     413        var d = res[InfillProblem.BestInfillQualityResultName].Value as DoubleValue;
    470414        if (d == null || v == null) throw new ArgumentException("The InfillOptimizationAlgorithm did not return the expected result types");
    471 
    472415        //check for improvement
    473416        if (infillProblem.Maximization != d.Value > bestValue) continue;
     
    475418        bestVector = v;
    476419      }
    477 
    478420      InfillOptimizationAlgorithm.Runs.Clear();
    479421      return bestVector;
    480422    }
    481     private Tuple<RealVector, double> Evaluate(RealVector point) {
    482       return new Tuple<RealVector, double>(point, Problem.Evaluate(GetIndividual(point), Random));
    483     }
     423
    484424    private void Analyze() {
    485425      ResultsEvaluations = Samples.Count;
     
    493433      ResultsQualitiesWorst.Values.Add(Samples[Problem.Maximization ? min : max].Item2);
    494434      Problem.Analyze(Samples.Select(x => GetIndividual(x.Item1)).ToArray(), Samples.Select(x => x.Item2).ToArray(), Results, Random);
    495 
    496       if (Samples.Count != 0 && Samples[0].Item1.Length == 2) {
    497         var plotname = "DEBUG:Sample Distribution";
    498         var rowInit = "Initial Samples";
    499         var rowAll = "All Samples";
    500         if (!Results.ContainsKey(plotname)) Results.Add(new Result(plotname, new ScatterPlot()));
    501         var plot = Results[plotname].Value as ScatterPlot;
    502         if (!plot.Rows.ContainsKey(rowInit) && InitialSamples != null && InitialSamples.Count > 0)
    503           plot.Rows.Add(new ScatterPlotDataRow(rowInit, "samples from inital file (already evaulated)", InitialSamples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1]))));
    504         if (!plot.Rows.ContainsKey(rowAll)) plot.Rows.Add(new ScatterPlotDataRow(rowAll, "All samples", new Point2D<double>[0]));
    505         else { plot.Rows[rowAll].Points.Clear(); }
    506         plot.Rows[rowAll].Points.AddRange(Samples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1])));
    507 
    508 
    509       }
    510     }
     435      if (Samples.Count != 0 && Samples[0].Item1.Length == 2) AnalyzeSampleDistribution();
     436      AnalyzePredictionCorrelation();
     437    }
     438
     439    private void AnalyzeSampleDistribution() {
     440      const string plotname = "DEBUG:Sample Distribution";
     441      const string rowInit = "Initial Samples";
     442      const string rowAll = "All Samples";
     443      if (!Results.ContainsKey(plotname)) Results.Add(new Result(plotname, new ScatterPlot()));
     444      var plot = (ScatterPlot)Results[plotname].Value;
     445      if (!plot.Rows.ContainsKey(rowInit) && InitialSamples != null && InitialSamples.Count > 0)
     446        plot.Rows.Add(new ScatterPlotDataRow(rowInit, "samples from inital file (already evaulated)", InitialSamples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1]))));
     447      if (!plot.Rows.ContainsKey(rowAll)) plot.Rows.Add(new ScatterPlotDataRow(rowAll, "All samples", new Point2D<double>[0]));
     448      else { plot.Rows[rowAll].Points.Clear(); }
     449      plot.Rows[rowAll].Points.AddRange(Samples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1])));
     450    }
     451
     452    private void AnalyzePredictionCorrelation() {
     453      const string plotName = "Prediction";
     454      const string rowName = "Samples";
     455      const string lastrowName = "Last Sample";
     456      if (!Results.ContainsKey(plotName)) Results.Add(new Result(plotName, new ScatterPlot()));
     457      var plot = (ScatterPlot)Results[plotName].Value;
     458      if (!plot.Rows.ContainsKey(rowName)) plot.Rows.Add(new ScatterPlotDataRow(rowName, rowName, new List<Point2D<double>>()));
     459      if (!plot.Rows.ContainsKey(lastrowName)) plot.Rows.Add(new ScatterPlotDataRow(lastrowName, lastrowName, new List<Point2D<double>>()));
     460      var p = Samples[Samples.Count - 1];
     461      if (ResultsModel != null) plot.Rows[rowName].Points.Add(new Point2D<double>(ResultsModel.Model.GetEstimation(p.Item1), p.Item2, p.Item1));
     462      plot.VisualProperties.YAxisTitle = "True Objective Value";
     463      plot.VisualProperties.XAxisTitle = "Predicted Objective Value";
     464
     465    }
     466
    511467    private Individual GetIndividual(RealVector r) {
    512468      var scope = new Scope();
     
    514470      return new SingleEncodingIndividual(Problem.Encoding, scope);
    515471    }
     472    private Tuple<RealVector, double> Evaluate(RealVector point) {
     473      return new Tuple<RealVector, double>(point, Problem.Evaluate(GetIndividual(point), Random));
     474    }
     475
     476    private void SetInfillProblem() {
     477      InfillOptimizationAlgorithm.Problem = new InfillProblem { InfillCriterion = InfillCriterion };
     478    }
    516479    #endregion
    517480  }
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/EgoUtilities.cs

    r14833 r15064  
    2525using System.Threading;
    2626using HeuristicLab.Common;
     27using HeuristicLab.Core;
    2728using HeuristicLab.Data;
    2829using HeuristicLab.Encodings.RealVectorEncoding;
     
    3233namespace HeuristicLab.Algorithms.EGO {
    3334  internal static class EgoUtilities {
     35    //Extention methods for convenience
    3436    public static int ArgMax<T>(this IEnumerable<T> values, Func<T, double> func) {
    3537      var max = double.MinValue;
     
    4951      return ArgMax(values, x => -func.Invoke(x));
    5052    }
     53    public static double GetEstimation(this IRegressionModel model, RealVector r) {
     54      var dataset = GetDataSet(new[] { new Tuple<RealVector, double>(r, 0.0) }, false);
     55      return model.GetEstimatedValues(dataset, new[] { 0 }).First();
     56    }
     57    public static double GetVariance(this IConfidenceRegressionModel model, RealVector r) {
     58      var dataset = GetDataSet(new[] { new Tuple<RealVector, double>(r, 0.0) }, false);
     59      return model.GetEstimatedVariances(dataset, new[] { 0 }).First();
     60    }
     61    public static double GetDoubleValue(this IDataset dataset, int i, int j) {
     62      return dataset.GetDoubleValue("input" + j, i);
     63    }
    5164
     65    //Sub-ALgorithms
    5266    public static ResultCollection SyncRunSubAlgorithm(IAlgorithm alg, int random) {
    5367
     
    6478      EventWaitHandle trigger = new AutoResetEvent(false);
    6579      Exception ex = null;
    66       EventHandler<EventArgs<Exception>> exhandler = (sender, e) => ex = e.Value;
     80      EventHandler<EventArgs<Exception>> exhandler = (sender, e) => { ex = e.Value; trigger.Set(); };
    6781      EventHandler stoppedHandler = (sender, e) => trigger.Set();
     82
     83      alg.ExceptionOccurred -= exhandler; //avoid double attaching in case of pause
    6884      alg.ExceptionOccurred += exhandler;
     85      alg.Stopped -= stoppedHandler;
    6986      alg.Stopped += stoppedHandler;
    70       alg.Prepare();
     87      alg.Paused -= stoppedHandler;
     88      alg.Paused += stoppedHandler;
     89
     90      if (alg.ExecutionState != ExecutionState.Paused) alg.Prepare();
    7191      alg.Start();
    7292      trigger.WaitOne();
     
    7797    }
    7898
    79     public static double GetEstimation(this IRegressionModel model, RealVector r) {
    80       var dataset = GetDataSet(new[] { new Tuple<RealVector, double>(r, 0.0) }, false);
    81       return model.GetEstimatedValues(dataset, new[] { 0 }).First();
    82     }
    83     public static double GetVariance(this IConfidenceRegressionModel model, RealVector r) {
    84       var dataset = GetDataSet(new[] { new Tuple<RealVector, double>(r, 0.0) }, false);
    85       return model.GetEstimatedVariances(dataset, new[] { 0 }).First();
    86     }
    87 
    88 
    89     public static double GetDoubleValue(this IDataset dataset, int i, int j) {
    90       return dataset.GetDoubleValue("input" + j, i);
    91     }
     99    //RegressionModel extensions
     100    public const double DuplicateResolution = 0.0001;
    92101    public static Dataset GetDataSet(IReadOnlyList<Tuple<RealVector, double>> samples, bool removeDuplicates) {
    93       if (removeDuplicates)
    94         samples = RemoveDuplicates(samples); //TODO duplicates require heteroskedasticity in Models
    95 
    96 
     102      if (removeDuplicates) samples = RemoveDuplicates(samples); //TODO duplicate removal leads to incorrect uncertainty values in models
    97103      var dimensions = samples[0].Item1.Length + 1;
    98104      var data = new double[samples.Count, dimensions];
    99105      var names = new string[dimensions - 1];
    100106      for (var i = 0; i < names.Length; i++) names[i] = "input" + i;
    101 
    102107      for (var j = 0; j < samples.Count; j++) {
    103108        for (var i = 0; i < names.Length; i++) data[j, i] = samples[j].Item1[i];
    104109        data[j, dimensions - 1] = samples[j].Item2;
    105 
    106110      }
    107 
    108 
    109111      return new Dataset(names.Concat(new[] { "output" }).ToArray(), data);
    110112    }
    111 
    112113    private static IReadOnlyList<Tuple<RealVector, double>> RemoveDuplicates(IReadOnlyList<Tuple<RealVector, double>> samples) {
    113114      var res = new List<Tuple<RealVector, double, int>>();
    114 
    115115      foreach (var sample in samples) {
    116116        if (res.Count == 0) {
     
    118118          continue;
    119119        }
    120 
    121120        var index = res.ArgMin(x => Euclidian(sample.Item1, x.Item1));
    122121        var d = Euclidian(res[index].Item1, sample.Item1);
    123         if (d > 0.0001)
     122        if (d > DuplicateResolution)
    124123          res.Add(new Tuple<RealVector, double, int>(sample.Item1, sample.Item2, 1));
    125124        else {
     
    131130      return res.Select(x => new Tuple<RealVector, double>(x.Item1, x.Item2 / x.Item3)).ToArray();
    132131    }
    133 
    134132    private static double Euclidian(IEnumerable<double> a, IEnumerable<double> b) {
    135133      return Math.Sqrt(a.Zip(b, (d, d1) => d - d1).Sum(d => d * d));
    136134    }
    137 
    138     public static DoubleMatrix GetBoundingBox(IEnumerable<RealVector> vectors) {
    139       DoubleMatrix res = null;
    140       foreach (var vector in vectors)
    141         if (res == null) {
    142           res = new DoubleMatrix(vector.Length, 2);
    143           for (var i = 0; i < vector.Length; i++)
    144             res[i, 0] = res[i, 1] = vector[i];
    145         } else
    146           for (var i = 0; i < vector.Length; i++) {
    147             res[i, 0] = Math.Min(vector[i], res[i, 0]);
    148             res[i, 1] = Math.Max(vector[i], res[i, 1]);
    149           }
    150       return res;
    151     }
    152 
    153 
    154135  }
    155136}
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/HeuristicLab.Algorithms.EGO-3.4.csproj

    r14833 r15064  
    3939      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.DataAnalysis-3.4.dll</HintPath>
    4040    </Reference>
    41     <Reference Include="HeuristicLab.Algorithms.EvolutionStrategy-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    42       <SpecificVersion>False</SpecificVersion>
    43       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Algorithms.EvolutionStrategy-3.3.dll</HintPath>
    44     </Reference>
    4541    <Reference Include="HeuristicLab.Analysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    4642      <SpecificVersion>False</SpecificVersion>
     
    5551      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Common-3.3.dll</HintPath>
    5652    </Reference>
    57     <Reference Include="HeuristicLab.Common.Resources-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    58       <SpecificVersion>False</SpecificVersion>
    59       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Common.Resources-3.3.dll</HintPath>
    60     </Reference>
    6153    <Reference Include="HeuristicLab.Core-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    6254      <SpecificVersion>False</SpecificVersion>
     
    6658      <SpecificVersion>False</SpecificVersion>
    6759      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Data-3.3.dll</HintPath>
     60    </Reference>
     61    <Reference Include="HeuristicLab.Encodings.PermutationEncoding-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     62      <SpecificVersion>False</SpecificVersion>
     63      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Encodings.PermutationEncoding-3.3.dll</HintPath>
    6864    </Reference>
    6965    <Reference Include="HeuristicLab.Encodings.RealVectorEncoding-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     
    7571      <SpecificVersion>False</SpecificVersion>
    7672      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Optimization-3.3.dll</HintPath>
     73    </Reference>
     74    <Reference Include="HeuristicLab.Optimization.Operators-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     75      <SpecificVersion>False</SpecificVersion>
     76      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Optimization.Operators-3.3.dll</HintPath>
    7777    </Reference>
    7878    <Reference Include="HeuristicLab.Parameters-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     
    9292      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.DataAnalysis-3.4.dll</HintPath>
    9393    </Reference>
    94     <Reference Include="HeuristicLab.Problems.Instances-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     94    <Reference Include="HeuristicLab.Problems.Instances-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec" />
     95    <Reference Include="HeuristicLab.Problems.SurrogateProblem-3.3, Version=3.3.0.0, Culture=neutral, processorArchitecture=MSIL">
    9596      <SpecificVersion>False</SpecificVersion>
    96       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances-3.3.dll</HintPath>
    97     </Reference>
    98     <Reference Include="HeuristicLab.Problems.Instances.DataAnalysis-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    99       <SpecificVersion>False</SpecificVersion>
    100       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances.DataAnalysis-3.3.dll</HintPath>
    101     </Reference>
    102     <Reference Include="HeuristicLab.Problems.Instances.DataAnalysis.Views-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
    103       <SpecificVersion>False</SpecificVersion>
    104       <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.Instances.DataAnalysis.Views-3.3.dll</HintPath>
     97      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Problems.SurrogateProblem-3.3.dll</HintPath>
    10598    </Reference>
    10699    <Reference Include="HeuristicLab.Random-3.3, Version=3.3.0.0, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">
     
    108101      <HintPath>..\..\..\trunk\sources\bin\HeuristicLab.Random-3.3.dll</HintPath>
    109102    </Reference>
     103    <Reference Include="Microsoft.Build.Tasks.v4.0" />
    110104    <Reference Include="System" />
    111105    <Reference Include="System.Core" />
    112     <Reference Include="System.Windows.Forms" />
    113     <Reference Include="System.Xml.Linq" />
    114     <Reference Include="System.Data.DataSetExtensions" />
    115106    <Reference Include="Microsoft.CSharp" />
    116     <Reference Include="System.Data" />
    117     <Reference Include="System.Net.Http" />
    118     <Reference Include="System.Xml" />
    119107  </ItemGroup>
    120108  <ItemGroup>
     
    122110    <Compile Include="EfficientGlobalOptimizationAlgorithm.cs" />
    123111    <Compile Include="InfillCriteria\AugmentedExpectedImprovement.cs" />
     112    <Compile Include="InfillCriteria\ExpectedImprovementBase.cs" />
     113    <Compile Include="InfillCriteria\NeighbourDistance.cs" />
    124114    <Compile Include="InfillCriteria\ExpectedQuantileImprovement.cs" />
    125115    <Compile Include="InfillCriteria\PluginExpectedImprovement.cs" />
    126     <Compile Include="InfillCriteria\RobustImprovement.cs" />
    127116    <Compile Include="InfillCriteria\MinimalQuantileCriterium.cs" />
    128117    <Compile Include="InfillCriteria\ExpectedQuality.cs" />
     
    132121    <Compile Include="Interfaces\ISurrogateAlgorithm.cs" />
    133122    <Compile Include="Interfaces\IInfillCriterion.cs" />
     123    <Compile Include="AdaptiveSamplingAlgorithm.cs" />
     124    <Compile Include="Operators\FitnessClusteringAnalyzer.cs" />
     125    <Compile Include="Operators\VariableVariabilityAnalyzer.cs" />
     126    <Compile Include="Operators\ModelQualityAnalyzer.cs" />
     127    <Compile Include="Operators\EvaluatedSolutionsAnalyzer.cs" />
     128    <Compile Include="Operators\ModelBuilder.cs" />
     129    <Compile Include="Operators\InfillSolver.cs" />
     130    <Compile Include="Operators\CorrelationAnalyzer.cs" />
     131    <Compile Include="Operators\SampleCollector.cs" />
    134132    <Compile Include="Plugin.cs" />
    135133    <Compile Include="Problems\InfillProblem.cs" />
    136134    <Compile Include="Properties\AssemblyInfo.cs" />
    137     <Compile Include="SamplingMethods\LatinHyperCubeDesign.cs" />
     135    <Compile Include="SamplingMethods\LatinHyperCubeDesignOLD.cs" />
     136    <Compile Include="SamplingMethods\LatinHyperCubeDesignCreator.cs" />
    138137    <Compile Include="SamplingMethods\UniformRandomSampling.cs" />
    139138  </ItemGroup>
     
    142141    <None Include="Properties\AssemblyInfo.cs.frame" />
    143142  </ItemGroup>
    144   <ItemGroup>
    145     <WCFMetadata Include="Service References\" />
    146   </ItemGroup>
     143  <ItemGroup />
    147144  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
    148145  <PropertyGroup>
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/AugmentedExpectedImprovement.cs

    r14818 r15064  
    3535  [StorableClass]
    3636  [Item("AugmentedExpectedImprovement", "Noisy InfillCriterion, Extension of the Expected Improvement as described in\n Global optimization of stochastic black-box systems via sequential kriging meta-models.\r\nHuang, D., Allen, T., Notz, W., Zeng, N.")]
    37   public class AugmentedExpectedImprovement : ExpectedImprovement {
    38 
    39 
     37  public class AugmentedExpectedImprovement : ExpectedImprovementBase {
    4038
    4139    #region Parameternames
    42 
    4340    public const string AlphaParameterName = "Alpha";
    44 
    4541    #endregion
    4642
    4743    #region Parameters
    48 
    4944    public IValueParameter<DoubleValue> AlphaParameter => Parameters[AlphaParameterName] as IValueParameter<DoubleValue>;
    50 
    5145    #endregion
    5246
    5347    #region Properties
    54 
    5548    public double Alpha => AlphaParameter.Value.Value;
    5649    [Storable]
    5750    private double Tau;
    58 
    5951    #endregion
    6052
    61 
    62     #region HL-Constructors, Serialization and Cloning
     53    #region Constructors, Serialization and Cloning
    6354    [StorableConstructor]
    64     private AugmentedExpectedImprovement(bool deserializing) : base(deserializing) { }
    65 
    66     private AugmentedExpectedImprovement(AugmentedExpectedImprovement original, Cloner cloner) : base(original, cloner) {
     55    protected AugmentedExpectedImprovement(bool deserializing) : base(deserializing) { }
     56    protected AugmentedExpectedImprovement(AugmentedExpectedImprovement original, Cloner cloner) : base(original, cloner) {
    6757      Tau = original.Tau;
    6858    }
    69 
    7059    public AugmentedExpectedImprovement() {
    7160      Parameters.Add(new ValueParameter<DoubleValue>(AlphaParameterName, "The Alpha value specifiying the robustness of the \"effective best solution\". Recommended value is 1", new DoubleValue(1.0)));
    72 
    7361    }
    7462    public override IDeepCloneable Clone(Cloner cloner) {
     
    8270    }
    8371
    84     protected override void Initialize() {
    85       if (ExpensiveMaximization) throw new NotImplementedException("AugmentedExpectedImprovement for maximization not yet implemented");
    86       var solution = RegressionSolution as IConfidenceRegressionSolution;
    87       if (solution == null) throw new ArgumentException("can not calculate Augmented EI without a regression solution providing confidence values");
     72    protected override double Evaluate(RealVector vector, double estimatedFitness, double estimatedStandardDeviation) {
     73      var d = GetEstimatedImprovement(BestFitness, estimatedFitness, estimatedStandardDeviation, ExploitationWeight, ExpensiveMaximization);
     74      return d * (1 - Tau / Math.Sqrt(estimatedStandardDeviation * estimatedStandardDeviation + Tau * Tau));
     75    }
    8876
     77    protected override double FindBestFitness(IConfidenceRegressionSolution solution) {
    8978      Tau = RegressionSolution.EstimatedTrainingValues.Zip(RegressionSolution.ProblemData.TargetVariableTrainingValues, (d, d1) => Math.Abs(d - d1)).Average();
    90       var xss = new RealVector(Encoding.Length);
     79      var bestSolution = new RealVector(Encoding.Length);
    9180      var xssIndex = solution.EstimatedTrainingValues.Zip(solution.EstimatedTrainingValues, (m, s2) => m + Alpha * Math.Sqrt(s2)).ArgMin(x => x);
    9281      var i = solution.ProblemData.TrainingIndices.ToArray()[xssIndex];
    93       for (var j = 0; j < Encoding.Length; j++) xss[j] = solution.ProblemData.Dataset.GetDoubleValue(i, j);
    94 
    95       YMin = RegressionSolution.Model.GetEstimation(xss);
     82      for (var j = 0; j < Encoding.Length; j++) bestSolution[j] = solution.ProblemData.Dataset.GetDoubleValue(i, j);
     83      return RegressionSolution.Model.GetEstimation(bestSolution);
    9684    }
    9785  }
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/ExpectedImprovement.cs

    r14818 r15064  
    2424using HeuristicLab.Common;
    2525using HeuristicLab.Core;
    26 using HeuristicLab.Data;
    2726using HeuristicLab.Encodings.RealVectorEncoding;
    28 using HeuristicLab.Parameters;
    2927using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    3028using HeuristicLab.Problems.DataAnalysis;
     
    3533  [StorableClass]
    3634  [Item("ExpectedImprovementMeassure", "Extension of the Expected Improvement to a weighted version by ANDRAS SÓBESTER , STEPHEN J. LEARY and ANDY J. KEANE   in \n On the Design of Optimization Strategies Based on Global Response Surface Approximation Models")]
    37   public class ExpectedImprovement : InfillCriterionBase {
    38 
    39     #region ParameterNames
    40     private const string ExploitationWeightParameterName = "ExploitationWeight";
    41     #endregion
    42 
    43     #region ParameterProperties
    44     public IFixedValueParameter<DoubleValue> ExploitationWeightParameter => Parameters[ExploitationWeightParameterName] as IFixedValueParameter<DoubleValue>;
    45 
    46     #endregion
    47 
    48     #region Properties
    49     protected double ExploitationWeight => ExploitationWeightParameter.Value.Value;
    50 
    51     [Storable]
    52     protected double YMin;
    53     #endregion
    54 
    55     #region HL-Constructors, Serialization and Cloning
     35  public sealed class ExpectedImprovement : ExpectedImprovementBase {
     36    #region Constructors, Serialization and Cloning
    5637    [StorableConstructor]
    57     protected ExpectedImprovement(bool deserializing) : base(deserializing) { }
    58     [StorableHook(HookType.AfterDeserialization)]
    59     private void AfterDeserialization() {
    60       RegisterEventhandlers();
    61     }
    62     protected ExpectedImprovement(ExpectedImprovement original, Cloner cloner) : base(original, cloner) {
    63       RegisterEventhandlers();
    64     }
    65     public ExpectedImprovement() {
    66       Parameters.Add(new FixedValueParameter<DoubleValue>(ExploitationWeightParameterName, "A value between 0 and 1 indicating the focus on exploration (0) or exploitation (1)", new DoubleValue(0.5)));
    67       RegisterEventhandlers();
    68     }
     38    private ExpectedImprovement(bool deserializing) : base(deserializing) { }
     39    private ExpectedImprovement(ExpectedImprovement original, Cloner cloner) : base(original, cloner) { }
     40    public ExpectedImprovement() { }
    6941    public override IDeepCloneable Clone(Cloner cloner) {
    7042      return new ExpectedImprovement(this, cloner);
     
    7648      var yhat = model.GetEstimation(vector);
    7749      var s = Math.Sqrt(model.GetVariance(vector));
    78       return GetEstimatedImprovement(YMin, yhat, s, ExploitationWeight);
     50      return GetEstimatedImprovement(BestFitness, yhat, s, ExploitationWeight, ExpensiveMaximization);
    7951    }
    8052
    81     public override bool Maximization() {
    82       return true;
     53    protected override double Evaluate(RealVector vector, double estimatedFitness, double estimatedStandardDeviation) {
     54      return GetEstimatedImprovement(BestFitness, estimatedFitness, estimatedStandardDeviation, ExploitationWeight, ExpensiveMaximization);
    8355    }
    8456
    85     protected override void Initialize() {
    86       if (ExpensiveMaximization) throw new NotImplementedException("Expected Improvement for maximization not yet implemented");
    87       var model = RegressionSolution.Model as IConfidenceRegressionModel;
    88       if (model == null) throw new ArgumentException("can not calculate EI without confidence measure");
    89       YMin = RegressionSolution.ProblemData.TargetVariableTrainingValues.Min();
     57    protected override double FindBestFitness(IConfidenceRegressionSolution solution) {
     58      return ExpensiveMaximization ? solution.ProblemData.TargetVariableTrainingValues.Max() : solution.ProblemData.TargetVariableTrainingValues.Min();
    9059    }
    91 
    92     #region Eventhandling
    93     private void RegisterEventhandlers() {
    94       DeregisterEventhandlers();
    95       ExploitationWeightParameter.Value.ValueChanged += OnExploitationWeightChanged;
    96     }
    97     private void DeregisterEventhandlers() {
    98       ExploitationWeightParameter.Value.ValueChanged -= OnExploitationWeightChanged;
    99     }
    100     private void OnExploitationWeightChanged(object sender, EventArgs e) {
    101       ExploitationWeightParameter.Value.ValueChanged -= OnExploitationWeightChanged;
    102       ExploitationWeightParameter.Value.Value = Math.Max(0, Math.Min(ExploitationWeight, 1));
    103       ExploitationWeightParameter.Value.ValueChanged += OnExploitationWeightChanged;
    104     }
    105     #endregion
    106 
    107     #region Helpers
    108     protected static double GetEstimatedImprovement(double ymin, double yhat, double s, double w) {
    109       if (Math.Abs(s) < double.Epsilon) return 0;
    110       var val = (ymin - yhat) / s;
    111       var res = w * (ymin - yhat) * StandardNormalDistribution(val) + (1 - w) * s * StandardNormalDensity(val);
    112       return double.IsInfinity(res) || double.IsNaN(res) ? 0 : res;
    113     }
    114 
    115     private static double StandardNormalDensity(double x) {
    116       if (Math.Abs(x) > 10) return 0;
    117       return Math.Exp(-0.5 * x * x) / Math.Sqrt(2 * Math.PI);
    118     }
    119 
    120     //taken from https://www.johndcook.com/blog/2009/01/19/stand-alone-error-function-erf/
    121     private static double StandardNormalDistribution(double x) {
    122       if (x > 10) return 1;
    123       if (x < -10) return 0;
    124       const double a1 = 0.254829592;
    125       const double a2 = -0.284496736;
    126       const double a3 = 1.421413741;
    127       const double a4 = -1.453152027;
    128       const double a5 = 1.061405429;
    129       const double p = 0.3275911;
    130       var sign = x < 0 ? -1 : 1;
    131       x = Math.Abs(x) / Math.Sqrt(2.0);
    132       var t = 1.0 / (1.0 + p * x);
    133       var y = 1.0 - ((((a5 * t + a4) * t + a3) * t + a2) * t + a1) * t * Math.Exp(-x * x);
    134       return 0.5 * (1.0 + sign * y);
    135     }
    136     #endregion
    13760  }
    13861}
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/ExpectedQuality.cs

    r14818 r15064  
    2929
    3030  [StorableClass]
    31   [Item("ExpectedQualityMeassure", "Use simply the qualitypredicted by the model")]
     31  [Item("ExpectedQualityMeassure", "Use the quality predicted by the model")]
    3232  public class ExpectedQuality : InfillCriterionBase {
    33     #region HL-Constructors, Serialization and Cloning
     33    #region Constructors, Serialization and Cloning
    3434    [StorableConstructor]
    35     private ExpectedQuality(bool deserializing) : base(deserializing) { }
    36     private ExpectedQuality(ExpectedQuality original, Cloner cloner) : base(original, cloner) { }
     35    protected ExpectedQuality(bool deserializing) : base(deserializing) { }
     36    protected ExpectedQuality(ExpectedQuality original, Cloner cloner) : base(original, cloner) { }
    3737    public ExpectedQuality() { }
    3838    public override IDeepCloneable Clone(Cloner cloner) {
     
    4242
    4343    public override double Evaluate(RealVector vector) {
    44       return RegressionSolution.Model.GetEstimation(vector);
     44      return ExpensiveMaximization ? RegressionSolution.Model.GetEstimation(vector) : -RegressionSolution.Model.GetEstimation(vector);
    4545    }
    4646
    47     public override bool Maximization() {
    48       return ExpensiveMaximization;
    49     }
    50 
    51     protected override void Initialize() {
     47    public override void Initialize() {
    5248    }
    5349  }
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/ExpectedQuantileImprovement.cs

    r14818 r15064  
    3535  [StorableClass]
    3636  [Item("ExpectedQuantileImprovement", "Noisy InfillCriterion, Extension of the Expected Improvement as described in \n Noisy expectedimprovement and on - line computation time allocation for the optimization of simulators with tunable fidelitys\r\nPicheny, V., Ginsbourger, D., Richet, Y")]
    37   public class ExpectedQuantileImprovement : ExpectedImprovement {
     37  public class ExpectedQuantileImprovement : ExpectedImprovementBase {
    3838
    3939    #region Parameternames
     
    4848
    4949    #region Properties
    50 
    5150    public int MaxEvaluations => MaxEvaluationsParameter.Value.Value;
    5251    public double Alpha => AlphaParameter.Value.Value;
    5352    [Storable]
    5453    private double Tau;
    55 
    5654    #endregion
    5755
    5856    #region HL-Constructors, Serialization and Cloning
    5957    [StorableConstructor]
    60     private ExpectedQuantileImprovement(bool deserializing) : base(deserializing) { }
    61 
    62     private ExpectedQuantileImprovement(ExpectedQuantileImprovement original, Cloner cloner) : base(original, cloner) {
     58    protected ExpectedQuantileImprovement(bool deserializing) : base(deserializing) { }
     59    protected ExpectedQuantileImprovement(ExpectedQuantileImprovement original, Cloner cloner) : base(original, cloner) {
    6360      Tau = original.Tau;
    6461    }
    65 
    6662    public ExpectedQuantileImprovement() {
    6763      Parameters.Add(new FixedValueParameter<DoubleValue>(AlphaParameterName, "The Alpha value specifiying the robustness of the \"effective best solution\". Recommended value is 1.0", new DoubleValue(1.0)));
    68       Parameters.Add(new ValueParameter<IntValue>(MaxEvaluationsParameterName, "The maximum number of evaluations allowed for EGO", new IntValue(100)));
     64      Parameters.Add(new ValueParameter<IntValue>(MaxEvaluationsParameterName, "The maximum number of evaluations allowed for EGO", new IntValue(500)));
    6965      MaxEvaluationsParameter.Hidden = true;
    7066    }
     
    7470    #endregion
    7571
    76     public override double Evaluate(RealVector vector) {
    77       var model = RegressionSolution.Model as IConfidenceRegressionModel;
    78       var s2 = model.GetVariance(vector);
     72    protected override double FindBestFitness(IConfidenceRegressionSolution solution) {
     73      Tau = RegressionSolution.EstimatedTrainingValues.Zip(solution.ProblemData.TargetVariableTrainingValues, (d, d1) => Math.Abs(d - d1)).Average();
     74      Tau = Tau * Tau / (MaxEvaluations - solution.ProblemData.Dataset.Rows % MaxEvaluations + 1);
    7975
    80       var yhat = model.GetEstimation(vector) + Alpha * Math.Sqrt(Tau * s2 / (Tau + s2));
    81       var s = Math.Sqrt(s2 * s2 / (Tau + s2));
     76      var index = solution.EstimatedTrainingValues.Zip(solution.EstimatedTrainingVariances, (m, s2) => m + Alpha * Math.Sqrt(s2)).ArgMin(x => x);
     77      return solution.EstimatedTrainingValues.ToArray()[index];
    8278
    83       return GetEstimatedImprovement(YMin, yhat, s, ExploitationWeight);
    8479    }
    8580
    86     protected override void Initialize() {
    87       if (ExpensiveMaximization) throw new NotImplementedException("AugmentedExpectedImprovement for maximization not yet implemented");
    88       var solution = RegressionSolution as IConfidenceRegressionSolution;
    89       if (solution == null) throw new ArgumentException("can not calculate Augmented EI without a regression solution providing confidence values");
    90 
    91       Tau = RegressionSolution.EstimatedTrainingValues.Zip(RegressionSolution.ProblemData.TargetVariableTrainingValues, (d, d1) => Math.Abs(d - d1)).Average();
    92       Tau = Tau * Tau / (MaxEvaluations - RegressionSolution.ProblemData.Dataset.Rows + 1);
    93 
    94       var xss = new RealVector(Encoding.Length);
    95       var xssIndex = solution.EstimatedTrainingVariances.Zip(solution.EstimatedTrainingVariances, (m, s2) => m + Alpha * Math.Sqrt(s2)).ArgMin(x => x);
    96       var i = solution.ProblemData.TrainingIndices.ToArray()[xssIndex];
    97       for (var j = 0; j < Encoding.Length; j++) xss[j] = solution.ProblemData.Dataset.GetDoubleValue(i, j);
    98 
    99       YMin = RegressionSolution.Model.GetEstimation(xss);
     81    protected override double Evaluate(RealVector vector, double estimatedFitness, double estimatedStandardDeviation) {
     82      var s2 = estimatedStandardDeviation * estimatedStandardDeviation;
     83      var penalty = Alpha * Math.Sqrt(Tau * s2 / (Tau + s2));
     84      var yhat = estimatedFitness + (ExpensiveMaximization ? -penalty : penalty);
     85      var s = Math.Sqrt(s2 * s2 / (Tau + s2));
     86      return GetEstimatedImprovement(BestFitness, yhat, s, ExploitationWeight, ExpensiveMaximization);
    10087    }
    10188
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/InfillCriterionBase.cs

    r14818 r15064  
    3030  [StorableClass]
    3131  public abstract class InfillCriterionBase : ParameterizedNamedItem, IInfillCriterion {
    32 
    3332    [Storable]
    34     protected IRegressionSolution RegressionSolution;
     33    public IRegressionSolution RegressionSolution { get; set; }
    3534    [Storable]
    36     protected bool ExpensiveMaximization;
     35    public bool ExpensiveMaximization { get; set; }
    3736    [Storable]
    38     protected RealVectorEncoding Encoding;
     37    public RealVectorEncoding Encoding { get; set; }
    3938
    4039    protected InfillCriterionBase(bool deserializing) : base(deserializing) { }
    41 
    4240    protected InfillCriterionBase(InfillCriterionBase original, Cloner cloner) : base(original, cloner) {
    4341      RegressionSolution = cloner.Clone(original.RegressionSolution);
     
    4846
    4947    public abstract double Evaluate(RealVector vector);
    50     public abstract bool Maximization();
     48    //public abstract bool Maximization();
    5149
    52     public void Initialize(IRegressionSolution solution, bool expensiveMaximization, RealVectorEncoding encoding) {
    53       RegressionSolution = solution;
    54       ExpensiveMaximization = expensiveMaximization;
    55       Encoding = encoding;
    56       Initialize();
    57     }
    58 
    59     protected abstract void Initialize();
    60 
     50    public abstract void Initialize();
    6151  }
    6252}
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/MinimalQuantileCriterium.cs

    r14818 r15064  
    4242    #region ParameterProperties
    4343    public IFixedValueParameter<DoubleValue> ConfidenceWeightParameter => Parameters[ConfidenceWeightParameterName] as IFixedValueParameter<DoubleValue>;
    44 
    4544    #endregion
    4645
    4746    #region Properties
    4847    private double ConfidenceWeight => ConfidenceWeightParameter.Value.Value;
    49 
    5048    #endregion
    5149
    52     #region HL-Constructors, Serialization and Cloning
     50    #region Constructors, Serialization and Cloning
    5351    [StorableConstructor]
    54     private MinimalQuantileCriterium(bool deserializing) : base(deserializing) { }
    55     private MinimalQuantileCriterium(MinimalQuantileCriterium original, Cloner cloner) : base(original, cloner) { }
     52    protected MinimalQuantileCriterium(bool deserializing) : base(deserializing) { }
     53    protected MinimalQuantileCriterium(MinimalQuantileCriterium original, Cloner cloner) : base(original, cloner) { }
    5654    public MinimalQuantileCriterium() {
    57       Parameters.Add(new FixedValueParameter<DoubleValue>(ConfidenceWeightParameterName, "A value between 0 and 1 indicating the focus on exploration (0) or exploitation (1)", new DoubleValue(0.5)));
     55      Parameters.Add(new FixedValueParameter<DoubleValue>(ConfidenceWeightParameterName, "A value greater than 0. The larger the value the stronger the emphasis on exploration", new DoubleValue(0.5)));
    5856    }
    5957    public override IDeepCloneable Clone(Cloner cloner) {
     
    6664      var yhat = model.GetEstimation(vector);
    6765      var s = Math.Sqrt(model.GetVariance(vector)) * ConfidenceWeight;
    68       return ExpensiveMaximization ? yhat + s : yhat - s;
     66      return (ExpensiveMaximization ? yhat : -yhat) + s;
    6967    }
    7068
    71     public override bool Maximization() {
    72       return ExpensiveMaximization;
    73     }
    7469
    75     protected override void Initialize() {
     70    public override void Initialize() {
    7671      var model = RegressionSolution.Model as IConfidenceRegressionModel;
    7772      if (model == null) throw new ArgumentException("can not calculate EI without confidence measure");
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/InfillCriteria/PluginExpectedImprovement.cs

    r14818 r15064  
    2020#endregion
    2121
    22 using System;
    2322using System.Linq;
    2423using HeuristicLab.Common;
    2524using HeuristicLab.Core;
     25using HeuristicLab.Encodings.RealVectorEncoding;
    2626using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    2727using HeuristicLab.Problems.DataAnalysis;
     
    3232  [StorableClass]
    3333  [Item("PluginExpectedImprovement", "Noisy InfillCriterion, Extension of the Expected Improvement by using the minimal prediction on the observed points\n rather than the minimal observed value as described in \n Global optimization based on noisy evaluations: An empirical study of two statistical approaches\r\nEmmanuel Vazqueza, Julien Villemonteixb, Maryan Sidorkiewiczb and Éric Walterc")]
    34   public class PluginExpectedImprovement : ExpectedImprovement {
    35 
     34  public class PluginExpectedImprovement : ExpectedImprovementBase {
    3635
    3736    #region HL-Constructors, Serialization and Cloning
    3837    [StorableConstructor]
    39     private PluginExpectedImprovement(bool deserializing) : base(deserializing) { }
    40     private PluginExpectedImprovement(PluginExpectedImprovement original, Cloner cloner) : base(original, cloner) { }
     38    protected PluginExpectedImprovement(bool deserializing) : base(deserializing) { }
     39    protected PluginExpectedImprovement(PluginExpectedImprovement original, Cloner cloner) : base(original, cloner) { }
    4140    public PluginExpectedImprovement() { }
    4241    public override IDeepCloneable Clone(Cloner cloner) {
     
    4544    #endregion
    4645
    47     protected override void Initialize() {
    48       if (ExpensiveMaximization) throw new NotImplementedException("PluginExpectedImprovement for maximization not yet implemented");
    49       var model = RegressionSolution.Model as IConfidenceRegressionModel;
    50       if (model == null) throw new ArgumentException("can not calculate EI without confidence measure");
    51       YMin = RegressionSolution.EstimatedTrainingValues.Min();
     46    protected override double FindBestFitness(IConfidenceRegressionSolution solution) {
     47      return ExpensiveMaximization ? RegressionSolution.EstimatedTrainingValues.Max() : RegressionSolution.EstimatedTrainingValues.Min();
     48    }
     49
     50    protected override double Evaluate(RealVector vector, double estimatedFitness, double estimatedStandardDeviation) {
     51      return GetEstimatedImprovement(BestFitness, estimatedFitness, estimatedStandardDeviation, ExploitationWeight, ExpensiveMaximization);
    5252    }
    5353  }
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/Interfaces/IInfillCriterion.cs

    r14818 r15064  
    2727namespace HeuristicLab.Algorithms.EGO {
    2828  public interface IInfillCriterion : INamedItem {
     29
     30    IRegressionSolution RegressionSolution { get; set; }
     31    bool ExpensiveMaximization { get; set; }
     32    RealVectorEncoding Encoding { get; set; }
    2933    double Evaluate(RealVector vector);
    30     bool Maximization();
    31     void Initialize(IRegressionSolution solution, bool expensiveMaximization, RealVectorEncoding encoding);
     34    //bool Maximization();
     35    void Initialize();
    3236  }
    3337}
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/Plugin.cs.frame

    r14768 r15064  
    2828  [Plugin("HeuristicLab.Algorithms.EGO", "3.4.5.$WCREV$")]
    2929  [PluginFile("HeuristicLab.Algorithms.EGO-3.4.dll", PluginFileType.Assembly)]
    30   [PluginFile("displayModelFrame.html", PluginFileType.Data)]
    3130  [PluginDependency("HeuristicLab.Algorithms.CMAEvolutionStrategy", "3.4")]
    3231  [PluginDependency("HeuristicLab.Algorithms.DataAnalysis", "3.4")]
     
    3433  [PluginDependency("HeuristicLab.Collections", "3.3")]
    3534  [PluginDependency("HeuristicLab.Common", "3.3")]
    36   [PluginDependency("HeuristicLab.Common.Resources", "3.3")]
    3735  [PluginDependency("HeuristicLab.Core", "3.3")]
    3836  [PluginDependency("HeuristicLab.Data", "3.3")]
    3937  [PluginDependency("HeuristicLab.Encodings.RealVectorEncoding", "3.3")]
     38  [PluginDependency("HeuristicLab.Encodings.PermutationEncoding", "3.3")]
    4039  [PluginDependency("HeuristicLab.Operators","3.3")]
    4140  [PluginDependency("HeuristicLab.Optimization","3.3")]
     41  [PluginDependency("HeuristicLab.Optimization.Operators","3.3")]
    4242  [PluginDependency("HeuristicLab.Parameters","3.3")]
    4343  [PluginDependency("HeuristicLab.Persistence","3.3")]
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/Problems/InfillProblem.cs

    r14818 r15064  
    2121
    2222using System;
     23using System.Collections.Generic;
     24using System.Linq;
    2325using HeuristicLab.Common;
    2426using HeuristicLab.Core;
     
    2729using HeuristicLab.Optimization;
    2830using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
     31using HeuristicLab.Problems.DataAnalysis;
    2932
    3033namespace HeuristicLab.Algorithms.EGO {
     
    3336  public sealed class InfillProblem : SingleObjectiveBasicProblem<RealVectorEncoding> {
    3437
    35     public override bool Maximization => true;  //This is necessary because algorithms do not expect the maximization to change
     38    public override bool Maximization => true;
    3639
    37     #region Properties;
     40    #region ProblemResultNames
     41    public const string BestInfillSolutionResultName = "BestInfillSolution";
     42    public const string BestInfillQualityResultName = "BestInfillQuality";
     43    #endregion
     44
     45    #region Properties
    3846    [Storable]
    3947    private IInfillCriterion infillCriterion;
    40     [Storable]
    41     private SingleObjectiveBasicProblem<IEncoding> problem;
    4248
    4349    public IInfillCriterion InfillCriterion
    4450    {
    4551      get { return infillCriterion; }
    46       set { infillCriterion = value; }
    47     }
    48     public SingleObjectiveBasicProblem<IEncoding> Problem
    49     {
    50       get { return problem; }
    5152      set
    5253      {
    53         problem = value;
    54         if (problem == null) return;
    55         var enc = problem.Encoding as RealVectorEncoding;
    56         if (enc == null) throw new ArgumentException("EGO can not be performed on non-RealVectorEncodings");
    57         Encoding = enc;
    58         SolutionCreator = new UniformRandomRealVectorCreator();//ignore Problem specific Solution Creation
    59 
     54        infillCriterion = value;
     55        infillCriterion.Encoding = Encoding;
    6056      }
    6157    }
    6258    #endregion
    6359
    64     #region HLConstructors
     60    #region Constructors
    6561    [StorableConstructor]
    6662    private InfillProblem(bool deserializing) : base(deserializing) { }
    6763    private InfillProblem(InfillProblem original, Cloner cloner) : base(original, cloner) {
    68       infillCriterion = cloner.Clone(original.InfillCriterion);
    69       problem = cloner.Clone(original.Problem);
     64      infillCriterion = cloner.Clone(original.infillCriterion);
    7065    }
    7166    public InfillProblem() { }
     
    7469
    /// <summary>
    /// Evaluates the infill criterion for a candidate vector. Candidates that
    /// violate the encoding bounds receive the worst possible quality so the
    /// optimizer steers back into the feasible region.
    /// </summary>
    public override double Evaluate(Individual individual, IRandom r) {
      // Hoisted: the original expression called individual.RealVector() twice.
      var vector = individual.RealVector();
      if (!InBounds(vector, Encoding.Bounds)) return double.MinValue;
      return InfillCriterion.Evaluate(vector);
    }
    /// <summary>
    /// Keeps the best infill solution and its quality up to date in the result
    /// collection (keys are the public result-name constants of this class).
    /// </summary>
    public override void Analyze(Individual[] individuals, double[] qualities, ResultCollection results, IRandom random) {
      base.Analyze(individuals, qualities, results, random);
      var bestIndex = qualities.ArgMax(x => x);
      var bestQuality = qualities[bestIndex];
      if (!results.ContainsKey(BestInfillQualityResultName)) {
        // First invocation: create both result entries and stop.
        results.Add(new Result(BestInfillSolutionResultName, (RealVector)individuals[bestIndex].RealVector().Clone()));
        results.Add(new Result(BestInfillQualityResultName, new DoubleValue(bestQuality)));
        return;
      }
      var previousBest = results[BestInfillQualityResultName].Value as DoubleValue;
      if (previousBest == null) throw new ArgumentException("Old best quality is not a double value. Conflicting Analyzers?");
      // Only overwrite the stored results on a strict improvement.
      if (previousBest.Value >= bestQuality) return;
      results[BestInfillSolutionResultName].Value = (RealVector)individuals[bestIndex].RealVector().Clone();
      previousBest.Value = bestQuality;
    }
    /// <summary>
    /// Lazily yields an endless stream of mutated copies of <paramref name="individual"/>,
    /// each produced by one of five bound-respecting manipulators chosen uniformly at random.
    /// Callers are expected to stop enumerating after enough neighbors.
    /// </summary>
    public override IEnumerable<Individual> GetNeighbors(Individual individual, IRandom random) {
      var bounds = Encoding.Bounds;
      var michalewiczIteration = 0;
      while (true) {
        var candidate = individual.Copy();
        var vector = candidate.RealVector();
        switch (random.Next(5)) {
          case 0: UniformOnePositionManipulator.Apply(random, vector, bounds); break;
          // case 1 currently duplicates case 0; a FixedNormalAllPositionsManipulator
          // variant was sketched here but is disabled.
          case 1: UniformOnePositionManipulator.Apply(random, vector, bounds); break;//FixedNormalAllPositionsManipulator.Apply(random, r, new RealVector(new[] { 0.1 })); break;
          case 2: MichalewiczNonUniformAllPositionsManipulator.Apply(random, vector, bounds, new IntValue(michalewiczIteration++), new IntValue(10000), new DoubleValue(5.0)); break;
          case 3: MichalewiczNonUniformOnePositionManipulator.Apply(random, vector, bounds, new IntValue(michalewiczIteration++), new IntValue(10000), new DoubleValue(5.0)); break;
          case 4: BreederGeneticAlgorithmManipulator.Apply(random, vector, bounds, new DoubleValue(0.1)); break;
          default: throw new NotImplementedException();
        }
        yield return candidate;
        // The Michalewicz manipulators treat this as an iteration counter out
        // of 10000; wrap it so it never exceeds that maximum.
        michalewiczIteration %= 10000;
      }
    }
     106
    /// <summary>
    /// Wires the infill criterion to a freshly trained surrogate model before a
    /// sub-optimization run and lets the criterion precompute its internal state.
    /// </summary>
    public void Initialize(IRegressionSolution model, bool expensiveMaximization) {
      var criterion = infillCriterion;
      criterion.RegressionSolution = model;
      criterion.ExpensiveMaximization = expensiveMaximization;
      criterion.Encoding = Encoding;
      criterion.Initialize();
    }
     113
    #region helpers
    /// <summary>
    /// Checks every component of <paramref name="r"/> against the corresponding
    /// bounds row (rows are reused cyclically when the vector is longer than the
    /// bounds matrix).
    /// </summary>
    private static bool InBounds(RealVector r, DoubleMatrix bounds) {
      for (var i = 0; i < r.Length; i++) {
        var row = i % bounds.Rows;
        if (r[i] < bounds[row, 0] || r[i] > bounds[row, 1]) return false;
      }
      return true;
    }
    #endregion

  }
}
  • branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/SamplingMethods/UniformRandomSampling.cs

    r14833 r15064  
    3434    #region HL-Constructors, Serialization and Cloning
    3535    [StorableConstructor]
    36     private UniformRandomSampling(bool deserializing) : base(deserializing) { }
    37     private UniformRandomSampling(UniformRandomSampling original, Cloner cloner) : base(original, cloner) { }
     36    protected UniformRandomSampling(bool deserializing) : base(deserializing) { }
     37    protected UniformRandomSampling(UniformRandomSampling original, Cloner cloner) : base(original, cloner) { }
    3838    public UniformRandomSampling() {
    3939    }
Note: See TracChangeset for help on using the changeset viewer.