#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

// ReSharper disable once CheckNamespace
namespace HeuristicLab.Algorithms.EGO {
  [StorableClass]
  [Item("ExpectedQuantileImprovement", "Noisy InfillCriterion, extension of the Expected Improvement as described in:\nNoisy expected improvement and on-line computation time allocation for the optimization of simulators with tunable fidelity\nPicheny, V., Ginsbourger, D., Richet, Y.")]
  public class ExpectedQuantileImprovement : ExpectedImprovement {

    #region Parameternames
    public const string AlphaParameterName = "Alpha";
    public const string MaxEvaluationsParameterName = "MaxEvaluations";
    #endregion

    #region Parameters
    public IFixedValueParameter<DoubleValue> AlphaParameter => Parameters[AlphaParameterName] as IFixedValueParameter<DoubleValue>;
    public IValueParameter<IntValue> MaxEvaluationsParameter => Parameters[MaxEvaluationsParameterName] as IValueParameter<IntValue>;
    #endregion

    #region Properties
    public int MaxEvaluations => MaxEvaluationsParameter.Value.Value;
    public double Alpha => AlphaParameter.Value.Value;
    [Storable]
    private double Tau;
    #endregion

    #region HL-Constructors, Serialization and Cloning
    [StorableConstructor]
    private ExpectedQuantileImprovement(bool deserializing) : base(deserializing) { }
    private ExpectedQuantileImprovement(ExpectedQuantileImprovement original, Cloner cloner) : base(original, cloner) {
      Tau = original.Tau;
    }
    public ExpectedQuantileImprovement() {
      Parameters.Add(new FixedValueParameter<DoubleValue>(AlphaParameterName, "The Alpha value specifying the robustness of the \"effective best solution\". Recommended value is 1.0", new DoubleValue(1.0)));
      Parameters.Add(new ValueParameter<IntValue>(MaxEvaluationsParameterName, "The maximum number of evaluations allowed for EGO", new IntValue(100)));
      MaxEvaluationsParameter.Hidden = true;
    }
    public override IDeepCloneable Clone(Cloner cloner) {
      return new ExpectedQuantileImprovement(this, cloner);
    }
    #endregion
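
    // Sketch of the underlying criterion (assuming the formulation of Picheny et al. cited above):
    // with evaluation-noise variance Tau, the model's quantile at x after one additional noisy
    // evaluation has approximately
    //   mean   m_Q(x) = m(x) + Alpha * sqrt(Tau * s2(x) / (Tau + s2(x)))
    //   stdev  s_Q(x) = s2(x) / sqrt(Tau + s2(x))
    // where m(x) and s2(x) are the model's predicted mean and variance. Evaluate below plugs these
    // two quantities into the standard Expected Improvement formula provided by the base class.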
    public override double Evaluate(RealVector vector) {
      var model = RegressionSolution.Model as IConfidenceRegressionModel;
      var s2 = model.GetVariance(vector);
      var yhat = model.GetEstimation(vector) + Alpha * Math.Sqrt(Tau * s2 / (Tau + s2));
      var s = Math.Sqrt(s2 * s2 / (Tau + s2));
      return GetEstimatedImprovement(YMin, yhat, s, ExploitationWeight);
    }

    protected override void Initialize() {
      if (ExpensiveMaximization) throw new NotImplementedException("ExpectedQuantileImprovement for maximization not yet implemented");
      var solution = RegressionSolution as IConfidenceRegressionSolution;
      if (solution == null) throw new ArgumentException("Can not calculate Expected Quantile Improvement without a regression solution providing confidence values");

      // Estimate the evaluation-noise variance Tau from the mean absolute training residual
      // and spread it over the remaining evaluation budget.
      Tau = RegressionSolution.EstimatedTrainingValues.Zip(RegressionSolution.ProblemData.TargetVariableTrainingValues, (d, d1) => Math.Abs(d - d1)).Average();
      Tau = Tau * Tau / (MaxEvaluations - RegressionSolution.ProblemData.Dataset.Rows + 1);

      // The "effective best solution" is the training point with the smallest m(x) + Alpha * s(x);
      // its estimated value serves as the reference YMin for the improvement calculation.
      var xss = new RealVector(Encoding.Length);
      var xssIndex = solution.EstimatedTrainingValues.Zip(solution.EstimatedTrainingVariances, (m, s2) => m + Alpha * Math.Sqrt(s2)).ArgMin(x => x);
      var i = solution.ProblemData.TrainingIndices.ToArray()[xssIndex];
      for (var j = 0; j < Encoding.Length; j++) xss[j] = solution.ProblemData.Dataset.GetDoubleValue(i, j);

      YMin = RegressionSolution.Model.GetEstimation(xss);
    }
  }
}