
source: branches/EfficientGlobalOptimization/HeuristicLab.Algorithms.EGO/DiscreteEGO/DiscreteEfficientGlobalOptimizationAlgorithm.cs @ 15343

Last change on this file since 15343 was 15343, checked in by bwerth, 7 years ago

#2745 added discretized EGO-version for use with IntegerVectors

File size: 29.5 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Algorithms.OffspringSelectionGeneticAlgorithm;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.IntegerVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.Instances;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.EGO {
  [StorableClass]
  [Creatable(CreatableAttribute.Categories.Algorithms, Priority = 95)]
  [Item("DiscreteEfficientGlobalOptimizationAlgorithm", "Solves a problem by sequentially learning a model, solving a subproblem on the model and evaluating the best found solution for this subproblem.")]
  public class DiscreteEfficientGlobalOptimizationAlgorithm : BasicAlgorithm, ISurrogateAlgorithm<IntegerVector> {
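    // Basic EGO loop as implemented below (see Initialize and Run):
    //  1. evaluate an initial set of samples (from the InitialSamplesFile and/or the InitialSamplingPlan),
    //  2. fit a regression model (by default Gaussian process regression) to the evaluated samples,
    //  3. optimize the configured infill criterion (e.g. expected improvement) on that model,
    //  4. evaluate the proposed point(s) on the real problem and add them to the sample set,
    //  5. repeat steps 2-4 until MaximumEvaluations or MaximumRuntime is reached.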
    #region Basic-Alg-Essentials
    public override bool SupportsPause => true;
    public override Type ProblemType => typeof(SingleObjectiveBasicProblem<IEncoding>);
    public new SingleObjectiveBasicProblem<IEncoding> Problem {
      get { return (SingleObjectiveBasicProblem<IEncoding>)base.Problem; }
      set { base.Problem = value; }
    }
    #endregion

    #region ParameterNames
    private const string GenerationSizeParameterName = "GenerationSize";
    private const string InfillCriterionParameterName = "InfillCriterion";
    private const string InfillOptimizationAlgorithmParameterName = "InfillOptimizationAlgorithm";
    private const string InfillOptimizationRestartsParameterName = "InfillOptimizationRestarts";
    private const string InitialEvaluationsParameterName = "Initial Evaluations";
    private const string MaximumEvaluationsParameterName = "Maximum Evaluations";
    private const string MaximumRuntimeParameterName = "Maximum Runtime";
    private const string RegressionAlgorithmParameterName = "RegressionAlgorithm";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string MaximalDataSetSizeParameterName = "MaximalDataSetSize";
    private const string RemoveDuplicatesParameterName = "RemoveDuplicates";
    private const string InitialSamplesParameterName = "InitialSamplesFile";
    private const string BaselineVectorParameterName = "BaselineVector";
    private const string InitialSamplingPlanParameterName = "InitialSamplingPlan";
    #endregion

    #region ResultNames
    private const string BestQualityResultName = "Best Quality";
    private const string BestSolutionResultName = "Best Solution";
    private const string EvaluatedSolutionsResultName = "EvaluatedSolutions";
    private const string IterationsResultName = "Iterations";
    private const string RegressionSolutionResultName = "Model";
    private const string QualitiesChartResultName = "Qualities";
    private const string BestQualitiesRowResultName = "Best Quality";
    private const string CurrentQualitiesRowResultName = "Current Quality";
    private const string WorstQualitiesRowResultName = "Worst Quality";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> GenerationSizeParameter => Parameters[GenerationSizeParameterName] as IFixedValueParameter<IntValue>;
    public IConstrainedValueParameter<IInfillCriterion> InfillCriterionParameter => Parameters[InfillCriterionParameterName] as IConstrainedValueParameter<IInfillCriterion>;
    public IValueParameter<Algorithm> InfillOptimizationAlgorithmParameter => Parameters[InfillOptimizationAlgorithmParameterName] as IValueParameter<Algorithm>;
    public IFixedValueParameter<IntValue> InfillOptimizationRestartsParameter => Parameters[InfillOptimizationRestartsParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<IntValue> InitialEvaluationsParameter => Parameters[InitialEvaluationsParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<IntValue> MaximumEvaluationsParameter => Parameters[MaximumEvaluationsParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter => Parameters[MaximumRuntimeParameterName] as IFixedValueParameter<IntValue>;
    public IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>> RegressionAlgorithmParameter => Parameters[RegressionAlgorithmParameterName] as IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>;
    public IFixedValueParameter<IntValue> SeedParameter => Parameters[SeedParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter => Parameters[SetSeedRandomlyParameterName] as IFixedValueParameter<BoolValue>;
    public IFixedValueParameter<IntValue> MaximalDataSetSizeParameter => Parameters[MaximalDataSetSizeParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<BoolValue> RemoveDuplicatesParameter => Parameters[RemoveDuplicatesParameterName] as IFixedValueParameter<BoolValue>;
    public IFixedValueParameter<FileValue> InitialSamplesParameter => Parameters[InitialSamplesParameterName] as IFixedValueParameter<FileValue>;
    public IValueParameter<IntegerVector> BaselineVectorParameter => Parameters[BaselineVectorParameterName] as IValueParameter<IntegerVector>;
    public IConstrainedValueParameter<IInitialSampling<IntegerVector>> InitialSamplingPlanParameter => Parameters[InitialSamplingPlanParameterName] as IConstrainedValueParameter<IInitialSampling<IntegerVector>>;
    #endregion

    #region Properties
    public int GenerationSize => GenerationSizeParameter.Value.Value;
    public IInfillCriterion InfillCriterion => InfillCriterionParameter.Value;
    public Algorithm InfillOptimizationAlgorithm => InfillOptimizationAlgorithmParameter.Value;
    public int InfillOptimizationRestarts => InfillOptimizationRestartsParameter.Value.Value;
    public int InitialEvaluations => InitialEvaluationsParameter.Value.Value;
    public int MaximumEvaluations => MaximumEvaluationsParameter.Value.Value;
    public int MaximumRuntime => MaximumRuntimeParameter.Value.Value;
    public IDataAnalysisAlgorithm<IRegressionProblem> RegressionAlgorithm => RegressionAlgorithmParameter.Value;
    public int Seed => SeedParameter.Value.Value;
    public bool SetSeedRandomly => SetSeedRandomlyParameter.Value.Value;
    public int MaximalDatasetSize => MaximalDataSetSizeParameter.Value.Value;
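    // When MaximalDataSetSize is positive and exceeded, only the most recent samples are used for
    // model building (sliding window); otherwise all samples are used.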
    private IEnumerable<Tuple<IntegerVector, double>> DataSamples => Samples.Count > MaximalDatasetSize && MaximalDatasetSize > 0
      ? Samples.Skip(Samples.Count - MaximalDatasetSize)
      : Samples;
    private bool RemoveDuplicates => RemoveDuplicatesParameter.Value.Value;
    private IntegerVector BaselineVector => BaselineVectorParameter.Value;
    private IInitialSampling<IntegerVector> InitialSamplingPlan => InitialSamplingPlanParameter.Value;
    #endregion

    #region StorableProperties
    [Storable]
    private IRandom Random = new MersenneTwister();
    [Storable]
    private List<Tuple<IntegerVector, double>> Samples;
    [Storable]
    private List<Tuple<IntegerVector, double>> InitialSamples;
    #endregion

    #region ResultsProperties
    private double ResultsBestQuality {
      get { return ((DoubleValue)Results[BestQualityResultName].Value).Value; }
      set { ((DoubleValue)Results[BestQualityResultName].Value).Value = value; }
    }
    private IntegerVector ResultsBestSolution {
      get { return (IntegerVector)Results[BestSolutionResultName].Value; }
      set { Results[BestSolutionResultName].Value = value; }
    }
    private int ResultsEvaluations {
      get { return ((IntValue)Results[EvaluatedSolutionsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluatedSolutionsResultName].Value).Value = value; }
    }
    private int ResultsIterations {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    private DataTable ResultsQualities => (DataTable)Results[QualitiesChartResultName].Value;
    private DataRow ResultsQualitiesBest => ResultsQualities.Rows[BestQualitiesRowResultName];
    private DataRow ResultsQualitiesWorst => ResultsQualities.Rows[WorstQualitiesRowResultName];
    private DataRow ResultsQualitiesIteration => ResultsQualities.Rows[CurrentQualitiesRowResultName];
    private IRegressionSolution ResultsModel {
      get { return (IRegressionSolution)Results[RegressionSolutionResultName].Value; }
      set { Results[RegressionSolutionResultName].Value = value; }
    }
    #endregion

    #region HLConstructors
    [StorableConstructor]
    protected DiscreteEfficientGlobalOptimizationAlgorithm(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    protected void AfterDeserialization() {
      RegisterEventhandlers();
    }
    protected DiscreteEfficientGlobalOptimizationAlgorithm(DiscreteEfficientGlobalOptimizationAlgorithm original, Cloner cloner) : base(original, cloner) {
      Random = cloner.Clone(Random);
      if (original.Samples != null) Samples = original.Samples.Select(x => new Tuple<IntegerVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
      if (original.InitialSamples != null) InitialSamples = original.InitialSamples.Select(x => new Tuple<IntegerVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
      RegisterEventhandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new DiscreteEfficientGlobalOptimizationAlgorithm(this, cloner); }
    public DiscreteEfficientGlobalOptimizationAlgorithm() {
      IProblemInstanceExporter dummy = new RegressionProblem(); //this variable is irrelevant
      //the dummy variable enforces a using statement for HeuristicLab.Problems.Instances:
      //"new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>" requires no using statement, but it nonetheless requires HeuristicLab.Problems.Instances to be referenced;
      //having HeuristicLab.Problems.Instances referenced but not used causes the Essential unit tests to fail.

      var osga = new OffspringSelectionGeneticAlgorithm.OffspringSelectionGeneticAlgorithm() {
        MaximumGenerations = new IntValue(300),
        PopulationSize = new IntValue(50)
      };
      var model = new GaussianProcessRegression {
        Problem = new RegressionProblem()
      };
      model.CovarianceFunctionParameter.Value = new CovarianceRationalQuadraticIso();
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluationsParameterName, "The maximum number of evaluated solutions after which the algorithm stops.", new IntValue(int.MaxValue)));
      Parameters.Add(new FixedValueParameter<IntValue>(InitialEvaluationsParameterName, "The number of solutions evaluated before the first model is built.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeParameterName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime.", new IntValue(-1)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The model used to approximate the problem", model));
      Parameters.Add(new ValueParameter<Algorithm>(InfillOptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem", osga));
      Parameters.Add(new FixedValueParameter<IntValue>(InfillOptimizationRestartsParameterName, "Number of restarts of the sub-algorithm to avoid local optima", new IntValue(1)));
      Parameters.Add(new FixedValueParameter<IntValue>(GenerationSizeParameterName, "Number of points that are sampled every iteration (standard EGO: 1)", new IntValue(1)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximalDataSetSizeParameterName, "The maximum number of sample points used to generate the model. Set to 0 or less to always use all samples.", new IntValue(-1)));
      Parameters.Add(new FixedValueParameter<BoolValue>(RemoveDuplicatesParameterName, "Whether duplicate samples should be replaced by a single sample with an averaged quality. This GREATLY decreases the chance of ill-conditioned models (unbuildable models) but is not theoretically sound, as the model ignores the increasing certainty in this region"));
      Parameters.Add(new FixedValueParameter<FileValue>(InitialSamplesParameterName, "The file specifying some initial samples used to jump-start the algorithm. These samples are not counted as evaluations. If InitialEvaluations is larger than the number of samples specified in the file, the rest is generated uniformly at random and evaluated.", new FileValue()));
      Parameters.Add(new ValueParameter<IntegerVector>(BaselineVectorParameterName, "A vector used to create a baseline. This vector is evaluated once and is not part of the modeling process (it has no influence on algorithm performance)."));
      var eqi = new ExpectedQuantileImprovement();
      eqi.MaxEvaluationsParameter.Value = MaximumEvaluationsParameter.Value;
      var criteria = new ItemSet<IInfillCriterion> { new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuality(), eqi, new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
      Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(InfillCriterionParameterName, "The criterion that decides which point is sampled next", criteria, criteria.First()));
      var initialSamplingPlans = new ItemSet<IInitialSampling<IntegerVector>> { new UniformRandomDiscreteSampling() };
      Parameters.Add(new ConstrainedValueParameter<IInitialSampling<IntegerVector>>(InitialSamplingPlanParameterName, "Determines the initial samples from which the first model can be built.", initialSamplingPlans, initialSamplingPlans.First()));
      SetInfillProblem();
      RegisterEventhandlers();
    }
    #endregion
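    // allows an outside caller to seed the algorithm with already evaluated solutions;
    // these samples are copied into the sample set during Initialize and are not re-evaluated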
    public void SetInitialSamples(IntegerVector[] individuals, double[] qualities) {
      InitialSamples = individuals.Zip(qualities, (individual, d) => new Tuple<IntegerVector, double>(individual, d)).ToList();
    }
    protected override void Initialize(CancellationToken cancellationToken) {
      base.Initialize(cancellationToken);
      //encoding
      var enc = Problem.Encoding as IntegerVectorEncoding;
      if (enc == null) throw new ArgumentException("The EGO algorithm can only be applied to IntegerVectorEncodings");
      var infillProblem = InfillOptimizationAlgorithm.Problem as DiscreteInfillProblem;
      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no DiscreteInfillProblem. Troubles with Eventhandling?");

      //random
      if (SetSeedRandomly) SeedParameter.Value.Value = new System.Random().Next();
      Random.Reset(Seed);
      Samples = InitialSamples?.ToList() ?? new List<Tuple<IntegerVector, double>>();

      //results
      Results.Add(new Result(IterationsResultName, new IntValue(0)));
      Results.Add(new Result(EvaluatedSolutionsResultName, new IntValue(Samples.Count)));
      Results.Add(new Result(BestSolutionResultName, new IntegerVector(1)));
      Results.Add(new Result(BestQualityResultName, new DoubleValue(Problem.Maximization ? double.MinValue : double.MaxValue)));
      Results.Add(new Result(RegressionSolutionResultName, typeof(IRegressionSolution)));
      var table = new DataTable(QualitiesChartResultName);
      table.Rows.Add(new DataRow(BestQualitiesRowResultName));
      table.Rows.Add(new DataRow(WorstQualitiesRowResultName));
      table.Rows.Add(new DataRow(CurrentQualitiesRowResultName));
      Results.Add(new Result(QualitiesChartResultName, table));
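      // the baseline vector (if set and compatible with the encoding) is evaluated once and
      // reported as a separate result; it is not added to the sample set used for modeling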
      if (BaselineVector != null && BaselineVector.Length == enc.Length) Results.Add(new Result("BaselineValue", new DoubleValue(Evaluate(BaselineVector).Item2)));
    }
    protected override void Run(CancellationToken cancellationToken) {
      //initial samples
      if (Samples.Count < InitialEvaluations) {
        var points = InitialSamplingPlan.GetSamples(InitialEvaluations - Samples.Count, Samples.Select(x => x.Item1).ToArray(), (IntegerVectorEncoding)Problem.Encoding, Random);
        foreach (var t in points) {
          try {
            Samples.Add(Evaluate(t));
            cancellationToken.ThrowIfCancellationRequested();
          }
          finally {
            Analyze();
          }
        }
      }
      //adaptive samples
      for (ResultsIterations = 0; ResultsEvaluations < MaximumEvaluations; ResultsIterations++) {
        try {
          ResultsModel = BuildModel(cancellationToken);
          if (ResultsModel == null) break;
          cancellationToken.ThrowIfCancellationRequested();
          for (var i = 0; i < GenerationSize; i++) {
            var samplepoint = OptimizeInfillProblem(cancellationToken);
            if (RemoveDuplicates) {
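              // NOTE: duplicate handling is not yet implemented in this discrete version; even
              // when RemoveDuplicates is set, the proposed point is evaluated and added as-is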
            }
            var sample = Evaluate(samplepoint);
            Samples.Add(sample);
            cancellationToken.ThrowIfCancellationRequested();
          }
        }
        finally {
          Analyze();
        }
      }
    }

    #region Eventhandling
    private void RegisterEventhandlers() {
      DeregisterEventhandlers();
      RegressionAlgorithmParameter.ValueChanged += OnModelAlgorithmChanged;
      InfillOptimizationAlgorithmParameter.ValueChanged += OnInfillOptimizationAlgorithmChanged;
      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
      InfillCriterionParameter.ValueChanged += InfillCriterionChanged;
      InitialSamplesParameter.ToStringChanged += OnInitialSamplesChanged;
    }
    private void DeregisterEventhandlers() {
      RegressionAlgorithmParameter.ValueChanged -= OnModelAlgorithmChanged;
      InfillOptimizationAlgorithmParameter.ValueChanged -= OnInfillOptimizationAlgorithmChanged;
      InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
      InfillCriterionParameter.ValueChanged -= InfillCriterionChanged;
      InitialSamplesParameter.ToStringChanged -= OnInitialSamplesChanged;
    }
    private void OnInfillOptimizationAlgorithmChanged(object sender, EventArgs args) {
      SetInfillProblem();
      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
    }
    private void InfillOptimizationProblemChanged(object sender, EventArgs e) {
      InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
      SetInfillProblem();
      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
    }
    private void InfillCriterionChanged(object sender, EventArgs e) {
      var infillProblem = InfillOptimizationAlgorithm.Problem as DiscreteInfillProblem;
      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no DiscreteInfillProblem. Troubles with Eventhandling?");
      infillProblem.InfillCriterion = InfillCriterion;
    }
    private void OnModelAlgorithmChanged(object sender, EventArgs args) {
      RegressionAlgorithm.Problem = new RegressionProblem();
    }
    private void OnInitialSamplesChanged(object sender, EventArgs args) { }
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
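    // running sub-algorithms (infill optimization and model training) are stopped before the
    // surrounding EGO run itself is paused or stopped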
    public override void Pause() {
      if (InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Started || InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Paused) InfillOptimizationAlgorithm.Stop();
      if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
      base.Pause();
    }
    public override void Stop() {
      if (InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Started || InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Paused) InfillOptimizationAlgorithm.Stop();
      if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
      base.Stop();
    }
    #endregion

    #region helpers
    private IRegressionSolution BuildModel(CancellationToken cancellationToken) {
      var dataset = EgoUtilities.GetDataSet(DataSamples.ToList());
      var problemdata = new RegressionProblemData(dataset, dataset.VariableNames.Where(x => !x.Equals("output")), "output");
      problemdata.TrainingPartition.Start = 0;
      problemdata.TrainingPartition.End = dataset.Rows;
      problemdata.TestPartition.Start = dataset.Rows;
      problemdata.TestPartition.End = dataset.Rows;
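      // all samples are used for training; the test partition is left empty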

      //train
      var problem = (RegressionProblem)RegressionAlgorithm.Problem;
      problem.ProblemDataParameter.Value = problemdata;
      var i = 0;
      IRegressionSolution solution = null;

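      // restart the regression algorithm (up to 100 times) until it returns a regression solution;
      // the number of unsuccessful attempts is recorded in the "DEBUG: Degenerates" data row below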
      while (solution == null && i++ < 100) {
        var results = EgoUtilities.SyncRunSubAlgorithm(RegressionAlgorithm, Random.Next(int.MaxValue), cancellationToken);
        solution = results.Select(x => x.Value).OfType<IRegressionSolution>().SingleOrDefault();
        cancellationToken.ThrowIfCancellationRequested();
      }

      //try creating a model with old hyperparameters and new dataset;
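      // if the previous iteration produced a Gaussian process model, build an additional GP model
      // that reuses its (fixed) mean and covariance functions on the new data set and keep it when
      // it fits the training data better than the freshly trained solution (or when training failed)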
      var gp = RegressionAlgorithm as GaussianProcessRegression;
      var oldmodel = ResultsModel as GaussianProcessRegressionSolution;
      if (gp != null && oldmodel != null) {
        var n = Samples.First().Item1.Length;
        var mean = (IMeanFunction)oldmodel.Model.MeanFunction.Clone();
        var cov = (ICovarianceFunction)oldmodel.Model.CovarianceFunction.Clone();
        if (mean.GetNumberOfParameters(n) != 0 || cov.GetNumberOfParameters(n) != 0) throw new ArgumentException("DEBUG: assumption about fixed parameters wrong");
        var noise = 0.0;
        double[] hyp = { noise };
        try {
          var model = new GaussianProcessModel(problemdata.Dataset, problemdata.TargetVariable,
            problemdata.AllowedInputVariables, problemdata.TrainingIndices, hyp, mean, cov);
          model.FixParameters();
          var sol = new GaussianProcessRegressionSolution(model, problemdata);
          if (solution == null || solution.TrainingMeanSquaredError > sol.TrainingMeanSquaredError) {
            solution = sol;
          }
        }
        catch (ArgumentException) { }
      }

      if (!ResultsQualities.Rows.ContainsKey("DEBUG: Degenerates")) ResultsQualities.Rows.Add(new DataRow("DEBUG: Degenerates"));
      var row = ResultsQualities.Rows["DEBUG: Degenerates"];
      row.Values.Add(i - 1);
      if (solution == null) Results.Add(new Result("Status", new StringValue("The Algorithm did not return a Model")));
      else {
        if (!ResultsQualities.Rows.ContainsKey("DEBUG: RMSE")) ResultsQualities.Rows.Add(new DataRow("DEBUG: RMSE"));
        row = ResultsQualities.Rows["DEBUG: RMSE"];
        row.Values.Add(Math.Sqrt(solution.TrainingMeanSquaredError));
      }

      RegressionAlgorithm.Runs.Clear();
      return solution;
    }
    private IntegerVector OptimizeInfillProblem(CancellationToken cancellationToken) {
      //parameterize and check InfillProblem
      var infillProblem = InfillOptimizationAlgorithm.Problem as DiscreteInfillProblem;
      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
      if (infillProblem.InfillCriterion != InfillCriterion) throw new ArgumentException("InfillCriterion for Problem is not correctly set.");
      var enc = Problem.Encoding as IntegerVectorEncoding;
      infillProblem.Encoding.Bounds = enc.Bounds;
      infillProblem.Encoding.Length = enc.Length;
      infillProblem.Initialize(ResultsModel, Problem.Maximization);

      IntegerVector bestVector = null;
      var bestValue = infillProblem.Maximization ? double.NegativeInfinity : double.PositiveInfinity;
      for (var i = 0; i < InfillOptimizationRestarts; i++) {
        //optimize
        var res = EgoUtilities.SyncRunSubAlgorithm(InfillOptimizationAlgorithm, Random.Next(int.MaxValue), cancellationToken);
        cancellationToken.ThrowIfCancellationRequested();
        //extract results
        if (!res.ContainsKey(DiscreteInfillProblem.BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
        var v = res[DiscreteInfillProblem.BestInfillSolutionResultName].Value as IntegerVector;
        if (!res.ContainsKey(DiscreteInfillProblem.BestInfillQualityResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best quality");
        var d = res[DiscreteInfillProblem.BestInfillQualityResultName].Value as DoubleValue;
        if (d == null || v == null) throw new ArgumentException("The InfillOptimizationAlgorithm did not return the expected result types");
        //check for improvement
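        // keep this restart's solution only if it is better with respect to the infill problem's
        // optimization direction (larger when maximizing, smaller when minimizing)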
        if (infillProblem.Maximization != d.Value > bestValue) continue;
        bestValue = d.Value;
        bestVector = v;
      }
      InfillOptimizationAlgorithm.Runs.Clear();
      return bestVector;
    }

    private void Analyze() {
      ResultsEvaluations = Samples.Count;
      var max = Samples.ArgMax(x => x.Item2);
      var min = Samples.ArgMin(x => x.Item2);
      var best = Samples[Problem.Maximization ? max : min];
      ResultsBestQuality = best.Item2;
      ResultsBestSolution = best.Item1;
      ResultsQualitiesBest.Values.Add(ResultsBestQuality);
      ResultsQualitiesIteration.Values.Add(Samples[Samples.Count - 1].Item2);
      ResultsQualitiesWorst.Values.Add(Samples[Problem.Maximization ? min : max].Item2);
      Problem.Analyze(Samples.Select(x => GetIndividual(x.Item1)).ToArray(), Samples.Select(x => x.Item2).ToArray(), Results, Random);
      if (Samples.Count != 0 && Samples[0].Item1.Length == 2) AnalyzeSampleDistribution();
      AnalyzePredictionCorrelation();
    }

    private void AnalyzeSampleDistribution() {
      const string plotname = "DEBUG:Sample Distribution";
      const string rowInit = "Initial Samples";
      const string rowAll = "All Samples";
      if (!Results.ContainsKey(plotname)) Results.Add(new Result(plotname, new ScatterPlot()));
      var plot = (ScatterPlot)Results[plotname].Value;
      if (!plot.Rows.ContainsKey(rowInit) && InitialSamples != null && InitialSamples.Count > 0)
        plot.Rows.Add(new ScatterPlotDataRow(rowInit, "samples from initial file (already evaluated)", InitialSamples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1]))));
      if (!plot.Rows.ContainsKey(rowAll)) plot.Rows.Add(new ScatterPlotDataRow(rowAll, "All samples", new Point2D<double>[0]));
      else { plot.Rows[rowAll].Points.Clear(); }
      plot.Rows[rowAll].Points.AddRange(Samples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1])));
    }

    private void AnalyzePredictionCorrelation() {
      const string plotName = "Prediction";
      const string rowName = "Samples";
      const string lastrowName = "Last Sample";
      if (!Results.ContainsKey(plotName)) Results.Add(new Result(plotName, new ScatterPlot()));
      var plot = (ScatterPlot)Results[plotName].Value;
      if (!plot.Rows.ContainsKey(rowName)) plot.Rows.Add(new ScatterPlotDataRow(rowName, rowName, new List<Point2D<double>>()));
      if (!plot.Rows.ContainsKey(lastrowName)) plot.Rows.Add(new ScatterPlotDataRow(lastrowName, lastrowName, new List<Point2D<double>>()));
      var p = Samples[Samples.Count - 1];
      if (ResultsModel != null) plot.Rows[rowName].Points.Add(new Point2D<double>(ResultsModel.Model.GetEstimation(p.Item1), p.Item2, p.Item1));
      plot.VisualProperties.YAxisTitle = "True Objective Value";
      plot.VisualProperties.XAxisTitle = "Predicted Objective Value";
    }

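    // wrap a raw IntegerVector in a scope so the problem's Evaluate and Analyze methods, which
    // operate on Individuals, can be reused for points proposed by the infill optimization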
    private Individual GetIndividual(IntegerVector r) {
      var scope = new Scope();
      scope.Variables.Add(new Variable(Problem.Encoding.Name, r));
      return new SingleEncodingIndividual(Problem.Encoding, scope);
    }
    private Tuple<IntegerVector, double> Evaluate(IntegerVector point) {
      return new Tuple<IntegerVector, double>(point, Problem.Evaluate(GetIndividual(point), Random));
    }

    private void SetInfillProblem() {
      InfillOptimizationAlgorithm.Problem = new DiscreteInfillProblem { InfillCriterion = InfillCriterion };
    }
    #endregion
  }
}