#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.RealVectorEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.Instances;
using HeuristicLab.Random;

namespace HeuristicLab.Algorithms.EGO {
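  // Efficient Global Optimization (EGO): repeatedly fit a regression model (surrogate) to all
  // evaluated samples, optimize an infill criterion (e.g. expected improvement) on that model,
  // and evaluate the proposed point on the actual problem.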
  [StorableClass]
  [Creatable(CreatableAttribute.Categories.Algorithms, Priority = 95)]
  [Item("EfficientGlobalOptimizationAlgorithm", "Solves a problem by sequentially learning a model, solving a subproblem on the model and evaluating the best found solution for this subproblem.")]
  public class EfficientGlobalOptimizationAlgorithm : BasicAlgorithm, ISurrogateAlgorithm<RealVector> {
    #region Basic-Alg-Essentials
    public override bool SupportsPause => true;
    public override Type ProblemType => typeof(SingleObjectiveBasicProblem<IEncoding>);
    public new SingleObjectiveBasicProblem<IEncoding> Problem
    {
      get { return (SingleObjectiveBasicProblem<IEncoding>)base.Problem; }
      set { base.Problem = value; }
    }
    #endregion

    #region ParameterNames
    private const string GenerationSizeParameterName = "GenerationSize";
    private const string InfillCriterionParameterName = "InfillCriterion";
    private const string InfillOptimizationAlgorithmParameterName = "InfillOptimizationAlgorithm";
    private const string InfillOptimizationRestartsParameterName = "InfillOptimizationRestarts";
    private const string InitialEvaluationsParameterName = "Initial Evaluations";
    private const string MaximumEvaluationsParameterName = "Maximum Evaluations";
    private const string MaximumRuntimeParameterName = "Maximum Runtime";
    private const string RegressionAlgorithmParameterName = "RegressionAlgorithm";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string MaximalDataSetSizeParameterName = "MaximalDataSetSize";
    private const string RemoveDuplicatesParamterName = "RemoveDuplicates";
    private const string InitialSamplesParameterName = "InitialSamplesFile";
    private const string BaselineVectorParameterName = "BaselineVector";
    private const string InitialSamplingPlanParamterName = "InitialSamplingPlan";
    #endregion

    #region ResultNames
    private const string BestQualityResultName = "Best Quality";
    private const string BestSolutionResultName = "Best Solution";
    private const string EvaluatedSoultionsResultName = "EvaluatedSolutions";
    private const string IterationsResultName = "Iterations";
    private const string RegressionSolutionResultName = "Model";
    private const string QualitiesChartResultName = "Qualities";
    private const string BestQualitiesRowResultName = "Best Quality";
    private const string CurrentQualitiesRowResultName = "Current Quality";
    private const string WorstQualitiesRowResultName = "Worst Quality";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> GenerationSizeParemeter => Parameters[GenerationSizeParameterName] as IFixedValueParameter<IntValue>;
    public IConstrainedValueParameter<IInfillCriterion> InfillCriterionParameter => Parameters[InfillCriterionParameterName] as IConstrainedValueParameter<IInfillCriterion>;
    public IValueParameter<Algorithm> InfillOptimizationAlgorithmParameter => Parameters[InfillOptimizationAlgorithmParameterName] as IValueParameter<Algorithm>;
    public IFixedValueParameter<IntValue> InfillOptimizationRestartsParemeter => Parameters[InfillOptimizationRestartsParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<IntValue> InitialEvaluationsParameter => Parameters[InitialEvaluationsParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<IntValue> MaximumEvaluationsParameter => Parameters[MaximumEvaluationsParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<IntValue> MaximumRuntimeParameter => Parameters[MaximumRuntimeParameterName] as IFixedValueParameter<IntValue>;
    public IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>> RegressionAlgorithmParameter => Parameters[RegressionAlgorithmParameterName] as IValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>;
    public IFixedValueParameter<IntValue> SeedParameter => Parameters[SeedParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<BoolValue> SetSeedRandomlyParameter => Parameters[SetSeedRandomlyParameterName] as IFixedValueParameter<BoolValue>;
    public IFixedValueParameter<IntValue> MaximalDataSetSizeParameter => Parameters[MaximalDataSetSizeParameterName] as IFixedValueParameter<IntValue>;
    public IFixedValueParameter<BoolValue> RemoveDuplicatesParameter => Parameters[RemoveDuplicatesParamterName] as IFixedValueParameter<BoolValue>;
    public IFixedValueParameter<FileValue> InitialSamplesParameter => Parameters[InitialSamplesParameterName] as IFixedValueParameter<FileValue>;
    public IValueParameter<RealVector> BaselineVectorParameter => Parameters[BaselineVectorParameterName] as IValueParameter<RealVector>;
    public IConstrainedValueParameter<IInitialSampling> InitialSamplingPlanParameter => Parameters[InitialSamplingPlanParamterName] as IConstrainedValueParameter<IInitialSampling>;
    #endregion

    #region Properties
    public int GenerationSize => GenerationSizeParemeter.Value.Value;
    public IInfillCriterion InfillCriterion => InfillCriterionParameter.Value;
    public Algorithm InfillOptimizationAlgorithm => InfillOptimizationAlgorithmParameter.Value;
    public int InfillOptimizationRestarts => InfillOptimizationRestartsParemeter.Value.Value;
    public int InitialEvaluations => InitialEvaluationsParameter.Value.Value;
    public int MaximumEvaluations => MaximumEvaluationsParameter.Value.Value;
    public int MaximumRuntime => MaximumRuntimeParameter.Value.Value;
    public IDataAnalysisAlgorithm<IRegressionProblem> RegressionAlgorithm => RegressionAlgorithmParameter.Value;
    public int Seed => SeedParameter.Value.Value;
    public bool SetSeedRandomly => SetSeedRandomlyParameter.Value.Value;
    public int MaximalDatasetSize => MaximalDataSetSizeParameter.Value.Value;
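    // If MaximalDatasetSize is positive, only the most recent samples are used for model building (sliding window).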
    private IEnumerable<Tuple<RealVector, double>> DataSamples => Samples.Count > MaximalDatasetSize && MaximalDatasetSize > 0
      ? Samples.Skip(Samples.Count - MaximalDatasetSize)
      : Samples;
    private bool RemoveDuplicates => RemoveDuplicatesParameter.Value.Value;
    private RealVector BaselineVector => BaselineVectorParameter.Value;
    private IInitialSampling InitialSamplingPlan => InitialSamplingPlanParameter.Value;
    #endregion

    #region StorableProperties
    [Storable]
    private IRandom Random = new MersenneTwister();
    [Storable]
    private List<Tuple<RealVector, double>> Samples;
    [Storable]
    private List<Tuple<RealVector, double>> InitialSamples;
    #endregion

    #region ResultsProperties
    private double ResultsBestQuality
    {
      get { return ((DoubleValue)Results[BestQualityResultName].Value).Value; }
      set { ((DoubleValue)Results[BestQualityResultName].Value).Value = value; }
    }
    private RealVector ResultsBestSolution
    {
      get { return (RealVector)Results[BestSolutionResultName].Value; }
      set { Results[BestSolutionResultName].Value = value; }
    }
    private int ResultsEvaluations
    {
      get { return ((IntValue)Results[EvaluatedSoultionsResultName].Value).Value; }
      set { ((IntValue)Results[EvaluatedSoultionsResultName].Value).Value = value; }
    }
    private int ResultsIterations
    {
      get { return ((IntValue)Results[IterationsResultName].Value).Value; }
      set { ((IntValue)Results[IterationsResultName].Value).Value = value; }
    }
    private DataTable ResultsQualities => (DataTable)Results[QualitiesChartResultName].Value;
    private DataRow ResultsQualitiesBest => ResultsQualities.Rows[BestQualitiesRowResultName];
    private DataRow ResultsQualitiesWorst => ResultsQualities.Rows[WorstQualitiesRowResultName];
    private DataRow ResultsQualitiesIteration => ResultsQualities.Rows[CurrentQualitiesRowResultName];
    private IRegressionSolution ResultsModel
    {
      get { return (IRegressionSolution)Results[RegressionSolutionResultName].Value; }
      set { Results[RegressionSolutionResultName].Value = value; }
    }
    #endregion

    #region HLConstructors
    [StorableConstructor]
    protected EfficientGlobalOptimizationAlgorithm(bool deserializing) : base(deserializing) { }
    [StorableHook(HookType.AfterDeserialization)]
    protected void AfterDeserialization() {
      RegisterEventhandlers();
    }
    protected EfficientGlobalOptimizationAlgorithm(EfficientGlobalOptimizationAlgorithm original, Cloner cloner) : base(original, cloner) {
      Random = cloner.Clone(original.Random);
      if (original.Samples != null) Samples = original.Samples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
      if (original.InitialSamples != null) InitialSamples = original.InitialSamples.Select(x => new Tuple<RealVector, double>(cloner.Clone(x.Item1), x.Item2)).ToList();
      RegisterEventhandlers();
    }
    public override IDeepCloneable Clone(Cloner cloner) { return new EfficientGlobalOptimizationAlgorithm(this, cloner); }
    public EfficientGlobalOptimizationAlgorithm() {
      IProblemInstanceExporter dummy = new RegressionProblem(); // this variable is irrelevant
      // the dummy variable enforces a using statement for HeuristicLab.Problems.Instances
      // "new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>" requires no using statement, but nonetheless it requires HeuristicLab.Problems.Instances to be referenced
      // having HeuristicLab.Problems.Instances referenced but not used causes the Essential unit tests to fail

      var cmaes = new CMAEvolutionStrategy.CMAEvolutionStrategy {
        MaximumGenerations = 300,
        PopulationSize = 50
      };
      var model = new GaussianProcessRegression {
        Problem = new RegressionProblem()
      };
      model.CovarianceFunctionParameter.Value = new CovarianceRationalQuadraticIso();
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumEvaluationsParameterName, "The maximum number of evaluated solutions after which the algorithm stops.", new IntValue(int.MaxValue)));
      Parameters.Add(new FixedValueParameter<IntValue>(InitialEvaluationsParameterName, "The number of initial samples that are evaluated before the first model is built.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximumRuntimeParameterName, "The maximum runtime in seconds after which the algorithm stops. Use -1 to specify no limit for the runtime.", new IntValue(-1)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new ValueParameter<IDataAnalysisAlgorithm<IRegressionProblem>>(RegressionAlgorithmParameterName, "The model used to approximate the problem.", model));
      Parameters.Add(new ValueParameter<Algorithm>(InfillOptimizationAlgorithmParameterName, "The algorithm used to solve the expected improvement subproblem.", cmaes));
      Parameters.Add(new FixedValueParameter<IntValue>(InfillOptimizationRestartsParameterName, "Number of restarts of the subalgorithm to avoid local optima.", new IntValue(1)));
      Parameters.Add(new FixedValueParameter<IntValue>(GenerationSizeParameterName, "Number of points that are sampled every iteration (standard EGO: 1).", new IntValue(1)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaximalDataSetSizeParameterName, "The maximum number of sample points used to generate the model. Set to 0 or less to always use all samples.", new IntValue(-1)));
      Parameters.Add(new FixedValueParameter<BoolValue>(RemoveDuplicatesParamterName, "Whether duplicate samples should be replaced by a single sample with an averaged quality. This GREATLY decreases the chance of ill-conditioned models (unbuildable models), but it is not theoretically sound, as the model ignores the increased certainty in this region."));
      Parameters.Add(new FixedValueParameter<FileValue>(InitialSamplesParameterName, "The file specifying some initial samples used to jump-start the algorithm. These samples are not counted as evaluations. If InitialEvaluations is larger than the number of samples in the file, the remaining points are generated uniformly at random and evaluated.", new FileValue()));
      Parameters.Add(new ValueParameter<RealVector>(BaselineVectorParameterName, "A vector used to create a baseline; it is evaluated once and is not part of the modeling process (it has no influence on algorithm performance)."));
      var eqi = new ExpectedQuantileImprovement();
      eqi.MaxEvaluationsParameter.Value = MaximumEvaluationsParameter.Value;
      var criteria = new ItemSet<IInfillCriterion> { new ExpectedImprovement(), new AugmentedExpectedImprovement(), new ExpectedQuality(), eqi, new MinimalQuantileCriterium(), new PluginExpectedImprovement() };
      Parameters.Add(new ConstrainedValueParameter<IInfillCriterion>(InfillCriterionParameterName, "Determines which criterion is used to select the next sample.", criteria, criteria.First()));
      var initialSamplingPlans = new ItemSet<IInitialSampling> { new UniformRandomSampling(), new LatinHyperCubeDesignCreator() };
      Parameters.Add(new ConstrainedValueParameter<IInitialSampling>(InitialSamplingPlanParamterName, "Determines the initial samples from which the first model can be built.", initialSamplingPlans, initialSamplingPlans.First()));
      SetInfillProblem();
      RegisterEventhandlers();
    }
    #endregion
    public void SetInitialSamples(RealVector[] individuals, double[] qualities) {
      InitialSamples = individuals.Zip(qualities, (individual, d) => new Tuple<RealVector, double>(individual, d)).ToList();
    }
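    // Checks the encoding, initializes the random number generator, copies any provided initial
    // samples, and creates the result entries (iterations, evaluations, best solution/quality,
    // model, quality chart, optional baseline value).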
    protected override void Initialize(CancellationToken cancellationToken) {
      base.Initialize(cancellationToken);
      // encoding
      var enc = Problem.Encoding as RealVectorEncoding;
      if (enc == null) throw new ArgumentException("The EGO algorithm can only be applied to RealVectorEncodings");
      var infillProblem = InfillOptimizationAlgorithm.Problem as InfillProblem;
      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no InfillProblem. Troubles with Eventhandling?");

      // random
      if (SetSeedRandomly) SeedParameter.Value.Value = new System.Random().Next();
      Random.Reset(Seed);
      Samples = InitialSamples?.ToList() ?? new List<Tuple<RealVector, double>>();

      // results
      Results.Add(new Result(IterationsResultName, new IntValue(0)));
      Results.Add(new Result(EvaluatedSoultionsResultName, new IntValue(Samples.Count)));
      Results.Add(new Result(BestSolutionResultName, new RealVector(1)));
      Results.Add(new Result(BestQualityResultName, new DoubleValue(Problem.Maximization ? double.MinValue : double.MaxValue)));
      Results.Add(new Result(RegressionSolutionResultName, typeof(IRegressionSolution)));
      var table = new DataTable(QualitiesChartResultName);
      table.Rows.Add(new DataRow(BestQualitiesRowResultName));
      table.Rows.Add(new DataRow(WorstQualitiesRowResultName));
      table.Rows.Add(new DataRow(CurrentQualitiesRowResultName));
      Results.Add(new Result(QualitiesChartResultName, table));
      if (BaselineVector != null && BaselineVector.Length == enc.Length) Results.Add(new Result("BaselineValue", new DoubleValue(Evaluate(BaselineVector).Item2)));
    }
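    // Main loop: evaluate the initial sampling plan first, then iterate
    // (build model -> optimize infill criterion -> evaluate the proposed point(s))
    // until the evaluation budget is exhausted.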
    protected override void Run(CancellationToken cancellationToken) {
      // initial samples
      if (Samples.Count < InitialEvaluations) {
        var points = InitialSamplingPlan.GetSamples(InitialEvaluations - Samples.Count, Samples.Select(x => x.Item1).ToArray(), (RealVectorEncoding)Problem.Encoding, Random);
        foreach (var t in points) {
          try {
            Samples.Add(Evaluate(t));
            cancellationToken.ThrowIfCancellationRequested();
          }
          finally {
            Analyze();
          }
        }
      }
      // adaptive samples
      for (ResultsIterations = 0; ResultsEvaluations < MaximumEvaluations; ResultsIterations++) {
        try {
          ResultsModel = BuildModel(cancellationToken);
          if (ResultsModel == null) break;
          cancellationToken.ThrowIfCancellationRequested();
          for (var i = 0; i < GenerationSize; i++) {
            var samplepoint = OptimizeInfillProblem(cancellationToken);
            var sample = Evaluate(samplepoint);
            Samples.Add(sample);
            cancellationToken.ThrowIfCancellationRequested();
          }
        }
        finally {
          Analyze();
        }
      }
    }

    #region Eventhandling
    private void RegisterEventhandlers() {
      DeregisterEventhandlers();
      RegressionAlgorithmParameter.ValueChanged += OnModelAlgorithmChanged;
      InfillOptimizationAlgorithmParameter.ValueChanged += OnInfillOptimizationAlgorithmChanged;
      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
      InfillCriterionParameter.ValueChanged += InfillCriterionChanged;
      InitialSamplesParameter.ToStringChanged += OnInitialSamplesChanged;
    }
    private void DeregisterEventhandlers() {
      RegressionAlgorithmParameter.ValueChanged -= OnModelAlgorithmChanged;
      InfillOptimizationAlgorithmParameter.ValueChanged -= OnInfillOptimizationAlgorithmChanged;
      InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
      InfillCriterionParameter.ValueChanged -= InfillCriterionChanged;
      InitialSamplesParameter.ToStringChanged -= OnInitialSamplesChanged;
    }
    private void OnInfillOptimizationAlgorithmChanged(object sender, EventArgs args) {
      SetInfillProblem();
      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
    }
    private void InfillOptimizationProblemChanged(object sender, EventArgs e) {
      InfillOptimizationAlgorithm.ProblemChanged -= InfillOptimizationProblemChanged;
      SetInfillProblem();
      InfillOptimizationAlgorithm.ProblemChanged += InfillOptimizationProblemChanged;
    }
    private void InfillCriterionChanged(object sender, EventArgs e) {
      var infillProblem = InfillOptimizationAlgorithm.Problem as InfillProblem;
      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm has no InfillProblem. Troubles with Eventhandling?");
      infillProblem.InfillCriterion = InfillCriterion;
    }
    private void OnModelAlgorithmChanged(object sender, EventArgs args) {
      RegressionAlgorithm.Problem = new RegressionProblem();
    }
    private void OnInitialSamplesChanged(object sender, EventArgs args) { }
    protected override void OnExecutionTimeChanged() {
      base.OnExecutionTimeChanged();
      if (CancellationTokenSource == null) return;
      if (MaximumRuntime == -1) return;
      if (ExecutionTime.TotalSeconds > MaximumRuntime) CancellationTokenSource.Cancel();
    }
    public override void Pause() {
      if (InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Started || InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Paused) InfillOptimizationAlgorithm.Stop();
      if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
      base.Pause();
    }
    public override void Stop() {
      if (InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Started || InfillOptimizationAlgorithm.ExecutionState == ExecutionState.Paused) InfillOptimizationAlgorithm.Stop();
      if (RegressionAlgorithm.ExecutionState == ExecutionState.Started || RegressionAlgorithm.ExecutionState == ExecutionState.Paused) RegressionAlgorithm.Stop();
      base.Stop();
    }
    #endregion

    #region helpers
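    // Builds a regression solution from the current samples: all rows are used as training data,
    // the configured regression algorithm is run (up to 100 attempts) until it returns a solution,
    // and for Gaussian process regression a fallback model reusing the previous hyperparameters is tried as well.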
    private IRegressionSolution BuildModel(CancellationToken cancellationToken) {
      var dataset = EgoUtilities.GetDataSet(DataSamples.ToList(), RemoveDuplicates);
      var problemdata = new RegressionProblemData(dataset, dataset.VariableNames.Where(x => !x.Equals("output")), "output");
      problemdata.TrainingPartition.Start = 0;
      problemdata.TrainingPartition.End = dataset.Rows;
      problemdata.TestPartition.Start = dataset.Rows;
      problemdata.TestPartition.End = dataset.Rows;

      // train
      var problem = (RegressionProblem)RegressionAlgorithm.Problem;
      problem.ProblemDataParameter.Value = problemdata;
      var i = 0;
      IRegressionSolution solution = null;

      while (solution == null && i++ < 100) {
        var results = EgoUtilities.SyncRunSubAlgorithm(RegressionAlgorithm, Random.Next(int.MaxValue));
        solution = results.Select(x => x.Value).OfType<IRegressionSolution>().SingleOrDefault();
        cancellationToken.ThrowIfCancellationRequested();
      }

      // try creating a model with the old hyperparameters and the new dataset
      var gp = RegressionAlgorithm as GaussianProcessRegression;
      var oldmodel = ResultsModel as GaussianProcessRegressionSolution;
      if (gp != null && oldmodel != null) {
        var n = Samples.First().Item1.Length;
        var mean = (IMeanFunction)oldmodel.Model.MeanFunction.Clone();
        var cov = (ICovarianceFunction)oldmodel.Model.CovarianceFunction.Clone();
        if (mean.GetNumberOfParameters(n) != 0 || cov.GetNumberOfParameters(n) != 0) throw new ArgumentException("DEBUG: assumption about fixed parameters is wrong");
        var noise = 0.0;
        double[] hyp = { noise };
        try {
          var model = new GaussianProcessModel(problemdata.Dataset, problemdata.TargetVariable,
            problemdata.AllowedInputVariables, problemdata.TrainingIndices, hyp, mean, cov);
          model.FixParameters();
          var sol = new GaussianProcessRegressionSolution(model, problemdata);
          if (solution == null || solution.TrainingMeanSquaredError > sol.TrainingMeanSquaredError) {
            solution = sol;
          }
        }
        catch (ArgumentException) { }
      }

      if (!ResultsQualities.Rows.ContainsKey("DEBUG: Degenerates")) ResultsQualities.Rows.Add(new DataRow("DEBUG: Degenerates"));
      var row = ResultsQualities.Rows["DEBUG: Degenerates"];
      row.Values.Add(i - 1);
      if (solution == null) Results.Add(new Result("Status", new StringValue("The Algorithm did not return a Model")));
      else {
        if (!ResultsQualities.Rows.ContainsKey("DEBUG: RMSE")) ResultsQualities.Rows.Add(new DataRow("DEBUG: RMSE"));
        row = ResultsQualities.Rows["DEBUG: RMSE"];
        row.Values.Add(Math.Sqrt(solution.TrainingMeanSquaredError));
      }

      RegressionAlgorithm.Runs.Clear();
      return solution;
    }
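    // Optimizes the infill criterion on the current model with the configured subalgorithm,
    // restarting it InfillOptimizationRestarts times and returning the best point found.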
    private RealVector OptimizeInfillProblem(CancellationToken cancellationToken) {
      // parameterize and check the InfillProblem
      var infillProblem = InfillOptimizationAlgorithm.Problem as InfillProblem;
      if (infillProblem == null) throw new ArgumentException("InfillOptimizationAlgorithm does not have an InfillProblem.");
      if (infillProblem.InfillCriterion != InfillCriterion) throw new ArgumentException("The InfillCriterion for the problem is not correctly set.");
      var enc = Problem.Encoding as RealVectorEncoding;
      infillProblem.Encoding.Bounds = enc.Bounds;
      infillProblem.Encoding.Length = enc.Length;
      infillProblem.Initialize(ResultsModel, Problem.Maximization);

      RealVector bestVector = null;
      var bestValue = infillProblem.Maximization ? double.NegativeInfinity : double.PositiveInfinity;
      for (var i = 0; i < InfillOptimizationRestarts; i++) {
        // optimize
        var res = EgoUtilities.SyncRunSubAlgorithm(InfillOptimizationAlgorithm, Random.Next(int.MaxValue));
        cancellationToken.ThrowIfCancellationRequested();
        // extract results
        if (!res.ContainsKey(InfillProblem.BestInfillSolutionResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best solution");
        var v = res[InfillProblem.BestInfillSolutionResultName].Value as RealVector;
        if (!res.ContainsKey(InfillProblem.BestInfillQualityResultName)) throw new ArgumentException("The InfillOptimizationAlgorithm did not return a best quality");
        var d = res[InfillProblem.BestInfillQualityResultName].Value as DoubleValue;
        if (d == null || v == null) throw new ArgumentException("The InfillOptimizationAlgorithm did not return the expected result types");
        // check for improvement
        if (infillProblem.Maximization != d.Value > bestValue) continue;
        bestValue = d.Value;
        bestVector = v;
      }
      InfillOptimizationAlgorithm.Runs.Clear();
      return bestVector;
    }

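    // Updates the results after every evaluation: evaluation counter, best solution and quality,
    // the quality chart rows, and the problem-specific analyzers.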
    private void Analyze() {
      ResultsEvaluations = Samples.Count;
      var max = Samples.ArgMax(x => x.Item2);
      var min = Samples.ArgMin(x => x.Item2);
      var best = Samples[Problem.Maximization ? max : min];
      ResultsBestQuality = best.Item2;
      ResultsBestSolution = best.Item1;
      ResultsQualitiesBest.Values.Add(ResultsBestQuality);
      ResultsQualitiesIteration.Values.Add(Samples[Samples.Count - 1].Item2);
      ResultsQualitiesWorst.Values.Add(Samples[Problem.Maximization ? min : max].Item2);
      Problem.Analyze(Samples.Select(x => GetIndividual(x.Item1)).ToArray(), Samples.Select(x => x.Item2).ToArray(), Results, Random);
      if (Samples.Count != 0 && Samples[0].Item1.Length == 2) AnalyzeSampleDistribution();
      AnalyzePredictionCorrelation();
    }

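    // For two-dimensional encodings, plots the initial and all evaluated sample points in a scatter plot.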
    private void AnalyzeSampleDistribution() {
      const string plotname = "DEBUG:Sample Distribution";
      const string rowInit = "Initial Samples";
      const string rowAll = "All Samples";
      if (!Results.ContainsKey(plotname)) Results.Add(new Result(plotname, new ScatterPlot()));
      var plot = (ScatterPlot)Results[plotname].Value;
      if (!plot.Rows.ContainsKey(rowInit) && InitialSamples != null && InitialSamples.Count > 0)
        plot.Rows.Add(new ScatterPlotDataRow(rowInit, "samples from the initial file (already evaluated)", InitialSamples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1]))));
      if (!plot.Rows.ContainsKey(rowAll)) plot.Rows.Add(new ScatterPlotDataRow(rowAll, "All samples", new Point2D<double>[0]));
      else { plot.Rows[rowAll].Points.Clear(); }
      plot.Rows[rowAll].Points.AddRange(Samples.Select(x => new Point2D<double>(x.Item1[0], x.Item1[1])));
    }

    private void AnalyzePredictionCorrelation() {
      const string plotName = "Prediction";
      const string rowName = "Samples";
      const string lastrowName = "Last Sample";
      if (!Results.ContainsKey(plotName)) Results.Add(new Result(plotName, new ScatterPlot()));
      var plot = (ScatterPlot)Results[plotName].Value;
      if (!plot.Rows.ContainsKey(rowName)) plot.Rows.Add(new ScatterPlotDataRow(rowName, rowName, new List<Point2D<double>>()));
      if (!plot.Rows.ContainsKey(lastrowName)) plot.Rows.Add(new ScatterPlotDataRow(lastrowName, lastrowName, new List<Point2D<double>>()));
      var p = Samples[Samples.Count - 1];
      if (ResultsModel != null) plot.Rows[rowName].Points.Add(new Point2D<double>(ResultsModel.Model.GetEstimation(p.Item1), p.Item2, p.Item1));
      plot.VisualProperties.YAxisTitle = "True Objective Value";
      plot.VisualProperties.XAxisTitle = "Predicted Objective Value";
    }

    private Individual GetIndividual(RealVector r) {
      var scope = new Scope();
      scope.Variables.Add(new Variable(Problem.Encoding.Name, r));
      return new SingleEncodingIndividual(Problem.Encoding, scope);
    }
    private Tuple<RealVector, double> Evaluate(RealVector point) {
      return new Tuple<RealVector, double>(point, Problem.Evaluate(GetIndividual(point), Random));
    }

    private void SetInfillProblem() {
      InfillOptimizationAlgorithm.Problem = new InfillProblem { InfillCriterion = InfillCriterion };
    }
    #endregion
  }
}
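
// A minimal usage sketch (an assumption, not part of the original file): the algorithm is normally
// configured and started from the HeuristicLab GUI, but it can also be wired up in code. The
// problem instance below is a placeholder for any SingleObjectiveBasicProblem with a
// RealVectorEncoding; the parameter properties used are the ones defined above.
//
//   var ego = new HeuristicLab.Algorithms.EGO.EfficientGlobalOptimizationAlgorithm();
//   ego.Problem = someRealVectorProblem;                // hypothetical problem instance with a RealVectorEncoding
//   ego.InitialEvaluationsParameter.Value.Value = 20;   // size of the initial design
//   ego.MaximumEvaluationsParameter.Value.Value = 100;  // total evaluation budget
//   ego.Start();                                        // runs until the budget is exhausted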