Free cookie consent management tool by TermsFeed Policy Generator

source: branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs @ 14563

Last change on this file since 14563 was 14563, checked in by abeham, 7 years ago

#2701:

  • Tagged unbiased models with property
  • Changed default configuration
  • Added solution distance to breeding, relinking and delinking performance models
  • Changed sampling model to base prediction on average distance in genotype space
  • Changed target for hillclimber and relinking to relative (quality improvement)
  • Changed breeding to count cache hits per crossover
File size: 35.4 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Linq;
25using System.Runtime.CompilerServices;
26using System.Threading;
27using HeuristicLab.Algorithms.DataAnalysis;
28using HeuristicLab.Algorithms.MemPR.Interfaces;
29using HeuristicLab.Common;
30using HeuristicLab.Core;
31using HeuristicLab.Data;
32using HeuristicLab.Optimization;
33using HeuristicLab.Parameters;
34using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
35using HeuristicLab.Problems.DataAnalysis;
36using HeuristicLab.Random;
37using ExecutionContext = HeuristicLab.Core.ExecutionContext;
38
39namespace HeuristicLab.Algorithms.MemPR {
40  [Item("MemPRContext", "Abstract base class for MemPR contexts.")]
41  [StorableClass]
42  public abstract class MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext> : ParameterizedNamedItem,
43    IPopulationBasedHeuristicAlgorithmContext<TProblem, TSolution>, ISolutionModelContext<TSolution>, IEvaluationServiceContext<TSolution>
44      where TProblem : class, IItem, ISingleObjectiveHeuristicOptimizationProblem
45      where TSolution : class, IItem
46      where TPopulationContext : MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext>
47      where TSolutionContext : MemPRSolutionContext<TProblem, TSolution, TPopulationContext, TSolutionContext> {
48
    // The execution context this context is nested in (e.g. the running algorithm); not persisted.
    private IExecutionContext parent;
    public IExecutionContext Parent {
      get { return parent; }
      set { parent = value; }
    }

    // Root scope; the population is kept as its sub-scopes.
    [Storable]
    private IScope scope;
    public IScope Scope {
      get { return scope; }
      private set { scope = value; }
    }

    // Explicit interface implementation: expose this item's parameter collection to the execution context.
    IKeyedItemCollection<string, IParameter> IExecutionContext.Parameters {
      get { return Parameters; }
    }

    // The problem instance being solved, wrapped in a value parameter for persistence/UI.
    [Storable]
    private IValueParameter<TProblem> problem;
    public TProblem Problem {
      get { return problem.Value; }
      set { problem.Value = value; }
    }
    // True if fitness is to be maximized (read from the problem's maximization parameter).
    public bool Maximization {
      get { return ((IValueParameter<BoolValue>)Problem.MaximizationParameter).Value.Value; }
    }

    // Whether the context has already been initialized (e.g. the initial population was created).
    [Storable]
    private IValueParameter<BoolValue> initialized;
    public bool Initialized {
      get { return initialized.Value.Value; }
      set { initialized.Value.Value = value; }
    }
82
    // Number of MemPR iterations performed so far.
    [Storable]
    private IValueParameter<IntValue> iterations;
    public int Iterations {
      get { return iterations.Value.Value; }
      set { iterations.Value.Value = value; }
    }

    // Total number of solution evaluations consumed.
    [Storable]
    private IValueParameter<IntValue> evaluatedSolutions;
    public int EvaluatedSolutions {
      get { return evaluatedSolutions.Value.Value; }
      set { evaluatedSolutions.Value.Value = value; }
    }

    // Best quality found so far (NaN until the first evaluation).
    [Storable]
    private IValueParameter<DoubleValue> bestQuality;
    public double BestQuality {
      get { return bestQuality.Value.Value; }
      set { bestQuality.Value.Value = value; }
    }

    // Best solution found so far.
    [Storable]
    private IValueParameter<TSolution> bestSolution;
    public TSolution BestSolution {
      get { return bestSolution.Value; }
      set { bestSolution.Value = value; }
    }

    // Evaluations spent in local search.
    [Storable]
    private IValueParameter<IntValue> localSearchEvaluations;
    public int LocalSearchEvaluations {
      get { return localSearchEvaluations.Value.Value; }
      set { localSearchEvaluations.Value.Value = value; }
    }

    // Estimated level (quality) of the local optima reached.
    [Storable]
    private IValueParameter<DoubleValue> localOptimaLevel;
    public double LocalOptimaLevel {
      get { return localOptimaLevel.Value.Value; }
      set { localOptimaLevel.Value.Value = value; }
    }

    // Counters: how many population members were produced by each operator.
    [Storable]
    private IValueParameter<IntValue> byBreeding;
    public int ByBreeding {
      get { return byBreeding.Value.Value; }
      set { byBreeding.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byRelinking;
    public int ByRelinking {
      get { return byRelinking.Value.Value; }
      set { byRelinking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byDelinking;
    public int ByDelinking {
      get { return byDelinking.Value.Value; }
      set { byDelinking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> bySampling;
    public int BySampling {
      get { return bySampling.Value.Value; }
      set { bySampling.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byHillclimbing;
    public int ByHillclimbing {
      get { return byHillclimbing.Value.Value; }
      set { byHillclimbing.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byAdaptivewalking;
    public int ByAdaptivewalking {
      get { return byAdaptivewalking.Value.Value; }
      set { byAdaptivewalking.Value.Value = value; }
    }

    // Pseudo-random number generator shared by all stochastic decisions of the context.
    [Storable]
    private IValueParameter<IRandom> random;
    public IRandom Random {
      get { return random.Value; }
      set { random.Value = value; }
    }
173   
    // Live view of the population: all solution sub-scopes of the global scope.
    public IEnumerable<ISingleObjectiveSolutionScope<TSolution>> Population {
      get { return scope.SubScopes.OfType<ISingleObjectiveSolutionScope<TSolution>>(); }
    }
    // Appends a solution to the population.
    public void AddToPopulation(ISingleObjectiveSolutionScope<TSolution> solScope) {
      scope.SubScopes.Add(solScope);
    }
    // Replaces the population member at the given index.
    public void ReplaceAtPopulation(int index, ISingleObjectiveSolutionScope<TSolution> solScope) {
      scope.SubScopes[index] = solScope;
    }
    // Returns the population member at the given index (null if the sub-scope has a different type).
    public ISingleObjectiveSolutionScope<TSolution> AtPopulation(int index) {
      return scope.SubScopes[index] as ISingleObjectiveSolutionScope<TSolution>;
    }
    // Sorts the population best-first; negating the fitness turns the ascending
    // OrderBy into a descending sort when maximizing.
    public void SortPopulation() {
      scope.SubScopes.Replace(scope.SubScopes.OfType<ISingleObjectiveSolutionScope<TSolution>>().OrderBy(x => Maximization ? -x.Fitness : x.Fitness).ToList());
    }
    // Number of sub-scopes in the global scope (assumed to all be population members).
    public int PopulationCount {
      get { return scope.SubScopes.Count; }
    }
192
    // Performance models: confidence regression models trained on the statistics below,
    // used to predict whether applying an operator to given inputs is worthwhile.

    [Storable]
    private IConfidenceRegressionModel breedingPerformanceModel;
    public IConfidenceRegressionModel BreedingPerformanceModel {
      get { return breedingPerformanceModel; }
    }
    // Breeding samples: (better parent fitness, worse parent fitness, parent distance, child fitness).
    [Storable]
    private List<Tuple<double, double, double, double>> breedingStat;
    public IEnumerable<Tuple<double, double, double, double>> BreedingStat {
      get { return breedingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel relinkingPerformanceModel;
    public IConfidenceRegressionModel RelinkingPerformanceModel {
      get { return relinkingPerformanceModel; }
    }
    // Relinking samples: (fitness a, fitness b, parent distance, improvement over the better parent).
    [Storable]
    private List<Tuple<double, double, double, double>> relinkingStat;
    public IEnumerable<Tuple<double, double, double, double>> RelinkingStat {
      get { return relinkingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel delinkingPerformanceModel;
    public IConfidenceRegressionModel DelinkingPerformanceModel {
      get { return delinkingPerformanceModel; }
    }
    // Delinking samples: (fitness a, fitness b, parent distance, improvement over the better parent).
    [Storable]
    private List<Tuple<double, double, double, double>> delinkingStat;
    public IEnumerable<Tuple<double, double, double, double>> DelinkingStat {
      get { return delinkingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel samplingPerformanceModel;
    public IConfidenceRegressionModel SamplingPerformanceModel {
      get { return samplingPerformanceModel; }
    }
    // Sampling samples: (average genotype distance, sampled solution fitness).
    [Storable]
    private List<Tuple<double, double>> samplingStat;
    public IEnumerable<Tuple<double, double>> SamplingStat {
      get { return samplingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel hillclimbingPerformanceModel;
    public IConfidenceRegressionModel HillclimbingPerformanceModel {
      get { return hillclimbingPerformanceModel; }
    }
    // Hillclimbing samples: (starting fitness, achieved quality improvement).
    [Storable]
    private List<Tuple<double, double>> hillclimbingStat;
    public IEnumerable<Tuple<double, double>> HillclimbingStat {
      get { return hillclimbingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel adaptiveWalkPerformanceModel;
    public IConfidenceRegressionModel AdaptiveWalkPerformanceModel {
      get { return adaptiveWalkPerformanceModel; }
    }
    // Adaptive walk samples: (starting fitness, resulting fitness) - absolute, not relative.
    [Storable]
    private List<Tuple<double, double>> adaptivewalkingStat;
    public IEnumerable<Tuple<double, double>> AdaptivewalkingStat {
      get { return adaptivewalkingStat; }
    }

    // Solution model used for biased sampling of new solutions.
    [Storable]
    public ISolutionModel<TSolution> Model { get; set; }
256
    [StorableConstructor]
    protected MemPRPopulationContext(bool deserializing) : base(deserializing) { }
    // Deep-cloning constructor: parameters and models are cloned via the cloner; the
    // statistics lists are copied manually because the lists must not be shared between
    // clones (the tuples themselves are immutable and can be re-created element-wise).
    protected MemPRPopulationContext(MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext> original, Cloner cloner)
      : base(original, cloner) {
      scope = cloner.Clone(original.scope);
      problem = cloner.Clone(original.problem);
      initialized = cloner.Clone(original.initialized);
      iterations = cloner.Clone(original.iterations);
      evaluatedSolutions = cloner.Clone(original.evaluatedSolutions);
      bestQuality = cloner.Clone(original.bestQuality);
      bestSolution = cloner.Clone(original.bestSolution);
      localSearchEvaluations = cloner.Clone(original.localSearchEvaluations);
      localOptimaLevel = cloner.Clone(original.localOptimaLevel);
      byBreeding = cloner.Clone(original.byBreeding);
      byRelinking = cloner.Clone(original.byRelinking);
      byDelinking = cloner.Clone(original.byDelinking);
      bySampling = cloner.Clone(original.bySampling);
      byHillclimbing = cloner.Clone(original.byHillclimbing);
      byAdaptivewalking = cloner.Clone(original.byAdaptivewalking);
      random = cloner.Clone(original.random);
      breedingPerformanceModel = cloner.Clone(original.breedingPerformanceModel);
      breedingStat = original.breedingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
      relinkingPerformanceModel = cloner.Clone(original.relinkingPerformanceModel);
      relinkingStat = original.relinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
      delinkingPerformanceModel = cloner.Clone(original.delinkingPerformanceModel);
      delinkingStat = original.delinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
      samplingPerformanceModel = cloner.Clone(original.samplingPerformanceModel);
      samplingStat = original.samplingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
      hillclimbingPerformanceModel = cloner.Clone(original.hillclimbingPerformanceModel);
      hillclimbingStat = original.hillclimbingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
      adaptiveWalkPerformanceModel = cloner.Clone(original.adaptiveWalkPerformanceModel);
      adaptivewalkingStat = original.adaptivewalkingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();

      Model = cloner.Clone(original.Model);
    }
    public MemPRPopulationContext() : this("MemPRContext") { }
    // Creates a fresh context: registers all state as value parameters (so it is persisted
    // and visible in the UI) and initializes the statistics lists empty.
    public MemPRPopulationContext(string name) : base(name) {
      scope = new Scope("Global");

      Parameters.Add(problem = new ValueParameter<TProblem>("Problem"));
      Parameters.Add(initialized = new ValueParameter<BoolValue>("Initialized", new BoolValue(false)));
      Parameters.Add(iterations = new ValueParameter<IntValue>("Iterations", new IntValue(0)));
      Parameters.Add(evaluatedSolutions = new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
      Parameters.Add(bestQuality = new ValueParameter<DoubleValue>("BestQuality", new DoubleValue(double.NaN)));
      Parameters.Add(bestSolution = new ValueParameter<TSolution>("BestFoundSolution"));
      Parameters.Add(localSearchEvaluations = new ValueParameter<IntValue>("LocalSearchEvaluations", new IntValue(0)));
      Parameters.Add(localOptimaLevel = new ValueParameter<DoubleValue>("LocalOptimaLevel", new DoubleValue(0)));
      Parameters.Add(byBreeding = new ValueParameter<IntValue>("ByBreeding", new IntValue(0)));
      Parameters.Add(byRelinking = new ValueParameter<IntValue>("ByRelinking", new IntValue(0)));
      Parameters.Add(byDelinking = new ValueParameter<IntValue>("ByDelinking", new IntValue(0)));
      Parameters.Add(bySampling = new ValueParameter<IntValue>("BySampling", new IntValue(0)));
      Parameters.Add(byHillclimbing = new ValueParameter<IntValue>("ByHillclimbing", new IntValue(0)));
      Parameters.Add(byAdaptivewalking = new ValueParameter<IntValue>("ByAdaptivewalking", new IntValue(0)));
      Parameters.Add(random = new ValueParameter<IRandom>("Random", new MersenneTwister()));

      breedingStat = new List<Tuple<double, double, double, double>>();
      relinkingStat = new List<Tuple<double, double, double, double>>();
      delinkingStat = new List<Tuple<double, double, double, double>>();
      samplingStat = new List<Tuple<double, double>>();
      hillclimbingStat = new List<Tuple<double, double>>();
      adaptivewalkingStat = new List<Tuple<double, double>>();
    }
319
    /// <summary>Wraps a raw solution in a solution scope, optionally with a known fitness (NaN = not yet evaluated).</summary>
    public abstract ISingleObjectiveSolutionScope<TSolution> ToScope(TSolution code, double fitness = double.NaN);

    /// <summary>Evaluates a raw solution and returns its fitness.</summary>
    public virtual double Evaluate(TSolution solution, CancellationToken token) {
      var solScope = ToScope(solution);
      Evaluate(solScope, token);
      return solScope.Fitness;
    }

    /// <summary>
    /// Evaluates a solution scope in place: directly via the problem definition if the problem
    /// is programmable, otherwise by running the problem's evaluator operator on the scope.
    /// NOTE(review): this does not increment EvaluatedSolutions; evaluations appear to be
    /// accounted separately via IncrementEvaluatedSolutions - confirm with callers.
    /// </summary>
    public virtual void Evaluate(ISingleObjectiveSolutionScope<TSolution> solScope, CancellationToken token) {
      var pdef = Problem as ISingleObjectiveProblemDefinition;
      if (pdef != null) {
        var ind = new SingleEncodingIndividual(pdef.Encoding, solScope);
        solScope.Fitness = pdef.Evaluate(ind, Random);
      } else {
        RunOperator(Problem.Evaluator, solScope, token);
      }
    }

    /// <summary>Creates the per-solution context used by the solution-level search phases.</summary>
    public abstract TSolutionContext CreateSingleSolutionContext(ISingleObjectiveSolutionScope<TSolution> solution);
339
340    public void IncrementEvaluatedSolutions(int byEvaluations) {
341      if (byEvaluations < 0) throw new ArgumentException("Can only increment and not decrement evaluated solutions.");
342      EvaluatedSolutions += byEvaluations;
343    }
344
    /// <summary>Refits the regression model that predicts breeding (crossover) performance from the collected statistics.</summary>
    public void RelearnBreedingPerformanceModel() {
      breedingPerformanceModel = RunRegression(PrepareRegression(ToListRow(breedingStat)), breedingPerformanceModel).Model;
    }
    /// <summary>
    /// Decides stochastically whether breeding the two parents is promising.
    /// </summary>
    /// <param name="p1">First parent.</param>
    /// <param name="p2">Second parent.</param>
    /// <param name="dist">Distance between the parents in genotype space.</param>
    public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      // no model learned yet -> always allow (optimistic default)
      if (breedingPerformanceModel == null) return true;
      // determine the fitness region the model was trained on
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in BreedingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      // outside the observed region the model cannot be trusted to extrapolate -> allow
      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;

      // otherwise accept with the probability predicted by the absolute performance model
      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel);
    }
362
    /// <summary>Refits the regression model that predicts relinking performance from the collected statistics.</summary>
    public void RelearnRelinkingPerformanceModel() {
      relinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(relinkingStat)), relinkingPerformanceModel).Model;
    }
    /// <summary>
    /// Decides stochastically whether path-relinking between the two solutions is promising.
    /// </summary>
    /// <param name="p1">First solution.</param>
    /// <param name="p2">Second solution.</param>
    /// <param name="dist">Distance between the solutions in genotype space.</param>
    public bool RelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      // no model learned yet -> always allow (optimistic default)
      if (relinkingPerformanceModel == null) return true;
      // determine the fitness region the model was trained on
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in RelinkingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      // outside the observed region the model cannot be trusted to extrapolate -> allow
      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;

      // the relative model predicts improvement over the better of the two solutions
      if (IsBetter(p1, p2)) {
        return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
      }
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
    }
383
    /// <summary>Refits the regression model that predicts delinking performance from the collected statistics.</summary>
    public void RelearnDelinkingPerformanceModel() {
      delinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(delinkingStat)), delinkingPerformanceModel).Model;
    }
    /// <summary>
    /// Decides stochastically whether delinking (path away from a solution) is promising.
    /// </summary>
    /// <param name="p1">First solution.</param>
    /// <param name="p2">Second solution.</param>
    /// <param name="dist">Distance between the solutions in genotype space.</param>
    public bool DelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      // no model learned yet -> always allow (optimistic default)
      if (delinkingPerformanceModel == null) return true;
      // determine the fitness region the model was trained on
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in DelinkingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      // outside the observed region the model cannot be trusted to extrapolate -> allow
      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;
      // the relative model predicts improvement over the better of the two solutions
      if (IsBetter(p1, p2)) {
        return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
      }
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
    }
403
    /// <summary>Refits the regression model that predicts sampling performance from the collected statistics.</summary>
    public void RelearnSamplingPerformanceModel() {
      samplingPerformanceModel = RunRegression(PrepareRegression(ToListRow(samplingStat)), samplingPerformanceModel).Model;
    }
    /// <summary>
    /// Decides stochastically whether sampling a new solution from the model is promising,
    /// based on the average distance between solutions in genotype space.
    /// </summary>
    /// <param name="avgDist">Average genotype-space distance of the current population.</param>
    public bool SamplingSuited(double avgDist) {
      // no model learned yet -> always allow; outside the observed range -> allow (no extrapolation)
      if (samplingPerformanceModel == null) return true;
      if (avgDist < samplingStat.Min(x => x.Item1) || avgDist > samplingStat.Max(x => x.Item1)) return true;
      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { avgDist }, samplingPerformanceModel);
    }
412
    /// <summary>Refits the regression model that predicts hillclimbing performance from the collected statistics.</summary>
    public void RelearnHillclimbingPerformanceModel() {
      hillclimbingPerformanceModel = RunRegression(PrepareRegression(ToListRow(hillclimbingStat)), hillclimbingPerformanceModel).Model;
    }
    /// <summary>Convenience overload: decides based on the scope's current fitness.</summary>
    public bool HillclimbingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
      return HillclimbingSuited(scope.Fitness);
    }
    /// <summary>
    /// Decides stochastically whether hillclimbing from the given starting fitness is promising.
    /// The model predicts the relative quality improvement (see AddHillclimbingResult).
    /// </summary>
    public bool HillclimbingSuited(double startingFitness) {
      // no model learned yet -> always allow; outside the observed range -> allow (no extrapolation)
      if (hillclimbingPerformanceModel == null) return true;
      if (startingFitness < HillclimbingStat.Min(x => x.Item1) || startingFitness > HillclimbingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, hillclimbingPerformanceModel);
    }
425
    /// <summary>Refits the regression model that predicts adaptive-walk performance from the collected statistics.</summary>
    public void RelearnAdaptiveWalkPerformanceModel() {
      adaptiveWalkPerformanceModel = RunRegression(PrepareRegression(ToListRow(adaptivewalkingStat)), adaptiveWalkPerformanceModel).Model;
    }
    /// <summary>Convenience overload: decides based on the scope's current fitness.</summary>
    public bool AdaptivewalkingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
      return AdaptivewalkingSuited(scope.Fitness);
    }
    /// <summary>
    /// Decides stochastically whether an adaptive walk from the given starting fitness is
    /// promising. The model predicts the absolute resulting fitness (see AddAdaptivewalkingResult).
    /// </summary>
    public bool AdaptivewalkingSuited(double startingFitness) {
      // no model learned yet -> always allow; outside the observed range -> allow (no extrapolation)
      if (adaptiveWalkPerformanceModel == null) return true;
      if (startingFitness < AdaptivewalkingStat.Min(x => x.Item1) || startingFitness > AdaptivewalkingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { startingFitness }, adaptiveWalkPerformanceModel);
    }
438
    // Wraps a trained model together with its (shuffled, partitioned) data into a regression
    // solution, e.g. for display in the results. One overload per statistic arity.
    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
    }
    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
    }
    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
    }
448
    /// <summary>
    /// Builds a RegressionProblemData from row samples. All columns but the last are inputs
    /// ("in0".."inN-1"), the last column is the target ("out"). Rows are shuffled so the
    /// training partition (the first up to 50 rows) is an unbiased sample; the rest is test data.
    /// </summary>
    /// <param name="sample">Row-major samples; all rows must have the same length.</param>
    protected RegressionProblemData PrepareRegression(List<List<double>> sample) {
      // one column list per variable
      var columns = sample.First().Select(y => new List<double>()).ToList();
      // transpose the shuffled rows into the column lists
      foreach (var next in sample.Shuffle(Random)) {
        for (var i = 0; i < next.Count; i++) {
          columns[i].Add(next[i]);
        }
      }
      var ds = new Dataset(columns.Select((v, i) => i < columns.Count - 1 ? "in" + i : "out").ToList(), columns);
      var regPrb = new RegressionProblemData(ds, Enumerable.Range(0, columns.Count - 1).Select(x => "in" + x), "out") {
        TrainingPartition = { Start = 0, End = Math.Min(50, sample.Count) },
        TestPartition = { Start = Math.Min(50, sample.Count), End = sample.Count }
      };
      return regPrb;
    }
463
    /// <summary>
    /// Trains a Gaussian process regression model on the given data and returns the better of
    /// the new model and the (optional) baseline. If any input is strongly linearly correlated
    /// with the target (Pearson r &gt; 0.8), a zero-mean GP with linear-ARD + constant covariance
    /// is used instead of the default configuration. Training is retried up to 10 times if the
    /// resulting solution explains no variance (training R^2 ~ 0). Model comparison uses
    /// training MAE for small data sets (&lt; 60 rows), test MAE otherwise.
    /// </summary>
    protected static IConfidenceRegressionSolution RunRegression(RegressionProblemData trainingData, IConfidenceRegressionModel baseLineModel = null) {
      var baseline = baseLineModel != null ? new ConfidenceRegressionSolution(baseLineModel, trainingData) : null;
      var gpr = new GaussianProcessRegression { Problem = { ProblemData = trainingData } };
      if (trainingData.InputVariables.CheckedItems.Any(x => alglib.pearsoncorr2(trainingData.Dataset.GetDoubleValues(x.Value.Value).ToArray(), trainingData.TargetVariableValues.ToArray()) > 0.8)) {
        gpr.MeanFunction = new MeanZero();
        var cov1 = new CovarianceSum();
        cov1.Terms.Add(new CovarianceLinearArd());
        cov1.Terms.Add(new CovarianceConst());
        gpr.CovarianceFunction = cov1;
      }
      IConfidenceRegressionSolution solution = null;
      var cnt = 0;
      do {
        ExecuteAlgorithm(gpr);
        solution = (IConfidenceRegressionSolution)gpr.Results["Solution"].Value;
        cnt++;
      } while (cnt < 10 && (solution == null || solution.TrainingRSquared.IsAlmost(0)));
      if (baseline == null) return solution;
      if (trainingData.Dataset.Rows < 60)
        return solution.TrainingMeanAbsoluteError < baseline.TrainingMeanAbsoluteError ? solution : baseline;
      return solution.TestMeanAbsoluteError < baseline.TestMeanAbsoluteError ? solution : baseline;
    }
486
    /// <summary>
    /// Runs a (normally asynchronous) HeuristicLab algorithm synchronously: blocks the calling
    /// thread until the algorithm reaches the Paused or Stopped state. The state-changed
    /// handler is subscribed before Start, so the terminal transition cannot be missed.
    /// </summary>
    protected static void ExecuteAlgorithm(IAlgorithm algorithm) {
      using (var evt = new AutoResetEvent(false)) {
        EventHandler exeStateChanged = (o, args) => {
          if (algorithm.ExecutionState == ExecutionState.Paused || algorithm.ExecutionState == ExecutionState.Stopped)
            evt.Set();
        };
        algorithm.ExecutionStateChanged += exeStateChanged;
        algorithm.Prepare(true);
        algorithm.Start();
        evt.WaitOne();
        algorithm.ExecutionStateChanged -= exeStateChanged;
      }
    }
500
501    private double ProbabilityAcceptAbsolutePerformanceModel(List<double> inputs, IConfidenceRegressionModel model) {
502      var inputVariables = inputs.Select((v, i) => "in" + i);
503      var ds = new Dataset(inputVariables.Concat( new [] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new [] { new List<double> { double.NaN } }));
504      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
505      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());
506
507      // calculate the fitness goal
508      var goal = Maximization ? Population.Min(x => x.Fitness) : Population.Max(x => x.Fitness);
509      var z = (goal - mean) / sdev;
510      // return the probability of achieving or surpassing that goal
511      var y = alglib.invnormaldistribution(z);
512      return Maximization ? 1.0 - y /* P(X >= z) */ : y; // P(X <= z)
513    }
514
515    private double ProbabilityAcceptRelativePerformanceModel(double basePerformance, List<double> inputs, IConfidenceRegressionModel model) {
516      var inputVariables = inputs.Select((v, i) => "in" + i);
517      var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
518      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
519      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());
520
521      // calculate the improvement goal
522      var goal = Maximization ? Population.Min(x => x.Fitness) - basePerformance : basePerformance - Population.Max(x => x.Fitness);
523      var z = (goal - mean) / sdev;
524      // return the probability of achieving or surpassing that goal
525      return 1.0 - alglib.invnormaldistribution(z); /* P(X >= z) */
526    }
527
528    private static List<List<double>> ToListRow(List<Tuple<double, double>> rows) {
529      return rows.Select(x => new List<double> { x.Item1, x.Item2 }).ToList();
530    }
531    private static List<List<double>> ToListRow(List<Tuple<double, double, double>> rows) {
532      return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3 }).ToList();
533    }
534    private static List<List<double>> ToListRow(List<Tuple<double, double, double, double>> rows) {
535      return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3, x.Item4 }).ToList();
536    }
537
538    [MethodImpl(MethodImplOptions.AggressiveInlining)]
539    public bool IsBetter(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b) {
540      return IsBetter(a.Fitness, b.Fitness);
541    }
542    [MethodImpl(MethodImplOptions.AggressiveInlining)]
543    public bool IsBetter(double a, double b) {
544      return double.IsNaN(b) && !double.IsNaN(a)
545        || Maximization && a > b
546        || !Maximization && a < b;
547    }
548
    /// <summary>
    /// Records a breeding outcome as (better parent fitness, worse parent fitness, parent
    /// distance, child fitness) and refits the model every 10 samples.
    /// </summary>
    public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
        breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));
      else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));
      if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();
    }

    /// <summary>
    /// Records a relinking outcome; the target value is the child's quality improvement over
    /// the better parent (oriented so that larger = better). Refits the model every 10 samples.
    /// </summary>
    public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
        relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
      else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
      if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();
    }

    /// <summary>
    /// Records a delinking outcome; the target value is the child's quality improvement over
    /// the better parent (oriented so that larger = better). Refits the model every 10 samples.
    /// </summary>
    public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
        delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
      else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
      if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel();
    }

    /// <summary>Records a sampling outcome as (average distance, sample fitness); refits every 10 samples.</summary>
    public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {
      samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));
      if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();
    }

    /// <summary>
    /// Records a hillclimbing outcome as (starting fitness, quality improvement); the
    /// improvement is oriented so that larger = better. Refits every 10 samples.
    /// </summary>
    public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
      hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
      if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();
    }

    /// <summary>Records an adaptive-walk outcome as (starting fitness, resulting fitness); refits every 10 samples.</summary>
    public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
      adaptivewalkingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
      if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();
    }
584
    #region IExecutionContext members
    // Creates an atomic operation executing the operator on the global scope.
    public IAtomicOperation CreateOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    // Creates an atomic operation executing the operator on the given scope.
    public IAtomicOperation CreateOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }

    // Child operations share this context as parent (same semantics as CreateOperation here,
    // since this context itself acts as the execution context).
    public IAtomicOperation CreateChildOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateChildOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }
    #endregion
602
603    #region Math Helper
604    // normal distribution CDF (left of x) for N(0;1) standard normal distribution
605    // from http://www.johndcook.com/blog/csharp_phi/
606    // license: "This code is in the public domain. Do whatever you want with it, no strings attached."
607    // added: 2016-11-19 21:46 CET
608    /*protected static double Phi(double x) {
609      // constants
610      double a1 = 0.254829592;
611      double a2 = -0.284496736;
612      double a3 = 1.421413741;
613      double a4 = -1.453152027;
614      double a5 = 1.061405429;
615      double p = 0.3275911;
616
617      // Save the sign of x
618      int sign = 1;
619      if (x < 0)
620        sign = -1;
621      x = Math.Abs(x) / Math.Sqrt(2.0);
622
623      // A&S formula 7.1.26
624      double t = 1.0 / (1.0 + p * x);
625      double y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.Exp(-x * x);
626
627      return 0.5 * (1.0 + sign * y);
628    }*/
629    #endregion
630
631    #region Engine Helper
632    public void RunOperator(IOperator op, IScope scope, CancellationToken cancellationToken) {
633      var stack = new Stack<IOperation>();
634      stack.Push(CreateChildOperation(op, scope));
635
636      while (stack.Count > 0) {
637        cancellationToken.ThrowIfCancellationRequested();
638
639        var next = stack.Pop();
640        if (next is OperationCollection) {
641          var coll = (OperationCollection)next;
642          for (int i = coll.Count - 1; i >= 0; i--)
643            if (coll[i] != null) stack.Push(coll[i]);
644        } else if (next is IAtomicOperation) {
645          var operation = (IAtomicOperation)next;
646          try {
647            next = operation.Operator.Execute((IExecutionContext)operation, cancellationToken);
648          } catch (Exception ex) {
649            stack.Push(operation);
650            if (ex is OperationCanceledException) throw ex;
651            else throw new OperatorExecutionException(operation.Operator, ex);
652          }
653          if (next != null) stack.Push(next);
654        }
655      }
656    }
657    #endregion
658  }
659
660  [Item("SingleSolutionMemPRContext", "Abstract base class for single solution MemPR contexts.")]
661  [StorableClass]
662  public abstract class MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> : ParameterizedNamedItem,
663    ISingleSolutionHeuristicAlgorithmContext<TProblem, TSolution>, IEvaluationServiceContext<TSolution>
664      where TProblem : class, IItem, ISingleObjectiveHeuristicOptimizationProblem
665      where TSolution : class, IItem
666      where TContext : MemPRPopulationContext<TProblem, TSolution, TContext, TSolutionContext>
667      where TSolutionContext : MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> {
668
669    private TContext parent;
670    public IExecutionContext Parent {
671      get { return parent; }
672      set { throw new InvalidOperationException("Cannot set the parent of a single-solution context."); }
673    }
674
675    [Storable]
676    private ISingleObjectiveSolutionScope<TSolution> scope;
677    public IScope Scope {
678      get { return scope; }
679    }
680
681    IKeyedItemCollection<string, IParameter> IExecutionContext.Parameters {
682      get { return Parameters; }
683    }
684
685    public TProblem Problem {
686      get { return parent.Problem; }
687    }
688    public bool Maximization {
689      get { return parent.Maximization; }
690    }
691
692    public double BestQuality {
693      get { return parent.BestQuality; }
694      set { parent.BestQuality = value; }
695    }
696
697    public TSolution BestSolution {
698      get { return parent.BestSolution; }
699      set { parent.BestSolution = value; }
700    }
701
702    public IRandom Random {
703      get { return parent.Random; }
704    }
705
706    [Storable]
707    private IValueParameter<IntValue> evaluatedSolutions;
708    public int EvaluatedSolutions {
709      get { return evaluatedSolutions.Value.Value; }
710      set { evaluatedSolutions.Value.Value = value; }
711    }
712
713    [Storable]
714    private IValueParameter<IntValue> iterations;
715    public int Iterations {
716      get { return iterations.Value.Value; }
717      set { iterations.Value.Value = value; }
718    }
719
720    ISingleObjectiveSolutionScope<TSolution> ISingleSolutionHeuristicAlgorithmContext<TProblem, TSolution>.Solution {
721      get { return scope; }
722    }
723
724    [StorableConstructor]
725    protected MemPRSolutionContext(bool deserializing) : base(deserializing) { }
726    protected MemPRSolutionContext(MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> original, Cloner cloner)
727      : base(original, cloner) {
728      scope = cloner.Clone(original.scope);
729      evaluatedSolutions = cloner.Clone(original.evaluatedSolutions);
730      iterations = cloner.Clone(original.iterations);
731    }
732    public MemPRSolutionContext(TContext baseContext, ISingleObjectiveSolutionScope<TSolution> solution) {
733      parent = baseContext;
734      scope = solution;
735     
736      Parameters.Add(evaluatedSolutions = new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
737      Parameters.Add(iterations = new ValueParameter<IntValue>("Iterations", new IntValue(0)));
738    }
739
740    public void IncrementEvaluatedSolutions(int byEvaluations) {
741      if (byEvaluations < 0) throw new ArgumentException("Can only increment and not decrement evaluated solutions.");
742      EvaluatedSolutions += byEvaluations;
743    }
744    public virtual double Evaluate(TSolution solution, CancellationToken token) {
745      return parent.Evaluate(solution, token);
746    }
747
748    public virtual void Evaluate(ISingleObjectiveSolutionScope<TSolution> solScope, CancellationToken token) {
749      parent.Evaluate(solScope, token);
750    }
751
752    #region IExecutionContext members
753    public IAtomicOperation CreateOperation(IOperator op) {
754      return new ExecutionContext(this, op, Scope);
755    }
756
757    public IAtomicOperation CreateOperation(IOperator op, IScope s) {
758      return new ExecutionContext(this, op, s);
759    }
760
761    public IAtomicOperation CreateChildOperation(IOperator op) {
762      return new ExecutionContext(this, op, Scope);
763    }
764
765    public IAtomicOperation CreateChildOperation(IOperator op, IScope s) {
766      return new ExecutionContext(this, op, s);
767    }
768    #endregion
769  }
770}
Note: See TracBrowser for help on using the repository browser.