
source: branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs @ 14551

Last change on this file since 14551 was 14550, checked in by abeham, 7 years ago

#2701:

  • Added BinaryVectorEqualityComparer (identical to the one in the P3 plugin) to the BinaryVector plugin
  • Added delinking for absolute-coded permutations
  • Some tweaks
File size: 31.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Algorithms.MemPR.Interfaces;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Random;
using ExecutionContext = HeuristicLab.Core.ExecutionContext;

namespace HeuristicLab.Algorithms.MemPR {
  [Item("MemPRContext", "Abstract base class for MemPR contexts.")]
  [StorableClass]
  public abstract class MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext> : ParameterizedNamedItem,
    IPopulationBasedHeuristicAlgorithmContext<TProblem, TSolution>, ISolutionModelContext<TSolution>
      where TProblem : class, IItem, ISingleObjectiveProblemDefinition
      where TSolution : class, IItem
      where TPopulationContext : MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext>
      where TSolutionContext : MemPRSolutionContext<TProblem, TSolution, TPopulationContext, TSolutionContext> {

    private IExecutionContext parent;
    public IExecutionContext Parent {
      get { return parent; }
      set { parent = value; }
    }

    [Storable]
    private IScope scope;
    public IScope Scope {
      get { return scope; }
      private set { scope = value; }
    }

    IKeyedItemCollection<string, IParameter> IExecutionContext.Parameters {
      get { return Parameters; }
    }

    [Storable]
    private IValueParameter<TProblem> problem;
    public TProblem Problem {
      get { return problem.Value; }
      set { problem.Value = value; }
    }

    [Storable]
    private IValueParameter<BoolValue> initialized;
    public bool Initialized {
      get { return initialized.Value.Value; }
      set { initialized.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> iterations;
    public int Iterations {
      get { return iterations.Value.Value; }
      set { iterations.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> evaluatedSolutions;
    public int EvaluatedSolutions {
      get { return evaluatedSolutions.Value.Value; }
      set { evaluatedSolutions.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<DoubleValue> bestQuality;
    public double BestQuality {
      get { return bestQuality.Value.Value; }
      set { bestQuality.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<TSolution> bestSolution;
    public TSolution BestSolution {
      get { return bestSolution.Value; }
      set { bestSolution.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> localSearchEvaluations;
    public int LocalSearchEvaluations {
      get { return localSearchEvaluations.Value.Value; }
      set { localSearchEvaluations.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<DoubleValue> localOptimaLevel;
    public double LocalOptimaLevel {
      get { return localOptimaLevel.Value.Value; }
      set { localOptimaLevel.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byBreeding;
    public int ByBreeding {
      get { return byBreeding.Value.Value; }
      set { byBreeding.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byRelinking;
    public int ByRelinking {
      get { return byRelinking.Value.Value; }
      set { byRelinking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byDelinking;
    public int ByDelinking {
      get { return byDelinking.Value.Value; }
      set { byDelinking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> bySampling;
    public int BySampling {
      get { return bySampling.Value.Value; }
      set { bySampling.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byHillclimbing;
    public int ByHillclimbing {
      get { return byHillclimbing.Value.Value; }
      set { byHillclimbing.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byAdaptivewalking;
    public int ByAdaptivewalking {
      get { return byAdaptivewalking.Value.Value; }
      set { byAdaptivewalking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IRandom> random;
    public IRandom Random {
      get { return random.Value; }
      set { random.Value = value; }
    }

    public IEnumerable<ISingleObjectiveSolutionScope<TSolution>> Population {
      get { return scope.SubScopes.OfType<ISingleObjectiveSolutionScope<TSolution>>(); }
    }
    public void AddToPopulation(ISingleObjectiveSolutionScope<TSolution> solScope) {
      scope.SubScopes.Add(solScope);
    }
    public void ReplaceAtPopulation(int index, ISingleObjectiveSolutionScope<TSolution> solScope) {
      scope.SubScopes[index] = solScope;
    }
    public ISingleObjectiveSolutionScope<TSolution> AtPopulation(int index) {
      return scope.SubScopes[index] as ISingleObjectiveSolutionScope<TSolution>;
    }
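    // Sorts the population in place so that the best solution comes first
    // (highest fitness when maximizing, lowest fitness otherwise).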
    public void SortPopulation() {
      scope.SubScopes.Replace(scope.SubScopes.OfType<ISingleObjectiveSolutionScope<TSolution>>().OrderBy(x => Problem.Maximization ? -x.Fitness : x.Fitness).ToList());
    }
    public int PopulationCount {
      get { return scope.SubScopes.Count; }
    }

    [Storable]
    private IConfidenceRegressionModel breedingPerformanceModel;
    public IConfidenceRegressionModel BreedingPerformanceModel {
      get { return breedingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double, double>> breedingStat;
    public List<Tuple<double, double, double>> BreedingStat {
      get { return breedingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel relinkingPerformanceModel;
    public IConfidenceRegressionModel RelinkingPerformanceModel {
      get { return relinkingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double, double>> relinkingStat;
    public List<Tuple<double, double, double>> RelinkingStat {
      get { return relinkingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel delinkingPerformanceModel;
    public IConfidenceRegressionModel DelinkingPerformanceModel {
      get { return delinkingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double, double>> delinkingStat;
    public List<Tuple<double, double, double>> DelinkingStat {
      get { return delinkingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel samplingPerformanceModel;
    public IConfidenceRegressionModel SamplingPerformanceModel {
      get { return samplingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double>> samplingStat;
    public List<Tuple<double, double>> SamplingStat {
      get { return samplingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel hillclimbingPerformanceModel;
    public IConfidenceRegressionModel HillclimbingPerformanceModel {
      get { return hillclimbingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double>> hillclimbingStat;
    public List<Tuple<double, double>> HillclimbingStat {
      get { return hillclimbingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel adaptiveWalkPerformanceModel;
    public IConfidenceRegressionModel AdaptiveWalkPerformanceModel {
      get { return adaptiveWalkPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double>> adaptivewalkingStat;
    public List<Tuple<double, double>> AdaptivewalkingStat {
      get { return adaptivewalkingStat; }
    }

    [Storable]
    public ISolutionModel<TSolution> Model { get; set; }

    [StorableConstructor]
    protected MemPRPopulationContext(bool deserializing) : base(deserializing) { }
    protected MemPRPopulationContext(MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext> original, Cloner cloner)
      : base(original, cloner) {
      scope = cloner.Clone(original.scope);
      problem = cloner.Clone(original.problem);
      initialized = cloner.Clone(original.initialized);
      iterations = cloner.Clone(original.iterations);
      evaluatedSolutions = cloner.Clone(original.evaluatedSolutions);
      bestQuality = cloner.Clone(original.bestQuality);
      bestSolution = cloner.Clone(original.bestSolution);
      localSearchEvaluations = cloner.Clone(original.localSearchEvaluations);
      localOptimaLevel = cloner.Clone(original.localOptimaLevel);
      byBreeding = cloner.Clone(original.byBreeding);
      byRelinking = cloner.Clone(original.byRelinking);
      byDelinking = cloner.Clone(original.byDelinking);
      bySampling = cloner.Clone(original.bySampling);
      byHillclimbing = cloner.Clone(original.byHillclimbing);
      byAdaptivewalking = cloner.Clone(original.byAdaptivewalking);
      random = cloner.Clone(original.random);
      breedingPerformanceModel = cloner.Clone(original.breedingPerformanceModel);
      breedingStat = original.breedingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3)).ToList();
      relinkingPerformanceModel = cloner.Clone(original.relinkingPerformanceModel);
      relinkingStat = original.relinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3)).ToList();
      delinkingPerformanceModel = cloner.Clone(original.delinkingPerformanceModel);
      delinkingStat = original.delinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3)).ToList();
      samplingPerformanceModel = cloner.Clone(original.samplingPerformanceModel);
      samplingStat = original.samplingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
      hillclimbingPerformanceModel = cloner.Clone(original.hillclimbingPerformanceModel);
      hillclimbingStat = original.hillclimbingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
      adaptiveWalkPerformanceModel = cloner.Clone(original.adaptiveWalkPerformanceModel);
      adaptivewalkingStat = original.adaptivewalkingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();

      Model = cloner.Clone(original.Model);
    }
    public MemPRPopulationContext() : this("MemPRContext") { }
    public MemPRPopulationContext(string name) : base(name) {
      scope = new Scope("Global");

      Parameters.Add(problem = new ValueParameter<TProblem>("Problem"));
      Parameters.Add(initialized = new ValueParameter<BoolValue>("Initialized", new BoolValue(false)));
      Parameters.Add(iterations = new ValueParameter<IntValue>("Iterations", new IntValue(0)));
      Parameters.Add(evaluatedSolutions = new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
      Parameters.Add(bestQuality = new ValueParameter<DoubleValue>("BestQuality", new DoubleValue(double.NaN)));
      Parameters.Add(bestSolution = new ValueParameter<TSolution>("BestSolution"));
      Parameters.Add(localSearchEvaluations = new ValueParameter<IntValue>("LocalSearchEvaluations", new IntValue(0)));
      Parameters.Add(localOptimaLevel = new ValueParameter<DoubleValue>("LocalOptimaLevel", new DoubleValue(0)));
      Parameters.Add(byBreeding = new ValueParameter<IntValue>("ByBreeding", new IntValue(0)));
      Parameters.Add(byRelinking = new ValueParameter<IntValue>("ByRelinking", new IntValue(0)));
      Parameters.Add(byDelinking = new ValueParameter<IntValue>("ByDelinking", new IntValue(0)));
      Parameters.Add(bySampling = new ValueParameter<IntValue>("BySampling", new IntValue(0)));
      Parameters.Add(byHillclimbing = new ValueParameter<IntValue>("ByHillclimbing", new IntValue(0)));
      Parameters.Add(byAdaptivewalking = new ValueParameter<IntValue>("ByAdaptivewalking", new IntValue(0)));
      Parameters.Add(random = new ValueParameter<IRandom>("Random", new MersenneTwister()));

      breedingStat = new List<Tuple<double, double, double>>();
      relinkingStat = new List<Tuple<double, double, double>>();
      delinkingStat = new List<Tuple<double, double, double>>();
      samplingStat = new List<Tuple<double, double>>();
      hillclimbingStat = new List<Tuple<double, double>>();
      adaptivewalkingStat = new List<Tuple<double, double>>();
    }

    public abstract TSolutionContext CreateSingleSolutionContext(ISingleObjectiveSolutionScope<TSolution> solution);

    public void IncrementEvaluatedSolutions(int byEvaluations) {
      if (byEvaluations < 0) throw new ArgumentException("Can only increment and not decrement evaluated solutions.");
      EvaluatedSolutions += byEvaluations;
    }

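    // Each operator (breeding, relinking, delinking, sampling, hillclimbing, adaptive walking) keeps a list of
    // samples pairing the input fitness(es) with the resulting fitness. The Relearn* methods train a Gaussian
    // process regression model from these samples; the *Suited methods then use that model to decide whether
    // applying the operator to the given input(s) is likely to be worthwhile.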
    public void RelearnBreedingPerformanceModel() {
      breedingPerformanceModel = RunRegression(PrepareRegression(BreedingStat), breedingPerformanceModel).Model;
    }
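    // A pairing is always considered suited while no model has been learned yet, or while either parent's fitness
    // lies outside the range observed so far; otherwise it is accepted with the probability estimated by the model.
    // The better parent is passed as the first model input, matching how AddBreedingResult records the samples.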
    public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2) {
      if (breedingPerformanceModel == null) return true;
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in BreedingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      if (IsBetter(p1, p2)) {
        if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
          return true;
        return Random.NextDouble() < ProbabilityAccept3dModel(p1.Fitness, p2.Fitness, breedingPerformanceModel);
      }
      if (p1.Fitness < minI2 || p1.Fitness > maxI2 || p2.Fitness < minI1 || p2.Fitness > maxI1)
        return true;
      return Random.NextDouble() < ProbabilityAccept3dModel(p2.Fitness, p1.Fitness, breedingPerformanceModel);
    }

    public void RelearnRelinkingPerformanceModel() {
      relinkingPerformanceModel = RunRegression(PrepareRegression(RelinkingStat), relinkingPerformanceModel).Model;
    }
    public bool RelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2) {
      if (relinkingPerformanceModel == null) return true;
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in RelinkingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      if (IsBetter(p1, p2)) {
        if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
          return true;
        return Random.NextDouble() < ProbabilityAccept3dModel(p1.Fitness, p2.Fitness, relinkingPerformanceModel);
      }
      if (p1.Fitness < minI2 || p1.Fitness > maxI2 || p2.Fitness < minI1 || p2.Fitness > maxI1)
        return true;
      return Random.NextDouble() < ProbabilityAccept3dModel(p2.Fitness, p1.Fitness, relinkingPerformanceModel);
    }

    public void RelearnDelinkingPerformanceModel() {
      delinkingPerformanceModel = RunRegression(PrepareRegression(DelinkingStat), delinkingPerformanceModel).Model;
    }
    public bool DelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2) {
      if (delinkingPerformanceModel == null) return true;
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in DelinkingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      if (IsBetter(p1, p2)) {
        if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
          return true;
        return Random.NextDouble() < ProbabilityAccept3dModel(p1.Fitness, p2.Fitness, delinkingPerformanceModel);
      }
      if (p1.Fitness < minI2 || p1.Fitness > maxI2 || p2.Fitness < minI1 || p2.Fitness > maxI1)
        return true;
      return Random.NextDouble() < ProbabilityAccept3dModel(p2.Fitness, p1.Fitness, delinkingPerformanceModel);
    }

    public void RelearnSamplingPerformanceModel() {
      samplingPerformanceModel = RunRegression(PrepareRegression(SamplingStat), samplingPerformanceModel).Model;
    }
    public bool SamplingSuited() {
      if (samplingPerformanceModel == null) return true;
      return Random.NextDouble() < ProbabilityAccept2dModel(Population.Average(x => x.Fitness), samplingPerformanceModel);
    }

    public void RelearnHillclimbingPerformanceModel() {
      hillclimbingPerformanceModel = RunRegression(PrepareRegression(HillclimbingStat), hillclimbingPerformanceModel).Model;
    }
    public bool HillclimbingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
      if (hillclimbingPerformanceModel == null) return true;
      if (scope.Fitness < HillclimbingStat.Min(x => x.Item1) || scope.Fitness > HillclimbingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAccept2dModel(scope.Fitness, hillclimbingPerformanceModel);
    }
    public bool HillclimbingSuited(double startingFitness) {
      if (hillclimbingPerformanceModel == null) return true;
      if (startingFitness < HillclimbingStat.Min(x => x.Item1) || startingFitness > HillclimbingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAccept2dModel(startingFitness, hillclimbingPerformanceModel);
    }

    public void RelearnAdaptiveWalkPerformanceModel() {
      adaptiveWalkPerformanceModel = RunRegression(PrepareRegression(AdaptivewalkingStat), adaptiveWalkPerformanceModel).Model;
    }
    public bool AdaptivewalkingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
      if (adaptiveWalkPerformanceModel == null) return true;
      if (scope.Fitness < AdaptivewalkingStat.Min(x => x.Item1) || scope.Fitness > AdaptivewalkingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAccept2dModel(scope.Fitness, adaptiveWalkPerformanceModel);
    }
    public bool AdaptivewalkingSuited(double startingFitness) {
      if (adaptiveWalkPerformanceModel == null) return true;
      if (startingFitness < AdaptivewalkingStat.Min(x => x.Item1) || startingFitness > AdaptivewalkingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAccept2dModel(startingFitness, adaptiveWalkPerformanceModel);
    }

    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, List<Tuple<double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(data));
    }
    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, List<Tuple<double, double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(data));
    }

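    // Builds a regression problem from the recorded (input, outcome) pairs: the samples are shuffled, the first
    // (up to) 50 rows form the training partition and any remaining rows form the test partition.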
    protected RegressionProblemData PrepareRegression(List<Tuple<double, double>> sample) {
      var inCol = new List<double>();
      var outCol = new List<double>();
      foreach (var next in sample.Shuffle(Random)) {
        inCol.Add(next.Item1);
        outCol.Add(next.Item2);
      }
      var ds = new Dataset(new[] { "in", "out" }, new[] { inCol, outCol });
      var regPrb = new RegressionProblemData(ds, new[] { "in" }, "out") {
        TrainingPartition = { Start = 0, End = Math.Min(50, sample.Count) },
        TestPartition = { Start = Math.Min(50, sample.Count), End = sample.Count }
      };
      return regPrb;
    }

    protected RegressionProblemData PrepareRegression(List<Tuple<double, double, double>> sample) {
      var in1Col = new List<double>();
      var in2Col = new List<double>();
      var outCol = new List<double>();
      foreach (var next in sample.Shuffle(Random)) {
        in1Col.Add(next.Item1);
        in2Col.Add(next.Item2);
        outCol.Add(next.Item3);
      }
      var ds = new Dataset(new[] { "in1", "in2", "out" }, new[] { in1Col, in2Col, outCol });
      var regPrb = new RegressionProblemData(ds, new[] { "in1", "in2" }, "out") {
        TrainingPartition = { Start = 0, End = Math.Min(50, sample.Count) },
        TestPartition = { Start = Math.Min(50, sample.Count), End = sample.Count }
      };
      return regPrb;
    }

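    // Trains a Gaussian process regression model on the given data. If any input is strongly linearly correlated
    // with the target (Pearson correlation > 0.8), a zero mean function and a linear ARD plus constant covariance
    // are used instead of the defaults. Training is retried up to 10 times while the training R² is (almost) zero,
    // and the previous model is kept unless the new solution achieves a strictly lower mean absolute error
    // (measured on the training partition for fewer than 60 rows, otherwise on the test partition).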
    protected static IConfidenceRegressionSolution RunRegression(RegressionProblemData trainingData, IConfidenceRegressionModel baseLineModel = null) {
      var baseline = baseLineModel != null ? new ConfidenceRegressionSolution(baseLineModel, trainingData) : null;
      var gpr = new GaussianProcessRegression { Problem = { ProblemData = trainingData } };
      if (trainingData.InputVariables.CheckedItems.Any(x => alglib.pearsoncorr2(trainingData.Dataset.GetDoubleValues(x.Value.Value).ToArray(), trainingData.TargetVariableValues.ToArray()) > 0.8)) {
        gpr.MeanFunction = new MeanZero();
        var cov1 = new CovarianceSum();
        cov1.Terms.Add(new CovarianceLinearArd());
        cov1.Terms.Add(new CovarianceConst());
        gpr.CovarianceFunction = cov1;
      }
      IConfidenceRegressionSolution solution = null;
      var cnt = 0;
      do {
        ExecuteAlgorithm(gpr);
        solution = (IConfidenceRegressionSolution)gpr.Results["Solution"].Value;
        cnt++;
      } while (cnt < 10 && (solution == null || solution.TrainingRSquared.IsAlmost(0)));
      if (baseline == null) return solution;
      if (trainingData.Dataset.Rows < 60)
        return solution.TrainingMeanAbsoluteError < baseline.TrainingMeanAbsoluteError ? solution : baseline;
      return solution.TestMeanAbsoluteError < baseline.TestMeanAbsoluteError ? solution : baseline;
    }

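    // Runs the given algorithm and blocks the calling thread on an AutoResetEvent until the algorithm reports the
    // Paused or Stopped execution state, which makes the call effectively synchronous.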
    protected static void ExecuteAlgorithm(IAlgorithm algorithm) {
      using (var evt = new AutoResetEvent(false)) {
        EventHandler exeStateChanged = (o, args) => {
          if (algorithm.ExecutionState == ExecutionState.Paused || algorithm.ExecutionState == ExecutionState.Stopped)
            evt.Set();
        };
        algorithm.ExecutionStateChanged += exeStateChanged;
        algorithm.Prepare(true);
        algorithm.Start();
        evt.WaitOne();
        algorithm.ExecutionStateChanged -= exeStateChanged;
      }
    }

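    // Estimates the probability that the operator's outcome is at least as good as the worst member of the current
    // population: the model provides a predictive mean and variance for the given input(s), and the standard normal
    // CDF (Phi) converts the standardized distance to that reference fitness into an acceptance probability.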
    private double ProbabilityAccept2dModel(double a, IConfidenceRegressionModel model) {
      var ds = new Dataset(new[] { "in", "out" }, new[] { new List<double> { a }, new List<double> { double.NaN } });
      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());

      var goal = Problem.Maximization ? Population.Min(x => x.Fitness) : Population.Max(x => x.Fitness);
      var z = (goal - mean) / sdev;
      return Problem.Maximization ? 1.0 - Phi(z) /* P(X >= z) */ : Phi(z); // P(X <= z)
    }

    private double ProbabilityAccept3dModel(double a, double b, IConfidenceRegressionModel model) {
      var ds = new Dataset(new[] { "in1", "in2", "out" }, new[] { new List<double> { a }, new List<double> { b }, new List<double> { double.NaN } });
      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());

      var goal = Problem.Maximization ? Population.Min(x => x.Fitness) : Population.Max(x => x.Fitness);
      var z = (goal - mean) / sdev;
      return Problem.Maximization ? 1.0 - Phi(z) /* P(X >= z) */ : Phi(z); // P(X <= z)
    }

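    // Fitness comparison that respects the optimization direction; a NaN fitness always counts as worse,
    // so an unevaluated solution never beats an evaluated one.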
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool IsBetter(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b) {
      return IsBetter(a.Fitness, b.Fitness);
    }
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool IsBetter(double a, double b) {
      return double.IsNaN(b) && !double.IsNaN(a)
        || Problem.Maximization && a > b
        || !Problem.Maximization && a < b;
    }

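    // The Add*Result methods record the samples that feed the performance models; for the two-parent operators the
    // better parent's fitness is always stored as the first input so that the models see a consistent ordering.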
    public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
        BreedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, child.Fitness));
      else BreedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, child.Fitness));
    }

    public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
        RelinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, child.Fitness));
      else RelinkingStat.Add(Tuple.Create(b.Fitness, a.Fitness, child.Fitness));
    }

    public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, ISingleObjectiveSolutionScope<TSolution> child) {
      if (IsBetter(a, b))
        DelinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, child.Fitness));
      else DelinkingStat.Add(Tuple.Create(b.Fitness, a.Fitness, child.Fitness));
    }

    public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample) {
      SamplingStat.Add(Tuple.Create(Population.Average(x => x.Fitness), sample.Fitness));
    }

    public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
      HillclimbingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
    }

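    // Note: despite its name, this method records into the adaptive-walk statistics (AdaptivewalkingStat);
    // it presumably kept its name from an earlier tabu-walk operator.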
    public void AddTabuwalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
      AdaptivewalkingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
    }

    #region IExecutionContext members
    public IAtomicOperation CreateOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }

    public IAtomicOperation CreateChildOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateChildOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }
    #endregion

    #region Math Helper
    // normal distribution CDF (left of x) for N(0;1) standard normal distribution
    // from http://www.johndcook.com/blog/csharp_phi/
    // license: "This code is in the public domain. Do whatever you want with it, no strings attached."
    // added: 2016-11-19 21:46 CET
    protected static double Phi(double x) {
      // constants
      double a1 = 0.254829592;
      double a2 = -0.284496736;
      double a3 = 1.421413741;
      double a4 = -1.453152027;
      double a5 = 1.061405429;
      double p = 0.3275911;

      // Save the sign of x
      int sign = 1;
      if (x < 0)
        sign = -1;
      x = Math.Abs(x) / Math.Sqrt(2.0);

      // A&S formula 7.1.26
      double t = 1.0 / (1.0 + p * x);
      double y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.Exp(-x * x);

      return 0.5 * (1.0 + sign * y);
    }
    #endregion
  }

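  // Context for a single solution: it delegates Problem, BestQuality, BestSolution and Random to its parent
  // population context and only tracks its own iteration and evaluation counters.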
  [Item("SingleSolutionMemPRContext", "Abstract base class for single solution MemPR contexts.")]
  [StorableClass]
  public abstract class MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> : ParameterizedNamedItem,
    ISingleSolutionHeuristicAlgorithmContext<TProblem, TSolution>
      where TProblem : class, IItem, ISingleObjectiveProblemDefinition
      where TSolution : class, IItem
      where TContext : MemPRPopulationContext<TProblem, TSolution, TContext, TSolutionContext>
      where TSolutionContext : MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> {

    private TContext parent;
    public IExecutionContext Parent {
      get { return parent; }
      set { throw new InvalidOperationException("Cannot set the parent of a single-solution context."); }
    }

    [Storable]
    private ISingleObjectiveSolutionScope<TSolution> scope;
    public IScope Scope {
      get { return scope; }
    }

    IKeyedItemCollection<string, IParameter> IExecutionContext.Parameters {
      get { return Parameters; }
    }

    public TProblem Problem {
      get { return parent.Problem; }
    }

    public double BestQuality {
      get { return parent.BestQuality; }
      set { parent.BestQuality = value; }
    }

    public TSolution BestSolution {
      get { return parent.BestSolution; }
      set { parent.BestSolution = value; }
    }

    public IRandom Random {
      get { return parent.Random; }
    }

    [Storable]
    private IValueParameter<IntValue> evaluatedSolutions;
    public int EvaluatedSolutions {
      get { return evaluatedSolutions.Value.Value; }
      set { evaluatedSolutions.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> iterations;
    public int Iterations {
      get { return iterations.Value.Value; }
      set { iterations.Value.Value = value; }
    }

    ISingleObjectiveSolutionScope<TSolution> ISingleSolutionHeuristicAlgorithmContext<TProblem, TSolution>.Solution {
      get { return scope; }
    }

    [StorableConstructor]
    protected MemPRSolutionContext(bool deserializing) : base(deserializing) { }
    protected MemPRSolutionContext(MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> original, Cloner cloner)
      : base(original, cloner) {
      scope = cloner.Clone(original.scope);
      evaluatedSolutions = cloner.Clone(original.evaluatedSolutions);
      iterations = cloner.Clone(original.iterations);
    }
    public MemPRSolutionContext(TContext baseContext, ISingleObjectiveSolutionScope<TSolution> solution) {
      parent = baseContext;
      scope = solution;

      Parameters.Add(evaluatedSolutions = new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
      Parameters.Add(iterations = new ValueParameter<IntValue>("Iterations", new IntValue(0)));
    }

    public void IncrementEvaluatedSolutions(int byEvaluations) {
      if (byEvaluations < 0) throw new ArgumentException("Can only increment and not decrement evaluated solutions.");
      EvaluatedSolutions += byEvaluations;
    }

    #region IExecutionContext members
    public IAtomicOperation CreateOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }

    public IAtomicOperation CreateChildOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateChildOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }
    #endregion
  }
}