
source: branches/2701_MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs @ 17185

Last change on this file since 17185 was 14680, checked in by abeham, 7 years ago

#2701: disabled learning

  • updated HeuristicLab.Data to trunk
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Algorithms.MemPR.Interfaces;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Random;
using ExecutionContext = HeuristicLab.Core.ExecutionContext;

namespace HeuristicLab.Algorithms.MemPR {
  [Item("MemPRContext", "Abstract base class for MemPR contexts.")]
  [StorableClass]
  public abstract class MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext> : ParameterizedNamedItem,
    IPopulationBasedHeuristicAlgorithmContext<TProblem, TSolution>, ISolutionModelContext<TSolution>, IEvaluationServiceContext<TSolution>
      where TProblem : class, IItem, ISingleObjectiveHeuristicOptimizationProblem
      where TSolution : class, IItem
      where TPopulationContext : MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext>
      where TSolutionContext : MemPRSolutionContext<TProblem, TSolution, TPopulationContext, TSolutionContext> {

    private IExecutionContext parent;
    public IExecutionContext Parent {
      get { return parent; }
      set { parent = value; }
    }

    [Storable]
    private IScope scope;
    public IScope Scope {
      get { return scope; }
      private set { scope = value; }
    }

    IKeyedItemCollection<string, IParameter> IExecutionContext.Parameters {
      get { return Parameters; }
    }

    [Storable]
    private IValueParameter<TProblem> problem;
    public TProblem Problem {
      get { return problem.Value; }
      set { problem.Value = value; }
    }
    public bool Maximization {
      get { return ((IValueParameter<BoolValue>)Problem.MaximizationParameter).Value.Value; }
    }

    [Storable]
    private IValueParameter<BoolValue> initialized;
    public bool Initialized {
      get { return initialized.Value.Value; }
      set { initialized.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> iterations;
    public int Iterations {
      get { return iterations.Value.Value; }
      set { iterations.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> evaluatedSolutions;
    public int EvaluatedSolutions {
      get { return evaluatedSolutions.Value.Value; }
      set { evaluatedSolutions.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<DoubleValue> bestQuality;
    public double BestQuality {
      get { return bestQuality.Value.Value; }
      set { bestQuality.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<TSolution> bestSolution;
    public TSolution BestSolution {
      get { return bestSolution.Value; }
      set { bestSolution.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> localSearchEvaluations;
    public int LocalSearchEvaluations {
      get { return localSearchEvaluations.Value.Value; }
      set { localSearchEvaluations.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<DoubleValue> localOptimaLevel;
    public double LocalOptimaLevel {
      get { return localOptimaLevel.Value.Value; }
      set { localOptimaLevel.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byBreeding;
    public int ByBreeding {
      get { return byBreeding.Value.Value; }
      set { byBreeding.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byRelinking;
    public int ByRelinking {
      get { return byRelinking.Value.Value; }
      set { byRelinking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byDelinking;
    public int ByDelinking {
      get { return byDelinking.Value.Value; }
      set { byDelinking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> bySampling;
    public int BySampling {
      get { return bySampling.Value.Value; }
      set { bySampling.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byHillclimbing;
    public int ByHillclimbing {
      get { return byHillclimbing.Value.Value; }
      set { byHillclimbing.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> byAdaptivewalking;
    public int ByAdaptivewalking {
      get { return byAdaptivewalking.Value.Value; }
      set { byAdaptivewalking.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IRandom> random;
    public IRandom Random {
      get { return random.Value; }
      set { random.Value = value; }
    }

    public IEnumerable<ISingleObjectiveSolutionScope<TSolution>> Population {
      get { return scope.SubScopes.OfType<ISingleObjectiveSolutionScope<TSolution>>(); }
    }
    public void AddToPopulation(ISingleObjectiveSolutionScope<TSolution> solScope) {
      scope.SubScopes.Add(solScope);
    }
    public void ReplaceAtPopulation(int index, ISingleObjectiveSolutionScope<TSolution> solScope) {
      scope.SubScopes[index] = solScope;
    }
    public ISingleObjectiveSolutionScope<TSolution> AtPopulation(int index) {
      return scope.SubScopes[index] as ISingleObjectiveSolutionScope<TSolution>;
    }
    public void SortPopulation() {
      scope.SubScopes.Replace(scope.SubScopes.OfType<ISingleObjectiveSolutionScope<TSolution>>().OrderBy(x => Maximization ? -x.Fitness : x.Fitness).ToList());
    }
    public int PopulationCount {
      get { return scope.SubScopes.Count; }
    }

    [Storable]
    private IConfidenceRegressionModel breedingPerformanceModel;
    public IConfidenceRegressionModel BreedingPerformanceModel {
      get { return breedingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double, double, double>> breedingStat;
    public IEnumerable<Tuple<double, double, double, double>> BreedingStat {
      get { return breedingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel relinkingPerformanceModel;
    public IConfidenceRegressionModel RelinkingPerformanceModel {
      get { return relinkingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double, double, double>> relinkingStat;
    public IEnumerable<Tuple<double, double, double, double>> RelinkingStat {
      get { return relinkingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel delinkingPerformanceModel;
    public IConfidenceRegressionModel DelinkingPerformanceModel {
      get { return delinkingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double, double, double>> delinkingStat;
    public IEnumerable<Tuple<double, double, double, double>> DelinkingStat {
      get { return delinkingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel samplingPerformanceModel;
    public IConfidenceRegressionModel SamplingPerformanceModel {
      get { return samplingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double>> samplingStat;
    public IEnumerable<Tuple<double, double>> SamplingStat {
      get { return samplingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel hillclimbingPerformanceModel;
    public IConfidenceRegressionModel HillclimbingPerformanceModel {
      get { return hillclimbingPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double>> hillclimbingStat;
    public IEnumerable<Tuple<double, double>> HillclimbingStat {
      get { return hillclimbingStat; }
    }
    [Storable]
    private IConfidenceRegressionModel adaptiveWalkPerformanceModel;
    public IConfidenceRegressionModel AdaptiveWalkPerformanceModel {
      get { return adaptiveWalkPerformanceModel; }
    }
    [Storable]
    private List<Tuple<double, double>> adaptivewalkingStat;
    public IEnumerable<Tuple<double, double>> AdaptivewalkingStat {
      get { return adaptivewalkingStat; }
    }

    [Storable]
    public ISolutionModel<TSolution> Model { get; set; }

    [StorableConstructor]
    protected MemPRPopulationContext(bool deserializing) : base(deserializing) { }
    protected MemPRPopulationContext(MemPRPopulationContext<TProblem, TSolution, TPopulationContext, TSolutionContext> original, Cloner cloner)
      : base(original, cloner) {
      scope = cloner.Clone(original.scope);
      problem = cloner.Clone(original.problem);
      initialized = cloner.Clone(original.initialized);
      iterations = cloner.Clone(original.iterations);
      evaluatedSolutions = cloner.Clone(original.evaluatedSolutions);
      bestQuality = cloner.Clone(original.bestQuality);
      bestSolution = cloner.Clone(original.bestSolution);
      localSearchEvaluations = cloner.Clone(original.localSearchEvaluations);
      localOptimaLevel = cloner.Clone(original.localOptimaLevel);
      byBreeding = cloner.Clone(original.byBreeding);
      byRelinking = cloner.Clone(original.byRelinking);
      byDelinking = cloner.Clone(original.byDelinking);
      bySampling = cloner.Clone(original.bySampling);
      byHillclimbing = cloner.Clone(original.byHillclimbing);
      byAdaptivewalking = cloner.Clone(original.byAdaptivewalking);
      random = cloner.Clone(original.random);
      breedingPerformanceModel = cloner.Clone(original.breedingPerformanceModel);
      breedingStat = original.breedingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
      relinkingPerformanceModel = cloner.Clone(original.relinkingPerformanceModel);
      relinkingStat = original.relinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
      delinkingPerformanceModel = cloner.Clone(original.delinkingPerformanceModel);
      delinkingStat = original.delinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
      samplingPerformanceModel = cloner.Clone(original.samplingPerformanceModel);
      samplingStat = original.samplingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
      hillclimbingPerformanceModel = cloner.Clone(original.hillclimbingPerformanceModel);
      hillclimbingStat = original.hillclimbingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
      adaptiveWalkPerformanceModel = cloner.Clone(original.adaptiveWalkPerformanceModel);
      adaptivewalkingStat = original.adaptivewalkingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();

      Model = cloner.Clone(original.Model);
    }
    public MemPRPopulationContext() : this("MemPRContext") { }
    public MemPRPopulationContext(string name) : base(name) {
      scope = new Scope("Global");

      Parameters.Add(problem = new ValueParameter<TProblem>("Problem"));
      Parameters.Add(initialized = new ValueParameter<BoolValue>("Initialized", new BoolValue(false)));
      Parameters.Add(iterations = new ValueParameter<IntValue>("Iterations", new IntValue(0)));
      Parameters.Add(evaluatedSolutions = new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
      Parameters.Add(bestQuality = new ValueParameter<DoubleValue>("BestQuality", new DoubleValue(double.NaN)));
      Parameters.Add(bestSolution = new ValueParameter<TSolution>("BestFoundSolution"));
      Parameters.Add(localSearchEvaluations = new ValueParameter<IntValue>("LocalSearchEvaluations", new IntValue(0)));
      Parameters.Add(localOptimaLevel = new ValueParameter<DoubleValue>("LocalOptimaLevel", new DoubleValue(0)));
      Parameters.Add(byBreeding = new ValueParameter<IntValue>("ByBreeding", new IntValue(0)));
      Parameters.Add(byRelinking = new ValueParameter<IntValue>("ByRelinking", new IntValue(0)));
      Parameters.Add(byDelinking = new ValueParameter<IntValue>("ByDelinking", new IntValue(0)));
      Parameters.Add(bySampling = new ValueParameter<IntValue>("BySampling", new IntValue(0)));
      Parameters.Add(byHillclimbing = new ValueParameter<IntValue>("ByHillclimbing", new IntValue(0)));
      Parameters.Add(byAdaptivewalking = new ValueParameter<IntValue>("ByAdaptivewalking", new IntValue(0)));
      Parameters.Add(random = new ValueParameter<IRandom>("Random", new MersenneTwister()));

      breedingStat = new List<Tuple<double, double, double, double>>();
      relinkingStat = new List<Tuple<double, double, double, double>>();
      delinkingStat = new List<Tuple<double, double, double, double>>();
      samplingStat = new List<Tuple<double, double>>();
      hillclimbingStat = new List<Tuple<double, double>>();
      adaptivewalkingStat = new List<Tuple<double, double>>();
    }

    public abstract ISingleObjectiveSolutionScope<TSolution> ToScope(TSolution code, double fitness = double.NaN);

    public virtual double Evaluate(TSolution solution, CancellationToken token) {
      var solScope = ToScope(solution);
      Evaluate(solScope, token);
      return solScope.Fitness;
    }

    public virtual void Evaluate(ISingleObjectiveSolutionScope<TSolution> solScope, CancellationToken token) {
      var pdef = Problem as ISingleObjectiveProblemDefinition;
      if (pdef != null) {
        var ind = new SingleEncodingIndividual(pdef.Encoding, solScope);
        solScope.Fitness = pdef.Evaluate(ind, Random);
      } else {
        RunOperator(Problem.Evaluator, solScope, token);
      }
    }

    public abstract TSolutionContext CreateSingleSolutionContext(ISingleObjectiveSolutionScope<TSolution> solution);

    public void IncrementEvaluatedSolutions(int byEvaluations) {
      if (byEvaluations < 0) throw new ArgumentException("Can only increment and not decrement evaluated solutions.");
      EvaluatedSolutions += byEvaluations;
    }

    #region Breeding Performance
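    // NOTE: learning of the performance models is currently disabled (changeset 14680, ticket #2701,
    // "disabled learning"): the Add*Result, Relearn*PerformanceModel and *Suited methods in this and
    // the following regions return immediately, so the code after the early "return;"/"return true;"
    // statements is unreachable until learning is re-enabled.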
    public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      return;
      if (IsBetter(a, b))
        breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));
      else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));
      if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();
    }
    public void RelearnBreedingPerformanceModel() {
      return;
      breedingPerformanceModel = RunRegression(PrepareRegression(ToListRow(breedingStat)), breedingPerformanceModel).Model;
    }
    public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      return true;
      if (breedingPerformanceModel == null) return true;
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in BreedingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;

      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel);
    }
    #endregion

    #region Relinking Performance
    public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      return;
      if (IsBetter(a, b))
        relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
      else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
      if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();
    }
    public void RelearnRelinkingPerformanceModel() {
      return;
      relinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(relinkingStat)), relinkingPerformanceModel).Model;
    }
    public bool RelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      return true;
      if (relinkingPerformanceModel == null) return true;
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in RelinkingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;

      if (IsBetter(p1, p2)) {
        return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
      }
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
    }
    #endregion

    #region Delinking Performance
    public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
      return;
      if (IsBetter(a, b))
        delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
      else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
      if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel();
    }
    public void RelearnDelinkingPerformanceModel() {
      return;
      delinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(delinkingStat)), delinkingPerformanceModel).Model;
    }
    public bool DelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
      return true;
      if (delinkingPerformanceModel == null) return true;
      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
      foreach (var d in DelinkingStat) {
        if (d.Item1 < minI1) minI1 = d.Item1;
        if (d.Item1 > maxI1) maxI1 = d.Item1;
        if (d.Item2 < minI2) minI2 = d.Item2;
        if (d.Item2 > maxI2) maxI2 = d.Item2;
      }
      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
        return true;
      if (IsBetter(p1, p2)) {
        return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
      }
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
    }
    #endregion

    #region Sampling Performance
    public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {
      return;
      samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));
      if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();
    }
    public void RelearnSamplingPerformanceModel() {
      return;
      samplingPerformanceModel = RunRegression(PrepareRegression(ToListRow(samplingStat)), samplingPerformanceModel).Model;
    }
    public bool SamplingSuited(double avgDist) {
      return true;
      if (samplingPerformanceModel == null) return true;
      if (avgDist < samplingStat.Min(x => x.Item1) || avgDist > samplingStat.Max(x => x.Item1)) return true;
      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { avgDist }, samplingPerformanceModel);
    }
    #endregion

    #region Hillclimbing Performance
    public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
      return;
      hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
      if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();
    }
    public void RelearnHillclimbingPerformanceModel() {
      return;
      hillclimbingPerformanceModel = RunRegression(PrepareRegression(ToListRow(hillclimbingStat)), hillclimbingPerformanceModel).Model;
    }
    public bool HillclimbingSuited(double startingFitness) {
      return true;
      if (hillclimbingPerformanceModel == null) return true;
      if (startingFitness < HillclimbingStat.Min(x => x.Item1) || startingFitness > HillclimbingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, hillclimbingPerformanceModel);
    }
    #endregion

    #region Adaptivewalking Performance
    public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
      return;
      adaptivewalkingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
      if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();
    }
    public void RelearnAdaptiveWalkPerformanceModel() {
      return;
      adaptiveWalkPerformanceModel = RunRegression(PrepareRegression(ToListRow(adaptivewalkingStat)), adaptiveWalkPerformanceModel).Model;
    }
    public bool AdaptivewalkingSuited(double startingFitness) {
      return true;
      if (adaptiveWalkPerformanceModel == null) return true;
      if (startingFitness < AdaptivewalkingStat.Min(x => x.Item1) || startingFitness > AdaptivewalkingStat.Max(x => x.Item1))
        return true;
      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, adaptiveWalkPerformanceModel);
    }
    #endregion

    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
    }
    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
    }
    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double, double>> data) {
      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
    }

    protected RegressionProblemData PrepareRegression(List<List<double>> data) {
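      // Each row of "data" holds the recorded inputs with the target value as its last element.
      // The rows are shuffled, the inputs are named in0..in(n-1), the target column is named "out",
      // and at most the first 50 rows form the training partition; the rest is used for testing.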
      var columns = data.First().Select(y => new List<double>()).ToList();
      foreach (var next in data.Shuffle(Random)) {
        for (var i = 0; i < next.Count; i++) {
          columns[i].Add(next[i]);
        }
      }
      var ds = new Dataset(columns.Select((v, i) => i < columns.Count - 1 ? "in" + i : "out").ToList(), columns);
      var regPrb = new RegressionProblemData(ds, Enumerable.Range(0, columns.Count - 1).Select(x => "in" + x), "out") {
        TrainingPartition = { Start = 0, End = Math.Min(50, data.Count) },
        TestPartition = { Start = Math.Min(50, data.Count), End = data.Count }
      };
      return regPrb;
    }

    protected static IConfidenceRegressionSolution RunRegression(RegressionProblemData trainingData, IConfidenceRegressionModel baseLineModel = null) {
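      // Trains a Gaussian process regression model on the collected statistics (switching to a
      // zero mean and linear ARD + constant covariance when an input correlates strongly with the
      // target), retries up to 10 times while the training R² is (almost) zero, and returns the
      // best of the constant model, the previous (baseline) model and the newly trained solution.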
      var targetValues = trainingData.Dataset.GetDoubleValues(trainingData.TargetVariable, trainingData.TrainingIndices).ToList();
      var baseline = baseLineModel != null ? new ConfidenceRegressionSolution(baseLineModel, trainingData) : null;
      var constantSolution = new ConfidenceRegressionSolution(new ConfidenceConstantModel(targetValues.Average(), targetValues.Variance(), trainingData.TargetVariable), trainingData);
      var gpr = new GaussianProcessRegression { Problem = { ProblemData = trainingData } };
      if (trainingData.InputVariables.CheckedItems.Any(x => alglib.pearsoncorr2(trainingData.Dataset.GetDoubleValues(x.Value.Value).ToArray(), trainingData.TargetVariableValues.ToArray()) > 0.8)) {
        gpr.MeanFunction = new MeanZero();
        var cov1 = new CovarianceSum();
        cov1.Terms.Add(new CovarianceLinearArd());
        cov1.Terms.Add(new CovarianceConst());
        gpr.CovarianceFunction = cov1;
      }
      IConfidenceRegressionSolution solution = null;
      var cnt = 0;
      do {
        ExecuteAlgorithm(gpr);
        solution = (IConfidenceRegressionSolution)gpr.Results["Solution"].Value;
        cnt++;
      } while (cnt < 10 && (solution == null || solution.TrainingRSquared.IsAlmost(0)));

      return GetBestRegressionSolution(constantSolution, baseline, solution);
    }

    private static IConfidenceRegressionSolution GetBestRegressionSolution(IConfidenceRegressionSolution constant, IConfidenceRegressionSolution baseline, IConfidenceRegressionSolution solution) {
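      // Picks among the constant model, the baseline and the new solution by mean absolute error,
      // using the training error for small datasets (< 60 rows) and the test error otherwise;
      // ties go to the newly trained solution.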
      if (baseline == null)
        return constant.TrainingMeanAbsoluteError < solution.TrainingMeanAbsoluteError ? constant : solution;

      double a, b, c;
      if (constant.ProblemData.Dataset.Rows < 60) {
        c = constant.TrainingMeanAbsoluteError;
        b = baseline.TrainingMeanAbsoluteError;
        a = solution.TrainingMeanAbsoluteError;
      } else {
        c = constant.TestMeanAbsoluteError;
        b = baseline.TestMeanAbsoluteError;
        a = solution.TestMeanAbsoluteError;
      }
      if (c < b && (c < a || b < a)) return constant;
      if (b < c && (b < a || c < a)) return baseline;
      return solution;
    }

    protected static void ExecuteAlgorithm(IAlgorithm algorithm) {
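      // Runs the algorithm synchronously: the AutoResetEvent is signaled whenever the execution
      // state leaves "Started", so this call blocks until preparation and the actual run have
      // finished (or the algorithm was paused/stopped).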
      using (var evt = new AutoResetEvent(false)) {
        EventHandler exeStateChanged = (o, args) => {
          if (algorithm.ExecutionState != ExecutionState.Started)
            evt.Set();
        };
        algorithm.ExecutionStateChanged += exeStateChanged;
        if (algorithm.ExecutionState != ExecutionState.Prepared) {
          algorithm.Prepare(true);
          evt.WaitOne();
        }
        algorithm.Start();
        evt.WaitOne();
        algorithm.ExecutionStateChanged -= exeStateChanged;
      }
    }

    private double ProbabilityAcceptAbsolutePerformanceModel(List<double> inputs, IConfidenceRegressionModel model) {
      var inputVariables = inputs.Select((v, i) => "in" + i);
      var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());

      // calculate the fitness goal
      var goal = Maximization ? Population.Min(x => x.Fitness) : Population.Max(x => x.Fitness);
      var z = (goal - mean) / sdev;
      // return the probability of achieving or surpassing that goal
      var y = alglib.invnormaldistribution(z);
      return Maximization ? 1.0 - y /* P(X >= z) */ : y; // P(X <= z)
    }

    private double ProbabilityAcceptRelativePerformanceModel(double basePerformance, List<double> inputs, IConfidenceRegressionModel model) {
      var inputVariables = inputs.Select((v, i) => "in" + i);
      var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());

      // calculate the improvement goal
      var goal = Maximization ? Population.Min(x => x.Fitness) - basePerformance : basePerformance - Population.Max(x => x.Fitness);
      var z = (goal - mean) / sdev;
      // return the probability of achieving or surpassing that goal
      return 1.0 - alglib.invnormaldistribution(z); /* P(X >= z) */
    }

    private static List<List<double>> ToListRow(List<Tuple<double, double>> rows) {
      return rows.Select(x => new List<double> { x.Item1, x.Item2 }).ToList();
    }
    private static List<List<double>> ToListRow(List<Tuple<double, double, double>> rows) {
      return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3 }).ToList();
    }
    private static List<List<double>> ToListRow(List<Tuple<double, double, double, double>> rows) {
      return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3, x.Item4 }).ToList();
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool IsBetter(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b) {
      return IsBetter(a.Fitness, b.Fitness);
    }
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public bool IsBetter(double a, double b) {
      return double.IsNaN(b) && !double.IsNaN(a)
        || Maximization && a > b
        || !Maximization && a < b;
    }

    #region IExecutionContext members
    public IAtomicOperation CreateOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }

    public IAtomicOperation CreateChildOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateChildOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }
    #endregion

    #region Engine Helper
    public void RunOperator(IOperator op, IScope scope, CancellationToken cancellationToken) {
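      // Minimal stack-based engine: executes the given operator and every operation it spawns on
      // the provided scope, honoring the cancellation token; non-cancellation exceptions are
      // wrapped in an OperatorExecutionException.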
      var stack = new Stack<IOperation>();
      stack.Push(CreateChildOperation(op, scope));

      while (stack.Count > 0) {
        cancellationToken.ThrowIfCancellationRequested();

        var next = stack.Pop();
        if (next is OperationCollection) {
          var coll = (OperationCollection)next;
          for (int i = coll.Count - 1; i >= 0; i--)
            if (coll[i] != null) stack.Push(coll[i]);
        } else if (next is IAtomicOperation) {
          var operation = (IAtomicOperation)next;
          try {
            next = operation.Operator.Execute((IExecutionContext)operation, cancellationToken);
          } catch (Exception ex) {
            stack.Push(operation);
            if (ex is OperationCanceledException) throw ex;
            else throw new OperatorExecutionException(operation.Operator, ex);
          }
          if (next != null) stack.Push(next);
        }
      }
    }
    #endregion
  }

  [Item("SingleSolutionMemPRContext", "Abstract base class for single solution MemPR contexts.")]
  [StorableClass]
  public abstract class MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> : ParameterizedNamedItem,
    ISingleSolutionHeuristicAlgorithmContext<TProblem, TSolution>, IEvaluationServiceContext<TSolution>
      where TProblem : class, IItem, ISingleObjectiveHeuristicOptimizationProblem
      where TSolution : class, IItem
      where TContext : MemPRPopulationContext<TProblem, TSolution, TContext, TSolutionContext>
      where TSolutionContext : MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> {

    private TContext parent;
    public IExecutionContext Parent {
      get { return parent; }
      set { throw new InvalidOperationException("Cannot set the parent of a single-solution context."); }
    }

    [Storable]
    private ISingleObjectiveSolutionScope<TSolution> scope;
    public IScope Scope {
      get { return scope; }
    }

    IKeyedItemCollection<string, IParameter> IExecutionContext.Parameters {
      get { return Parameters; }
    }

    public TProblem Problem {
      get { return parent.Problem; }
    }
    public bool Maximization {
      get { return parent.Maximization; }
    }

    public double BestQuality {
      get { return parent.BestQuality; }
      set { parent.BestQuality = value; }
    }

    public TSolution BestSolution {
      get { return parent.BestSolution; }
      set { parent.BestSolution = value; }
    }

    public IRandom Random {
      get { return parent.Random; }
    }

    [Storable]
    private IValueParameter<IntValue> evaluatedSolutions;
    public int EvaluatedSolutions {
      get { return evaluatedSolutions.Value.Value; }
      set { evaluatedSolutions.Value.Value = value; }
    }

    [Storable]
    private IValueParameter<IntValue> iterations;
    public int Iterations {
      get { return iterations.Value.Value; }
      set { iterations.Value.Value = value; }
    }

    ISingleObjectiveSolutionScope<TSolution> ISingleSolutionHeuristicAlgorithmContext<TProblem, TSolution>.Solution {
      get { return scope; }
    }

    [StorableConstructor]
    protected MemPRSolutionContext(bool deserializing) : base(deserializing) { }
    protected MemPRSolutionContext(MemPRSolutionContext<TProblem, TSolution, TContext, TSolutionContext> original, Cloner cloner)
      : base(original, cloner) {
      scope = cloner.Clone(original.scope);
      evaluatedSolutions = cloner.Clone(original.evaluatedSolutions);
      iterations = cloner.Clone(original.iterations);
    }
    public MemPRSolutionContext(TContext baseContext, ISingleObjectiveSolutionScope<TSolution> solution) {
      parent = baseContext;
      scope = solution;

      Parameters.Add(evaluatedSolutions = new ValueParameter<IntValue>("EvaluatedSolutions", new IntValue(0)));
      Parameters.Add(iterations = new ValueParameter<IntValue>("Iterations", new IntValue(0)));
    }

    public void IncrementEvaluatedSolutions(int byEvaluations) {
      if (byEvaluations < 0) throw new ArgumentException("Can only increment and not decrement evaluated solutions.");
      EvaluatedSolutions += byEvaluations;
    }
    public virtual double Evaluate(TSolution solution, CancellationToken token) {
      return parent.Evaluate(solution, token);
    }

    public virtual void Evaluate(ISingleObjectiveSolutionScope<TSolution> solScope, CancellationToken token) {
      parent.Evaluate(solScope, token);
    }

    #region IExecutionContext members
    public IAtomicOperation CreateOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }

    public IAtomicOperation CreateChildOperation(IOperator op) {
      return new ExecutionContext(this, op, Scope);
    }

    public IAtomicOperation CreateChildOperation(IOperator op, IScope s) {
      return new ExecutionContext(this, op, s);
    }
    #endregion
  }
}