
Timestamp: 02/11/17 01:06:37 (7 years ago)
Author: abeham
Message: #2457: copied MemPR algorithm from its branch to this branch
Location: branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR
Files: 1 edited, 1 copied

Legend (markers used in the diff below):
  unmarked  unchanged in both revisions
  -         removed (present in r14573 only)
  +         added in r14666
  …         unchanged lines omitted between hunks
  • branches/PerformanceComparison/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs
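Relative to r14573, r14666 removes the learned performance models from MemPRContext.cs: the IConfidenceRegressionModel fields, the Relearn*PerformanceModel methods, and the regression and probability helpers (GetSolution, PrepareRegression, RunRegression, GetBestRegressionSolution, ExecuteAlgorithm, ProbabilityAcceptAbsolutePerformanceModel, ProbabilityAcceptRelativePerformanceModel, ToListRow) disappear, and they are no longer cloned in the cloning constructor. The raw statistics lists (breedingStat, relinkingStat, delinkingStat, samplingStat, hillclimbingStat, adaptivewalkingStat) are kept, while the suitability checks (BreedingSuited, RelinkSuited, DelinkSuited, SamplingSuited, HillclimbingSuited, AdaptivewalkingSuited) now simply return true. A sketch of the gating idea behind the removed code follows the diff.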

    r14573 → r14666

       get { return scope.SubScopes.Count; }
     }
-
-    [Storable]
-    private IConfidenceRegressionModel breedingPerformanceModel;
-    public IConfidenceRegressionModel BreedingPerformanceModel {
-      get { return breedingPerformanceModel; }
-    }
+
     [Storable]
     private List<Tuple<double, double, double, double>> breedingStat;
…
     }
     [Storable]
-    private IConfidenceRegressionModel relinkingPerformanceModel;
-    public IConfidenceRegressionModel RelinkingPerformanceModel {
-      get { return relinkingPerformanceModel; }
-    }
-    [Storable]
     private List<Tuple<double, double, double, double>> relinkingStat;
     public IEnumerable<Tuple<double, double, double, double>> RelinkingStat {
…
     }
     [Storable]
-    private IConfidenceRegressionModel delinkingPerformanceModel;
-    public IConfidenceRegressionModel DelinkingPerformanceModel {
-      get { return delinkingPerformanceModel; }
-    }
-    [Storable]
     private List<Tuple<double, double, double, double>> delinkingStat;
     public IEnumerable<Tuple<double, double, double, double>> DelinkingStat {
…
     }
     [Storable]
-    private IConfidenceRegressionModel samplingPerformanceModel;
-    public IConfidenceRegressionModel SamplingPerformanceModel {
-      get { return samplingPerformanceModel; }
-    }
-    [Storable]
     private List<Tuple<double, double>> samplingStat;
     public IEnumerable<Tuple<double, double>> SamplingStat {
…
     }
     [Storable]
-    private IConfidenceRegressionModel hillclimbingPerformanceModel;
-    public IConfidenceRegressionModel HillclimbingPerformanceModel {
-      get { return hillclimbingPerformanceModel; }
-    }
-    [Storable]
     private List<Tuple<double, double>> hillclimbingStat;
     public IEnumerable<Tuple<double, double>> HillclimbingStat {
       get { return hillclimbingStat; }
-    }
-    [Storable]
-    private IConfidenceRegressionModel adaptiveWalkPerformanceModel;
-    public IConfidenceRegressionModel AdaptiveWalkPerformanceModel {
-      get { return adaptiveWalkPerformanceModel; }
     }
     [Storable]
…
       byAdaptivewalking = cloner.Clone(original.byAdaptivewalking);
       random = cloner.Clone(original.random);
-      breedingPerformanceModel = cloner.Clone(original.breedingPerformanceModel);
       breedingStat = original.breedingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
-      relinkingPerformanceModel = cloner.Clone(original.relinkingPerformanceModel);
       relinkingStat = original.relinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
-      delinkingPerformanceModel = cloner.Clone(original.delinkingPerformanceModel);
       delinkingStat = original.delinkingStat.Select(x => Tuple.Create(x.Item1, x.Item2, x.Item3, x.Item4)).ToList();
-      samplingPerformanceModel = cloner.Clone(original.samplingPerformanceModel);
       samplingStat = original.samplingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
-      hillclimbingPerformanceModel = cloner.Clone(original.hillclimbingPerformanceModel);
       hillclimbingStat = original.hillclimbingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();
-      adaptiveWalkPerformanceModel = cloner.Clone(original.adaptiveWalkPerformanceModel);
       adaptivewalkingStat = original.adaptivewalkingStat.Select(x => Tuple.Create(x.Item1, x.Item2)).ToList();

…
         breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));
       else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));
-      if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();
-    }
-    public void RelearnBreedingPerformanceModel() {
-      breedingPerformanceModel = RunRegression(PrepareRegression(ToListRow(breedingStat)), breedingPerformanceModel).Model;
     }
     public bool BreedingSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
-      if (breedingPerformanceModel == null) return true;
-      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
-      foreach (var d in BreedingStat) {
-        if (d.Item1 < minI1) minI1 = d.Item1;
-        if (d.Item1 > maxI1) maxI1 = d.Item1;
-        if (d.Item2 < minI2) minI2 = d.Item2;
-        if (d.Item2 > maxI2) maxI2 = d.Item2;
-      }
-      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
-        return true;
-
-      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel);
+      return true;
     }
     #endregion
…
         relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
       else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
-      if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();
-    }
-    public void RelearnRelinkingPerformanceModel() {
-      relinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(relinkingStat)), relinkingPerformanceModel).Model;
     }
     public bool RelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
-      if (relinkingPerformanceModel == null) return true;
-      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
-      foreach (var d in RelinkingStat) {
-        if (d.Item1 < minI1) minI1 = d.Item1;
-        if (d.Item1 > maxI1) maxI1 = d.Item1;
-        if (d.Item2 < minI2) minI2 = d.Item2;
-        if (d.Item2 > maxI2) maxI2 = d.Item2;
-      }
-      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
-        return true;
-
-      if (IsBetter(p1, p2)) {
-        return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
-      }
-      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
+      return true;
     }
     #endregion
…
         delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
       else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
-      if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel();
-    }
-    public void RelearnDelinkingPerformanceModel() {
-      delinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(delinkingStat)), delinkingPerformanceModel).Model;
     }
     public bool DelinkSuited(ISingleObjectiveSolutionScope<TSolution> p1, ISingleObjectiveSolutionScope<TSolution> p2, double dist) {
-      if (delinkingPerformanceModel == null) return true;
-      double minI1 = double.MaxValue, minI2 = double.MaxValue, maxI1 = double.MinValue, maxI2 = double.MinValue;
-      foreach (var d in DelinkingStat) {
-        if (d.Item1 < minI1) minI1 = d.Item1;
-        if (d.Item1 > maxI1) maxI1 = d.Item1;
-        if (d.Item2 < minI2) minI2 = d.Item2;
-        if (d.Item2 > maxI2) maxI2 = d.Item2;
-      }
-      if (p1.Fitness < minI1 || p1.Fitness > maxI1 || p2.Fitness < minI2 || p2.Fitness > maxI2)
-        return true;
-      if (IsBetter(p1, p2)) {
-        return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p1.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
-      }
-      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
+      return true;
     }
     #endregion
…
     public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {
       samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));
-      if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();
-    }
-    public void RelearnSamplingPerformanceModel() {
-      samplingPerformanceModel = RunRegression(PrepareRegression(ToListRow(samplingStat)), samplingPerformanceModel).Model;
     }
     public bool SamplingSuited(double avgDist) {
-      if (samplingPerformanceModel == null) return true;
-      if (avgDist < samplingStat.Min(x => x.Item1) || avgDist > samplingStat.Max(x => x.Item1)) return true;
-      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { avgDist }, samplingPerformanceModel);
+      return true;
     }
     #endregion
…
     public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
       hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
-      if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();
-    }
-    public void RelearnHillclimbingPerformanceModel() {
-      hillclimbingPerformanceModel = RunRegression(PrepareRegression(ToListRow(hillclimbingStat)), hillclimbingPerformanceModel).Model;
     }
     public bool HillclimbingSuited(double startingFitness) {
-      if (hillclimbingPerformanceModel == null) return true;
-      if (startingFitness < HillclimbingStat.Min(x => x.Item1) || startingFitness > HillclimbingStat.Max(x => x.Item1))
-        return true;
-      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, hillclimbingPerformanceModel);
+      return true;
     }
     #endregion
…
     public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
       adaptivewalkingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
-      if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();
-    }
-    public void RelearnAdaptiveWalkPerformanceModel() {
-      adaptiveWalkPerformanceModel = RunRegression(PrepareRegression(ToListRow(adaptivewalkingStat)), adaptiveWalkPerformanceModel).Model;
     }
     public bool AdaptivewalkingSuited(double startingFitness) {
-      if (adaptiveWalkPerformanceModel == null) return true;
-      if (startingFitness < AdaptivewalkingStat.Min(x => x.Item1) || startingFitness > AdaptivewalkingStat.Max(x => x.Item1))
-        return true;
-      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, adaptiveWalkPerformanceModel);
-    }
-    #endregion
-
-    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double>> data) {
-      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
-    }
-    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double>> data) {
-      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
-    }
-    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double, double, double>> data) {
-      return new ConfidenceRegressionSolution(model, PrepareRegression(ToListRow(data.ToList())));
-    }
-
-    protected RegressionProblemData PrepareRegression(List<List<double>> data) {
-      var columns = data.First().Select(y => new List<double>()).ToList();
-      foreach (var next in data.Shuffle(Random)) {
-        for (var i = 0; i < next.Count; i++) {
-          columns[i].Add(next[i]);
-        }
-      }
-      var ds = new Dataset(columns.Select((v, i) => i < columns.Count - 1 ? "in" + i : "out").ToList(), columns);
-      var regPrb = new RegressionProblemData(ds, Enumerable.Range(0, columns.Count - 1).Select(x => "in" + x), "out") {
-        TrainingPartition = { Start = 0, End = Math.Min(50, data.Count) },
-        TestPartition = { Start = Math.Min(50, data.Count), End = data.Count }
-      };
-      return regPrb;
-    }
-
-    protected static IConfidenceRegressionSolution RunRegression(RegressionProblemData trainingData, IConfidenceRegressionModel baseLineModel = null) {
-      var targetValues = trainingData.Dataset.GetDoubleValues(trainingData.TargetVariable, trainingData.TrainingIndices).ToList();
-      var baseline = baseLineModel != null ? new ConfidenceRegressionSolution(baseLineModel, trainingData) : null;
-      var constantSolution = new ConfidenceRegressionSolution(new ConfidenceConstantModel(targetValues.Average(), targetValues.Variance(), trainingData.TargetVariable), trainingData);
-      var gpr = new GaussianProcessRegression { Problem = { ProblemData = trainingData } };
-      if (trainingData.InputVariables.CheckedItems.Any(x => alglib.pearsoncorr2(trainingData.Dataset.GetDoubleValues(x.Value.Value).ToArray(), trainingData.TargetVariableValues.ToArray()) > 0.8)) {
-        gpr.MeanFunction = new MeanZero();
-        var cov1 = new CovarianceSum();
-        cov1.Terms.Add(new CovarianceLinearArd());
-        cov1.Terms.Add(new CovarianceConst());
-        gpr.CovarianceFunction = cov1;
-      }
-      IConfidenceRegressionSolution solution = null;
-      var cnt = 0;
-      do {
-        ExecuteAlgorithm(gpr);
-        solution = (IConfidenceRegressionSolution)gpr.Results["Solution"].Value;
-        cnt++;
-      } while (cnt < 10 && (solution == null || solution.TrainingRSquared.IsAlmost(0)));
-
-      return GetBestRegressionSolution(constantSolution, baseline, solution);
-    }
-
-    private static IConfidenceRegressionSolution GetBestRegressionSolution(IConfidenceRegressionSolution constant, IConfidenceRegressionSolution baseline, IConfidenceRegressionSolution solution) {
-      if (baseline == null)
-        return constant.TrainingMeanAbsoluteError < solution.TrainingMeanAbsoluteError ? constant : solution;
-
-      double a, b, c;
-      if (constant.ProblemData.Dataset.Rows < 60) {
-        c = constant.TrainingMeanAbsoluteError;
-        b = baseline.TrainingMeanAbsoluteError;
-        a = solution.TrainingMeanAbsoluteError;
-      } else {
-        c = constant.TestMeanAbsoluteError;
-        b = baseline.TestMeanAbsoluteError;
-        a = solution.TestMeanAbsoluteError;
-      }
-      if (c < b && (c < a || b < a)) return constant;
-      if (b < c && (b < a || c < a)) return baseline;
-      return solution;
-    }
-
-    protected static void ExecuteAlgorithm(IAlgorithm algorithm) {
-      using (var evt = new AutoResetEvent(false)) {
-        EventHandler exeStateChanged = (o, args) => {
-          if (algorithm.ExecutionState != ExecutionState.Started)
-            evt.Set();
-        };
-        algorithm.ExecutionStateChanged += exeStateChanged;
-        if (algorithm.ExecutionState != ExecutionState.Prepared) {
-          algorithm.Prepare(true);
-          evt.WaitOne();
-        }
-        algorithm.Start();
-        evt.WaitOne();
-        algorithm.ExecutionStateChanged -= exeStateChanged;
-      }
-    }
-
-    private double ProbabilityAcceptAbsolutePerformanceModel(List<double> inputs, IConfidenceRegressionModel model) {
-      var inputVariables = inputs.Select((v, i) => "in" + i);
-      var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
-      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
-      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());
-
-      // calculate the fitness goal
-      var goal = Maximization ? Population.Min(x => x.Fitness) : Population.Max(x => x.Fitness);
-      var z = (goal - mean) / sdev;
-      // return the probability of achieving or surpassing that goal
-      var y = alglib.invnormaldistribution(z);
-      return Maximization ? 1.0 - y /* P(X >= z) */ : y; // P(X <= z)
-    }
-
-    private double ProbabilityAcceptRelativePerformanceModel(double basePerformance, List<double> inputs, IConfidenceRegressionModel model) {
-      var inputVariables = inputs.Select((v, i) => "in" + i);
-      var ds = new Dataset(inputVariables.Concat(new[] { "out" }), inputs.Select(x => new List<double> { x }).Concat(new[] { new List<double> { double.NaN } }));
-      var mean = model.GetEstimatedValues(ds, new[] { 0 }).Single();
-      var sdev = Math.Sqrt(model.GetEstimatedVariances(ds, new[] { 0 }).Single());
-
-      // calculate the improvement goal
-      var goal = Maximization ? Population.Min(x => x.Fitness) - basePerformance : basePerformance - Population.Max(x => x.Fitness);
-      var z = (goal - mean) / sdev;
-      // return the probability of achieving or surpassing that goal
-      return 1.0 - alglib.invnormaldistribution(z); /* P(X >= z) */
-    }
-
-    private static List<List<double>> ToListRow(List<Tuple<double, double>> rows) {
-      return rows.Select(x => new List<double> { x.Item1, x.Item2 }).ToList();
-    }
-    private static List<List<double>> ToListRow(List<Tuple<double, double, double>> rows) {
-      return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3 }).ToList();
-    }
-    private static List<List<double>> ToListRow(List<Tuple<double, double, double, double>> rows) {
-      return rows.Select(x => new List<double> { x.Item1, x.Item2, x.Item3, x.Item4 }).ToList();
-    }
+      return true;
+    }
+    #endregion

     [MethodImpl(MethodImplOptions.AggressiveInlining)]
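The large block removed at the end of the diff (RunRegression, ExecuteAlgorithm, and the two ProbabilityAccept*PerformanceModel helpers) trained a confidence regression model on the collected statistics and gated each operator by the predicted probability of reaching the current population's worst fitness, or the required improvement over a base value. The following is a minimal, self-contained C# sketch of that pattern; OperatorGate, Observe, Suited, ProbabilityAtLeast and NormalCdf are hypothetical names, the learned model is reduced to the mean and standard deviation of the observed outcomes, and a textbook normal-CDF approximation stands in for the alglib call used in the original, so this illustrates the idea rather than reproducing the HeuristicLab implementation.

using System;
using System.Collections.Generic;
using System.Linq;

// Sketch only: the real file learns an IConfidenceRegressionModel (via GaussianProcessRegression)
// from the collected statistics; here the "model" is just the mean and standard deviation of the
// observed outcomes, which is enough to show the gating and acceptance-probability pattern.
class OperatorGate {
  readonly Random random = new Random();
  readonly List<Tuple<double, double>> stat = new List<Tuple<double, double>>(); // (input fitness, outcome)
  bool modelAvailable;
  double predictedMean, predictedSdev;

  public void Observe(double inputFitness, double outcome) {
    stat.Add(Tuple.Create(inputFitness, outcome));
    if (stat.Count % 10 == 0) Relearn(); // the original relearns its regression model every 10 samples
  }

  void Relearn() {
    predictedMean = stat.Average(x => x.Item2);
    var variance = stat.Sum(x => Math.Pow(x.Item2 - predictedMean, 2)) / Math.Max(1, stat.Count - 1);
    predictedSdev = Math.Sqrt(variance);
    modelAvailable = true;
  }

  // Should the operator be tried for this input, given that we want to reach goalOutcome?
  public bool Suited(double inputFitness, double goalOutcome) {
    if (!modelAvailable) return true;          // nothing learned yet -> always try the operator
    if (inputFitness < stat.Min(x => x.Item1) || inputFitness > stat.Max(x => x.Item1))
      return true;                             // outside the observed range -> no reliable prediction
    return random.NextDouble() < ProbabilityAtLeast(goalOutcome, predictedMean, predictedSdev);
  }

  // P(X >= goal) for X ~ Normal(mean, sdev^2): the probability of achieving or surpassing the goal.
  static double ProbabilityAtLeast(double goal, double mean, double sdev) {
    if (sdev <= 0) return mean >= goal ? 1.0 : 0.0;  // degenerate prediction
    var z = (goal - mean) / sdev;
    return 1.0 - NormalCdf(z);
  }

  // standard normal CDF via an Abramowitz-Stegun style approximation
  static double NormalCdf(double z) {
    double t = 1.0 / (1.0 + 0.2316419 * Math.Abs(z));
    double d = 0.3989422804014327 * Math.Exp(-z * z / 2.0);
    double p = d * t * (0.319381530 + t * (-0.356563782 + t * (1.781477937 + t * (-1.821255978 + t * 1.330274429))));
    return z >= 0 ? 1.0 - p : p;
  }
}

For example, a gate fed with hill-climbing results would answer Suited(startFitness, requiredImprovement) with a probability close to 1 when the improvements observed so far comfortably exceed the requirement, and close to 0 otherwise; r14666 drops this gating entirely and always answers true.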