Free cookie consent management tool by TermsFeed Policy Generator

Changeset 14573 for branches


Ignore:
Timestamp:
01/15/17 09:57:50 (8 years ago)
Author:
abeham
Message:

#2701:

  • changed the adaptive walk performance model to a relative model
  • added ConfidenceConstantModel for regression
  • use the constant model as a fallback for GPR
  • reorganized code
Location:
branches/MemPRAlgorithm
Files:
3 edited
1 copied

Legend:

Unmodified
Added
Removed
  • branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRAlgorithm.cs

    r14563 r14573  
    302302
    303303      if (!replaced && offspring != null) {
    304         if (Context.HillclimbingSuited(offspring)) {
     304        if (Context.HillclimbingSuited(offspring.Fitness)) {
    305305          HillClimb(offspring, token, CalculateSubspace(Context.Population.Select(x => x.Solution)));
    306306          if (Replace(offspring, token)) {
     
    566566      AdaptiveWalk(newScope, maxEvals, token, subspace);
    567567     
     568      Context.AddAdaptivewalkingResult(scope, newScope);
    568569      if (Context.IsBetter(newScope, scope)) {
    569         Context.AddAdaptivewalkingResult(scope, newScope);
    570570        scope.Adopt(newScope);
    571       } else if (!Eq(newScope, scope))
    572         Context.AddAdaptivewalkingResult(scope, newScope);
     571      }
    573572    }
    574573    protected abstract void AdaptiveWalk(ISingleObjectiveSolutionScope<TSolution> scope, int maxEvals, CancellationToken token, ISolutionSubspace<TSolution> subspace = null);
  • branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs

    r14563 r14573  
    2727using HeuristicLab.Algorithms.DataAnalysis;
    2828using HeuristicLab.Algorithms.MemPR.Interfaces;
     29using HeuristicLab.Analysis;
    2930using HeuristicLab.Common;
    3031using HeuristicLab.Core;
     
    343344    }
    344345
     346    #region Breeding Performance
     347    public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
     348      if (IsBetter(a, b))
     349        breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));
     350      else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));
     351      if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();
     352    }
    345353    public void RelearnBreedingPerformanceModel() {
    346354      breedingPerformanceModel = RunRegression(PrepareRegression(ToListRow(breedingStat)), breedingPerformanceModel).Model;
     
    360368      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel);
    361369    }
    362 
     370    #endregion
     371
     372    #region Relinking Performance
     373    public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
     374      if (IsBetter(a, b))
     375        relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
     376      else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
     377      if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();
     378    }
    363379    public void RelearnRelinkingPerformanceModel() {
    364380      relinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(relinkingStat)), relinkingPerformanceModel).Model;
     
    381397      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel);
    382398    }
    383 
     399    #endregion
     400
     401    #region Delinking Performance
     402    public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
     403      if (IsBetter(a, b))
     404        delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
     405      else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
     406      if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel();
     407    }
    384408    public void RelearnDelinkingPerformanceModel() {
    385409      delinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(delinkingStat)), delinkingPerformanceModel).Model;
     
    401425      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel);
    402426    }
    403 
     427    #endregion
     428
     429    #region Sampling Performance
     430    public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {
     431      samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));
     432      if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();
     433    }
    404434    public void RelearnSamplingPerformanceModel() {
    405435      samplingPerformanceModel = RunRegression(PrepareRegression(ToListRow(samplingStat)), samplingPerformanceModel).Model;
     
    410440      return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { avgDist }, samplingPerformanceModel);
    411441    }
    412 
     442    #endregion
     443
     444    #region Hillclimbing Performance
     445    public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
     446      hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
     447      if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();
     448    }
    413449    public void RelearnHillclimbingPerformanceModel() {
    414450      hillclimbingPerformanceModel = RunRegression(PrepareRegression(ToListRow(hillclimbingStat)), hillclimbingPerformanceModel).Model;
    415     }
    416     public bool HillclimbingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
    417       return HillclimbingSuited(scope.Fitness);
    418451    }
    419452    public bool HillclimbingSuited(double startingFitness) {
     
    423456      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, hillclimbingPerformanceModel);
    424457    }
    425 
     458    #endregion
     459
     460    #region Adaptivewalking Performance
     461    public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
     462      adaptivewalkingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
     463      if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();
     464    }
    426465    public void RelearnAdaptiveWalkPerformanceModel() {
    427466      adaptiveWalkPerformanceModel = RunRegression(PrepareRegression(ToListRow(adaptivewalkingStat)), adaptiveWalkPerformanceModel).Model;
    428     }
    429     public bool AdaptivewalkingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {
    430       return AdaptivewalkingSuited(scope.Fitness);
    431467    }
    432468    public bool AdaptivewalkingSuited(double startingFitness) {
     
    434470      if (startingFitness < AdaptivewalkingStat.Min(x => x.Item1) || startingFitness > AdaptivewalkingStat.Max(x => x.Item1))
    435471        return true;
    436       return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { startingFitness }, adaptiveWalkPerformanceModel);
    437     }
     472      return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, adaptiveWalkPerformanceModel);
     473    }
     474    #endregion
    438475
    439476    public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double>> data) {
     
    447484    }
    448485
    449     protected RegressionProblemData PrepareRegression(List<List<double>> sample) {
    450       var columns = sample.First().Select(y => new List<double>()).ToList();
    451       foreach (var next in sample.Shuffle(Random)) {
     486    protected RegressionProblemData PrepareRegression(List<List<double>> data) {
     487      var columns = data.First().Select(y => new List<double>()).ToList();
     488      foreach (var next in data.Shuffle(Random)) {
    452489        for (var i = 0; i < next.Count; i++) {
    453490          columns[i].Add(next[i]);
     
    456493      var ds = new Dataset(columns.Select((v, i) => i < columns.Count - 1 ? "in" + i : "out").ToList(), columns);
    457494      var regPrb = new RegressionProblemData(ds, Enumerable.Range(0, columns.Count - 1).Select(x => "in" + x), "out") {
    458         TrainingPartition = { Start = 0, End = Math.Min(50, sample.Count) },
    459         TestPartition = { Start = Math.Min(50, sample.Count), End = sample.Count }
     495        TrainingPartition = { Start = 0, End = Math.Min(50, data.Count) },
     496        TestPartition = { Start = Math.Min(50, data.Count), End = data.Count }
    460497      };
    461498      return regPrb;
     
    463500
    464501    protected static IConfidenceRegressionSolution RunRegression(RegressionProblemData trainingData, IConfidenceRegressionModel baseLineModel = null) {
     502      var targetValues = trainingData.Dataset.GetDoubleValues(trainingData.TargetVariable, trainingData.TrainingIndices).ToList();
    465503      var baseline = baseLineModel != null ? new ConfidenceRegressionSolution(baseLineModel, trainingData) : null;
     504      var constantSolution = new ConfidenceRegressionSolution(new ConfidenceConstantModel(targetValues.Average(), targetValues.Variance(), trainingData.TargetVariable), trainingData);
    466505      var gpr = new GaussianProcessRegression { Problem = { ProblemData = trainingData } };
    467506      if (trainingData.InputVariables.CheckedItems.Any(x => alglib.pearsoncorr2(trainingData.Dataset.GetDoubleValues(x.Value.Value).ToArray(), trainingData.TargetVariableValues.ToArray()) > 0.8)) {
     
    479518        cnt++;
    480519      } while (cnt < 10 && (solution == null || solution.TrainingRSquared.IsAlmost(0)));
    481       if (baseline == null) return solution;
    482       if (trainingData.Dataset.Rows < 60)
    483         return solution.TrainingMeanAbsoluteError < baseline.TrainingMeanAbsoluteError ? solution : baseline;
    484       return solution.TestMeanAbsoluteError < baseline.TestMeanAbsoluteError ? solution : baseline;
     520
     521      return GetBestRegressionSolution(constantSolution, baseline, solution);
     522    }
     523
     524    private static IConfidenceRegressionSolution GetBestRegressionSolution(IConfidenceRegressionSolution constant, IConfidenceRegressionSolution baseline, IConfidenceRegressionSolution solution) {
     525      if (baseline == null)
     526        return constant.TrainingMeanAbsoluteError < solution.TrainingMeanAbsoluteError ? constant : solution;
     527
     528      double a, b, c;
     529      if (constant.ProblemData.Dataset.Rows < 60) {
     530        c = constant.TrainingMeanAbsoluteError;
     531        b = baseline.TrainingMeanAbsoluteError;
     532        a = solution.TrainingMeanAbsoluteError;
     533      } else {
     534        c = constant.TestMeanAbsoluteError;
     535        b = baseline.TestMeanAbsoluteError;
     536        a = solution.TestMeanAbsoluteError;
     537      }
     538      if (c < b && (c < a || b < a)) return constant;
     539      if (b < c && (b < a || c < a)) return baseline;
     540      return solution;
    485541    }
    486542
     
    488544      using (var evt = new AutoResetEvent(false)) {
    489545        EventHandler exeStateChanged = (o, args) => {
    490           if (algorithm.ExecutionState == ExecutionState.Paused || algorithm.ExecutionState == ExecutionState.Stopped)
     546          if (algorithm.ExecutionState != ExecutionState.Started)
    491547            evt.Set();
    492548        };
    493549        algorithm.ExecutionStateChanged += exeStateChanged;
    494         algorithm.Prepare(true);
     550        if (algorithm.ExecutionState != ExecutionState.Prepared) {
     551          algorithm.Prepare(true);
     552          evt.WaitOne();
     553        }
    495554        algorithm.Start();
    496555        evt.WaitOne();
     
    547606    }
    548607
    549     public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
    550       if (IsBetter(a, b))
    551         breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));
    552       else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));
    553       if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();
    554     }
    555 
    556     public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
    557       if (IsBetter(a, b))
    558         relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
    559       else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
    560       if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();
    561     }
    562 
    563     public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {
    564       if (IsBetter(a, b))
    565         delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));
    566       else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));
    567       if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel();
    568     }
    569 
    570     public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {
    571       samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));
    572       if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();
    573     }
    574 
    575     public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
    576       hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));
    577       if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();
    578     }
    579 
    580     public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {
    581       adaptivewalkingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));
    582       if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();
    583     }
    584 
    585608    #region IExecutionContext members
    586609    public IAtomicOperation CreateOperation(IOperator op) {
     
    599622      return new ExecutionContext(this, op, s);
    600623    }
    601     #endregion
    602 
    603     #region Math Helper
    604     // normal distribution CDF (left of x) for N(0;1) standard normal distribution
    605     // from http://www.johndcook.com/blog/csharp_phi/
    606     // license: "This code is in the public domain. Do whatever you want with it, no strings attached."
    607     // added: 2016-11-19 21:46 CET
    608     /*protected static double Phi(double x) {
    609       // constants
    610       double a1 = 0.254829592;
    611       double a2 = -0.284496736;
    612       double a3 = 1.421413741;
    613       double a4 = -1.453152027;
    614       double a5 = 1.061405429;
    615       double p = 0.3275911;
    616 
    617       // Save the sign of x
    618       int sign = 1;
    619       if (x < 0)
    620         sign = -1;
    621       x = Math.Abs(x) / Math.Sqrt(2.0);
    622 
    623       // A&S formula 7.1.26
    624       double t = 1.0 / (1.0 + p * x);
    625       double y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.Exp(-x * x);
    626 
    627       return 0.5 * (1.0 + sign * y);
    628     }*/
    629624    #endregion
    630625
  • branches/MemPRAlgorithm/HeuristicLab.Problems.DataAnalysis/3.4/HeuristicLab.Problems.DataAnalysis-3.4.csproj

    r14099 r14573  
    136136    <Compile Include="Implementation\Clustering\ClusteringProblemData.cs" />
    137137    <Compile Include="Implementation\Clustering\ClusteringSolution.cs" />
     138    <Compile Include="Implementation\ConfidenceConstantModel.cs" />
    138139    <Compile Include="Implementation\ConstantModel.cs" />
    139140    <Compile Include="Implementation\DataAnalysisModel.cs" />
  • branches/MemPRAlgorithm/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/ConfidenceConstantModel.cs

    r14561 r14573  
    3030namespace HeuristicLab.Problems.DataAnalysis {
    3131  [StorableClass]
    32   [Item("Constant Model", "A model that always returns the same constant value regardless of the presented input data.")]
    33   public class ConstantModel : RegressionModel, IClassificationModel, ITimeSeriesPrognosisModel, IStringConvertibleValue {
     32  [Item("Confidence Constant Model", "A model that always returns the same constant mean value and variance regardless of the presented input data.")]
     33  public class ConfidenceConstantModel : RegressionModel, IConfidenceRegressionModel, IStringConvertibleValue {
    3434    public override IEnumerable<string> VariablesUsedForPrediction { get { return Enumerable.Empty<string>(); } }
    3535
     
    4242    }
    4343
     44    [Storable]
     45    private readonly double variance;
     46    public double Variance {
     47      get { return variance; }
     48      // setter not implemented because manipulation of the variance is not allowed
     49    }
     50
    4451    [StorableConstructor]
    45     protected ConstantModel(bool deserializing) : base(deserializing) { }
    46     protected ConstantModel(ConstantModel original, Cloner cloner)
     52    protected ConfidenceConstantModel(bool deserializing) : base(deserializing) { }
     53    protected ConfidenceConstantModel(ConfidenceConstantModel original, Cloner cloner)
    4754      : base(original, cloner) {
    4855      this.constant = original.constant;
     56      this.variance = original.variance;
    4957    }
    5058
    51     public override IDeepCloneable Clone(Cloner cloner) { return new ConstantModel(this, cloner); }
     59    public override IDeepCloneable Clone(Cloner cloner) { return new ConfidenceConstantModel(this, cloner); }
    5260
    53     public ConstantModel(double constant, string targetVariable)
     61    public ConfidenceConstantModel(double constant, double variance, string targetVariable)
    5462      : base(targetVariable) {
    5563      this.name = ItemName;
    5664      this.description = ItemDescription;
    5765      this.constant = constant;
     66      this.variance = variance;
    5867      this.ReadOnly = true; // changing a constant regression model is not supported
    5968    }
     
    6271      return rows.Select(row => Constant);
    6372    }
    64     public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
    65       return GetEstimatedValues(dataset, rows);
    66     }
    67     public IEnumerable<IEnumerable<double>> GetPrognosedValues(IDataset dataset, IEnumerable<int> rows, IEnumerable<int> horizons) {
    68       return rows.Select(_ => horizons.Select(__ => Constant));
     73
     74    public IEnumerable<double> GetEstimatedVariances(IDataset dataset, IEnumerable<int> rows) {
     75      return rows.Select(x => Variance);
    6976    }
    7077
    7178    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    72       return new ConstantRegressionSolution(this, new RegressionProblemData(problemData));
    73     }
    74     public IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) {
    75       return new ConstantClassificationSolution(this, new ClassificationProblemData(problemData));
    76     }
    77     public ITimeSeriesPrognosisSolution CreateTimeSeriesPrognosisSolution(ITimeSeriesPrognosisProblemData problemData) {
    78       return new TimeSeriesPrognosisSolution(this, new TimeSeriesPrognosisProblemData(problemData));
     79      return new ConfidenceRegressionSolution(this, new RegressionProblemData(problemData));
    7980    }
    8081
    8182    public override string ToString() {
    82       return string.Format("Constant: {0}", GetValue());
     83      return string.Format("Constant: {0:E4}, Variance: {1:E4}", Constant, Variance);
    8384    }
    8485
     
    101102#pragma warning restore 0067
    102103    #endregion
    103 
    104104  }
    105105}
Note: See TracChangeset for help on using the changeset viewer.