- Timestamp:
- 01/15/17 09:57:50 (8 years ago)
- Location:
- branches/MemPRAlgorithm
- Files:
-
- 3 edited
- 1 copied
Legend:
- Unmodified
- Added
- Removed
-
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRAlgorithm.cs
r14563 r14573 302 302 303 303 if (!replaced && offspring != null) { 304 if (Context.HillclimbingSuited(offspring )) {304 if (Context.HillclimbingSuited(offspring.Fitness)) { 305 305 HillClimb(offspring, token, CalculateSubspace(Context.Population.Select(x => x.Solution))); 306 306 if (Replace(offspring, token)) { … … 566 566 AdaptiveWalk(newScope, maxEvals, token, subspace); 567 567 568 Context.AddAdaptivewalkingResult(scope, newScope); 568 569 if (Context.IsBetter(newScope, scope)) { 569 Context.AddAdaptivewalkingResult(scope, newScope);570 570 scope.Adopt(newScope); 571 } else if (!Eq(newScope, scope)) 572 Context.AddAdaptivewalkingResult(scope, newScope); 571 } 573 572 } 574 573 protected abstract void AdaptiveWalk(ISingleObjectiveSolutionScope<TSolution> scope, int maxEvals, CancellationToken token, ISolutionSubspace<TSolution> subspace = null); -
branches/MemPRAlgorithm/HeuristicLab.Algorithms.MemPR/3.3/MemPRContext.cs
r14563 r14573 27 27 using HeuristicLab.Algorithms.DataAnalysis; 28 28 using HeuristicLab.Algorithms.MemPR.Interfaces; 29 using HeuristicLab.Analysis; 29 30 using HeuristicLab.Common; 30 31 using HeuristicLab.Core; … … 343 344 } 344 345 346 #region Breeding Performance 347 public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) { 348 if (IsBetter(a, b)) 349 breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness)); 350 else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness)); 351 if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel(); 352 } 345 353 public void RelearnBreedingPerformanceModel() { 346 354 breedingPerformanceModel = RunRegression(PrepareRegression(ToListRow(breedingStat)), breedingPerformanceModel).Model; … … 360 368 return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { p1.Fitness, p2.Fitness, dist }, breedingPerformanceModel); 361 369 } 362 370 #endregion 371 372 #region Relinking Performance 373 public void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) { 374 if (IsBetter(a, b)) 375 relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness)); 376 else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? 
child.Fitness - b.Fitness : b.Fitness - child.Fitness)); 377 if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel(); 378 } 363 379 public void RelearnRelinkingPerformanceModel() { 364 380 relinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(relinkingStat)), relinkingPerformanceModel).Model; … … 381 397 return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, relinkingPerformanceModel); 382 398 } 383 399 #endregion 400 401 #region Delinking Performance 402 public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) { 403 if (IsBetter(a, b)) 404 delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness)); 405 else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? 
child.Fitness - b.Fitness : b.Fitness - child.Fitness)); 406 if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel(); 407 } 384 408 public void RelearnDelinkingPerformanceModel() { 385 409 delinkingPerformanceModel = RunRegression(PrepareRegression(ToListRow(delinkingStat)), delinkingPerformanceModel).Model; … … 401 425 return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(p2.Fitness, new List<double> { p1.Fitness, p2.Fitness, dist }, delinkingPerformanceModel); 402 426 } 403 427 #endregion 428 429 #region Sampling Performance 430 public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) { 431 samplingStat.Add(Tuple.Create(avgDist, sample.Fitness)); 432 if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel(); 433 } 404 434 public void RelearnSamplingPerformanceModel() { 405 435 samplingPerformanceModel = RunRegression(PrepareRegression(ToListRow(samplingStat)), samplingPerformanceModel).Model; … … 410 440 return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { avgDist }, samplingPerformanceModel); 411 441 } 412 442 #endregion 443 444 #region Hillclimbing Performance 445 public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) { 446 hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? 
outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness)); 447 if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel(); 448 } 413 449 public void RelearnHillclimbingPerformanceModel() { 414 450 hillclimbingPerformanceModel = RunRegression(PrepareRegression(ToListRow(hillclimbingStat)), hillclimbingPerformanceModel).Model; 415 }416 public bool HillclimbingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {417 return HillclimbingSuited(scope.Fitness);418 451 } 419 452 public bool HillclimbingSuited(double startingFitness) { … … 423 456 return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, hillclimbingPerformanceModel); 424 457 } 425 458 #endregion 459 460 #region Adaptivewalking Performance 461 public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) { 462 adaptivewalkingStat.Add(Tuple.Create(input.Fitness, Maximization ? 
outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness)); 463 if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel(); 464 } 426 465 public void RelearnAdaptiveWalkPerformanceModel() { 427 466 adaptiveWalkPerformanceModel = RunRegression(PrepareRegression(ToListRow(adaptivewalkingStat)), adaptiveWalkPerformanceModel).Model; 428 }429 public bool AdaptivewalkingSuited(ISingleObjectiveSolutionScope<TSolution> scope) {430 return AdaptivewalkingSuited(scope.Fitness);431 467 } 432 468 public bool AdaptivewalkingSuited(double startingFitness) { … … 434 470 if (startingFitness < AdaptivewalkingStat.Min(x => x.Item1) || startingFitness > AdaptivewalkingStat.Max(x => x.Item1)) 435 471 return true; 436 return Random.NextDouble() < ProbabilityAcceptAbsolutePerformanceModel(new List<double> { startingFitness }, adaptiveWalkPerformanceModel); 437 } 472 return Random.NextDouble() < ProbabilityAcceptRelativePerformanceModel(startingFitness, new List<double> { startingFitness }, adaptiveWalkPerformanceModel); 473 } 474 #endregion 438 475 439 476 public IConfidenceRegressionSolution GetSolution(IConfidenceRegressionModel model, IEnumerable<Tuple<double, double>> data) { … … 447 484 } 448 485 449 protected RegressionProblemData PrepareRegression(List<List<double>> sample) {450 var columns = sample.First().Select(y => new List<double>()).ToList();451 foreach (var next in sample.Shuffle(Random)) {486 protected RegressionProblemData PrepareRegression(List<List<double>> data) { 487 var columns = data.First().Select(y => new List<double>()).ToList(); 488 foreach (var next in data.Shuffle(Random)) { 452 489 for (var i = 0; i < next.Count; i++) { 453 490 columns[i].Add(next[i]); … … 456 493 var ds = new Dataset(columns.Select((v, i) => i < columns.Count - 1 ? 
"in" + i : "out").ToList(), columns); 457 494 var regPrb = new RegressionProblemData(ds, Enumerable.Range(0, columns.Count - 1).Select(x => "in" + x), "out") { 458 TrainingPartition = { Start = 0, End = Math.Min(50, sample.Count) },459 TestPartition = { Start = Math.Min(50, sample.Count), End = sample.Count }495 TrainingPartition = { Start = 0, End = Math.Min(50, data.Count) }, 496 TestPartition = { Start = Math.Min(50, data.Count), End = data.Count } 460 497 }; 461 498 return regPrb; … … 463 500 464 501 protected static IConfidenceRegressionSolution RunRegression(RegressionProblemData trainingData, IConfidenceRegressionModel baseLineModel = null) { 502 var targetValues = trainingData.Dataset.GetDoubleValues(trainingData.TargetVariable, trainingData.TrainingIndices).ToList(); 465 503 var baseline = baseLineModel != null ? new ConfidenceRegressionSolution(baseLineModel, trainingData) : null; 504 var constantSolution = new ConfidenceRegressionSolution(new ConfidenceConstantModel(targetValues.Average(), targetValues.Variance(), trainingData.TargetVariable), trainingData); 466 505 var gpr = new GaussianProcessRegression { Problem = { ProblemData = trainingData } }; 467 506 if (trainingData.InputVariables.CheckedItems.Any(x => alglib.pearsoncorr2(trainingData.Dataset.GetDoubleValues(x.Value.Value).ToArray(), trainingData.TargetVariableValues.ToArray()) > 0.8)) { … … 479 518 cnt++; 480 519 } while (cnt < 10 && (solution == null || solution.TrainingRSquared.IsAlmost(0))); 481 if (baseline == null) return solution; 482 if (trainingData.Dataset.Rows < 60) 483 return solution.TrainingMeanAbsoluteError < baseline.TrainingMeanAbsoluteError ? solution : baseline; 484 return solution.TestMeanAbsoluteError < baseline.TestMeanAbsoluteError ? 
solution : baseline; 520 521 return GetBestRegressionSolution(constantSolution, baseline, solution); 522 } 523 524 private static IConfidenceRegressionSolution GetBestRegressionSolution(IConfidenceRegressionSolution constant, IConfidenceRegressionSolution baseline, IConfidenceRegressionSolution solution) { 525 if (baseline == null) 526 return constant.TrainingMeanAbsoluteError < solution.TrainingMeanAbsoluteError ? constant : solution; 527 528 double a, b, c; 529 if (constant.ProblemData.Dataset.Rows < 60) { 530 c = constant.TrainingMeanAbsoluteError; 531 b = baseline.TrainingMeanAbsoluteError; 532 a = solution.TrainingMeanAbsoluteError; 533 } else { 534 c = constant.TestMeanAbsoluteError; 535 b = baseline.TestMeanAbsoluteError; 536 a = solution.TestMeanAbsoluteError; 537 } 538 if (c < b && (c < a || b < a)) return constant; 539 if (b < c && (b < a || c < a)) return baseline; 540 return solution; 485 541 } 486 542 … … 488 544 using (var evt = new AutoResetEvent(false)) { 489 545 EventHandler exeStateChanged = (o, args) => { 490 if (algorithm.ExecutionState == ExecutionState.Paused || algorithm.ExecutionState == ExecutionState.Stopped)546 if (algorithm.ExecutionState != ExecutionState.Started) 491 547 evt.Set(); 492 548 }; 493 549 algorithm.ExecutionStateChanged += exeStateChanged; 494 algorithm.Prepare(true); 550 if (algorithm.ExecutionState != ExecutionState.Prepared) { 551 algorithm.Prepare(true); 552 evt.WaitOne(); 553 } 495 554 algorithm.Start(); 496 555 evt.WaitOne(); … … 547 606 } 548 607 549 public void AddBreedingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {550 if (IsBetter(a, b))551 breedingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, child.Fitness));552 else breedingStat.Add(Tuple.Create(b.Fitness, a.Fitness, parentDist, child.Fitness));553 if (breedingStat.Count % 10 == 0) RelearnBreedingPerformanceModel();554 }555 556 public 
void AddRelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {557 if (IsBetter(a, b))558 relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));559 else relinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));560 if (relinkingStat.Count % 10 == 0) RelearnRelinkingPerformanceModel();561 }562 563 public void AddDelinkingResult(ISingleObjectiveSolutionScope<TSolution> a, ISingleObjectiveSolutionScope<TSolution> b, double parentDist, ISingleObjectiveSolutionScope<TSolution> child) {564 if (IsBetter(a, b))565 delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - a.Fitness : a.Fitness - child.Fitness));566 else delinkingStat.Add(Tuple.Create(a.Fitness, b.Fitness, parentDist, Maximization ? child.Fitness - b.Fitness : b.Fitness - child.Fitness));567 if (delinkingStat.Count % 10 == 0) RelearnDelinkingPerformanceModel();568 }569 570 public void AddSamplingResult(ISingleObjectiveSolutionScope<TSolution> sample, double avgDist) {571 samplingStat.Add(Tuple.Create(avgDist, sample.Fitness));572 if (samplingStat.Count % 10 == 0) RelearnSamplingPerformanceModel();573 }574 575 public void AddHillclimbingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {576 hillclimbingStat.Add(Tuple.Create(input.Fitness, Maximization ? 
outcome.Fitness - input.Fitness : input.Fitness - outcome.Fitness));577 if (hillclimbingStat.Count % 10 == 0) RelearnHillclimbingPerformanceModel();578 }579 580 public void AddAdaptivewalkingResult(ISingleObjectiveSolutionScope<TSolution> input, ISingleObjectiveSolutionScope<TSolution> outcome) {581 adaptivewalkingStat.Add(Tuple.Create(input.Fitness, outcome.Fitness));582 if (adaptivewalkingStat.Count % 10 == 0) RelearnAdaptiveWalkPerformanceModel();583 }584 585 608 #region IExecutionContext members 586 609 public IAtomicOperation CreateOperation(IOperator op) { … … 599 622 return new ExecutionContext(this, op, s); 600 623 } 601 #endregion602 603 #region Math Helper604 // normal distribution CDF (left of x) for N(0;1) standard normal distribution605 // from http://www.johndcook.com/blog/csharp_phi/606 // license: "This code is in the public domain. Do whatever you want with it, no strings attached."607 // added: 2016-11-19 21:46 CET608 /*protected static double Phi(double x) {609 // constants610 double a1 = 0.254829592;611 double a2 = -0.284496736;612 double a3 = 1.421413741;613 double a4 = -1.453152027;614 double a5 = 1.061405429;615 double p = 0.3275911;616 617 // Save the sign of x618 int sign = 1;619 if (x < 0)620 sign = -1;621 x = Math.Abs(x) / Math.Sqrt(2.0);622 623 // A&S formula 7.1.26624 double t = 1.0 / (1.0 + p * x);625 double y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.Exp(-x * x);626 627 return 0.5 * (1.0 + sign * y);628 }*/629 624 #endregion 630 625 -
branches/MemPRAlgorithm/HeuristicLab.Problems.DataAnalysis/3.4/HeuristicLab.Problems.DataAnalysis-3.4.csproj
r14099 r14573 136 136 <Compile Include="Implementation\Clustering\ClusteringProblemData.cs" /> 137 137 <Compile Include="Implementation\Clustering\ClusteringSolution.cs" /> 138 <Compile Include="Implementation\ConfidenceConstantModel.cs" /> 138 139 <Compile Include="Implementation\ConstantModel.cs" /> 139 140 <Compile Include="Implementation\DataAnalysisModel.cs" /> -
branches/MemPRAlgorithm/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/ConfidenceConstantModel.cs
r14561 r14573 30 30 namespace HeuristicLab.Problems.DataAnalysis { 31 31 [StorableClass] 32 [Item("Con stant Model", "A model that always returns the same constant value regardless of the presented input data.")]33 public class Con stantModel : RegressionModel, IClassificationModel, ITimeSeriesPrognosisModel, IStringConvertibleValue {32 [Item("Confidence Constant Model", "A model that always returns the same constant mean value and variance regardless of the presented input data.")] 33 public class ConfidenceConstantModel : RegressionModel, IConfidenceRegressionModel, IStringConvertibleValue { 34 34 public override IEnumerable<string> VariablesUsedForPrediction { get { return Enumerable.Empty<string>(); } } 35 35 … … 42 42 } 43 43 44 [Storable] 45 private readonly double variance; 46 public double Variance { 47 get { return variance; } 48 // setter not implemented because manipulation of the variance is not allowed 49 } 50 44 51 [StorableConstructor] 45 protected Con stantModel(bool deserializing) : base(deserializing) { }46 protected Con stantModel(ConstantModel original, Cloner cloner)52 protected ConfidenceConstantModel(bool deserializing) : base(deserializing) { } 53 protected ConfidenceConstantModel(ConfidenceConstantModel original, Cloner cloner) 47 54 : base(original, cloner) { 48 55 this.constant = original.constant; 56 this.variance = original.variance; 49 57 } 50 58 51 public override IDeepCloneable Clone(Cloner cloner) { return new Con stantModel(this, cloner); }59 public override IDeepCloneable Clone(Cloner cloner) { return new ConfidenceConstantModel(this, cloner); } 52 60 53 public Con stantModel(double constant, string targetVariable)61 public ConfidenceConstantModel(double constant, double variance, string targetVariable) 54 62 : base(targetVariable) { 55 63 this.name = ItemName; 56 64 this.description = ItemDescription; 57 65 this.constant = constant; 66 this.variance = variance; 58 67 this.ReadOnly = true; // changing a constant regression model 
is not supported 59 68 } … … 62 71 return rows.Select(row => Constant); 63 72 } 64 public IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) { 65 return GetEstimatedValues(dataset, rows); 66 } 67 public IEnumerable<IEnumerable<double>> GetPrognosedValues(IDataset dataset, IEnumerable<int> rows, IEnumerable<int> horizons) { 68 return rows.Select(_ => horizons.Select(__ => Constant)); 73 74 public IEnumerable<double> GetEstimatedVariances(IDataset dataset, IEnumerable<int> rows) { 75 return rows.Select(x => Variance); 69 76 } 70 77 71 78 public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) { 72 return new ConstantRegressionSolution(this, new RegressionProblemData(problemData)); 73 } 74 public IClassificationSolution CreateClassificationSolution(IClassificationProblemData problemData) { 75 return new ConstantClassificationSolution(this, new ClassificationProblemData(problemData)); 76 } 77 public ITimeSeriesPrognosisSolution CreateTimeSeriesPrognosisSolution(ITimeSeriesPrognosisProblemData problemData) { 78 return new TimeSeriesPrognosisSolution(this, new TimeSeriesPrognosisProblemData(problemData)); 79 return new ConfidenceRegressionSolution(this, new RegressionProblemData(problemData)); 79 80 } 80 81 81 82 public override string ToString() { 82 return string.Format("Constant: {0 }", GetValue());83 return string.Format("Constant: {0:E4}, Variance: {1:E4}", Constant, Variance); 83 84 } 84 85 … … 101 102 #pragma warning restore 0067 102 103 #endregion 103 104 104 } 105 105 }
Note: See TracChangeset for help on using the changeset viewer.