#region License Information
/* HeuristicLab
* Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using System;
using System.Collections.Generic;
using System.Linq;
namespace HeuristicLab.OptimizationExpertSystem.Common {
[Item("Distance Weighted Recommender", "Recommends algorithm instances by weighting their observed expected runtimes (ERT) with the similarity of the corresponding problem instances to the current problem.")]
[StorableClass]
public class DistanceWeightedRecommender : ParameterizedNamedItem, IAlgorithmInstanceRecommender {
  // NOTE(review): all generic type arguments were missing in the checked-in file
  // (apparently lost in an angle-bracket-stripping export); they are restored below.

  private IFixedValueParameter<DoubleValue> NeighborhoodFactorParameter {
    get { return (IFixedValueParameter<DoubleValue>)Parameters["NeighborhoodFactor"]; }
  }

  // Controls how strongly far-away problem instances are penalized:
  // weight = exp(-NeighborhoodFactor * distance / maxDistance).
  public double NeighborhoodFactor {
    get { return NeighborhoodFactorParameter.Value.Value; }
    set { NeighborhoodFactorParameter.Value.Value = value; }
  }

  [StorableConstructor]
  private DistanceWeightedRecommender(bool deserializing) : base(deserializing) { }
  private DistanceWeightedRecommender(DistanceWeightedRecommender original, Cloner cloner)
    : base(original, cloner) { }
  public DistanceWeightedRecommender() {
    Parameters.Add(new FixedValueParameter<DoubleValue>("NeighborhoodFactor", "Penalize neighbors that are far away.", new DoubleValue(5)));
  }

  public override IDeepCloneable Clone(Cloner cloner) {
    return new DistanceWeightedRecommender(this, cloner);
  }

  /// <summary>
  /// Ranks all algorithm instances in the knowledge base by a distance-weighted
  /// average of their expected runtimes (ERT) over problem instances similar to
  /// the current one. Lower weighted ERT means a better rank.
  /// </summary>
  /// <param name="problemInstances">Unused here; part of the recommender interface.</param>
  /// <param name="okc">Knowledge center providing runs, problem instances and distances.</param>
  /// <param name="characteristics">Feature names used to compute problem-instance distances.</param>
  /// <returns>A <c>FixedRankModel</c> of algorithms ordered by weighted average ERT.</returns>
  public IRecommendationModel TrainModel(IRun[] problemInstances, KnowledgeCenter okc, string[] characteristics) {
    var piDistances = okc.GetProblemDistances(characteristics);
    // Guard: Max() on an empty sequence throws; a zero maximum distance would
    // make the weight function divide by zero. In both cases fall back to 1.
    var maxDist = piDistances.Count > 0 ? piDistances.Max(x => x.Value) : 0.0;
    if (maxDist <= 0) maxDist = 1.0;
    var instances = new SortedList<double, IAlgorithm>();
    var random = new System.Random(); // single instance, reused for tie-breaking below
    foreach (var relevantRuns in okc.GetKnowledgeBaseByAlgorithm()) {
      var algorithm = relevantRuns.Key;
      // Exponentially decaying weight in the relative distance.
      Func<double, double> distFunc = d => Math.Exp(NeighborhoodFactor * (-d / maxDist));
      // Map problem name -> (distance weight, best-known quality), restricted to
      // problem instances that are known, have a BestKnownQuality and a distance.
      var pis = relevantRuns.Value.Select(x => ((StringValue)x.Parameters["Problem Name"]).Value).Distinct()
        .Select(x => Tuple.Create(x, okc.ProblemInstances.SingleOrDefault(y => ((StringValue)y.Parameters["Problem Name"]).Value == x)))
        .Where(x => x.Item2 != null && x.Item2.Parameters.ContainsKey("BestKnownQuality") && piDistances.ContainsKey(x.Item2))
        .Select(x => Tuple.Create(x.Item1, distFunc(piDistances[x.Item2]), ((DoubleValue)x.Item2.Parameters["BestKnownQuality"]).Value))
        .ToDictionary(x => x.Item1, x => Tuple.Create(x.Item2, x.Item3));
      var sumPis = pis.Sum(x => x.Value.Item1);
      // Guard: without comparable problem instances the average below would be
      // 0/0 = NaN and a NaN key would corrupt the SortedList ordering.
      if (sumPis <= 0) continue;
      var avgERT = 0.0;
      foreach (var problemRuns in relevantRuns.Value.GroupBy(x => ((StringValue)x.Parameters["Problem Name"]).Value)) {
        Tuple<double, double> info; // (distance weight, best-known quality)
        if (!pis.TryGetValue(problemRuns.Key, out info)) continue;
        // Build convergence graphs (evaluations, quality) per run, truncated at
        // the maximum evaluation budget and padded with the last quality value.
        var convGraph = new List<List<Tuple<double, double>>>();
        foreach (var run in problemRuns) {
          var current = new List<Tuple<double, double>>();
          var performanceGraph = (IndexedDataTable<double>)run.Results["QualityPerEvaluations"];
          current.AddRange(performanceGraph.Rows.First().Values.TakeWhile(v => v.Item1 < okc.MaximumEvaluations.Value));
          if (current.Count > 0) {
            current.Add(Tuple.Create((double)okc.MaximumEvaluations.Value, current.Last().Item2));
            convGraph.Add(current);
          }
        }
        // Target quality relative to the best-known quality (direction-aware).
        var target = (okc.Maximization ? (1 - okc.MinimumTarget.Value) : (1 + okc.MinimumTarget.Value)) * info.Item2;
        var ert = ExpectedRuntimeHelper.CalculateErt(convGraph, target, okc.Maximization).ExpectedRuntime;
        if (double.IsNaN(ert)) {
          // Fall back to computing the ERT directly from the runs.
          ert = ExpectedRuntimeHelper.CalculateErt(problemRuns.ToList(), "QualityPerEvaluations", target, okc.Maximization).ExpectedRuntime;
          if (double.IsNaN(ert)) ert = int.MaxValue; // target never reached
        }
        avgERT += info.Item1 * ert;
      }
      avgERT /= sumPis;
      // SortedList keys must be unique; jitter ties until the key is free
      // (the original tried a single random offset, which could still collide).
      while (instances.ContainsKey(avgERT))
        avgERT += random.NextDouble();
      instances.Add(avgERT, (IAlgorithm)algorithm.Clone());
    }
    return new FixedRankModel(instances.Select(x => Tuple.Create(x.Value, x.Key)));
  }
}
}