
source: branches/1614_GeneralizedQAP/HeuristicLab.OptimizationExpertSystem.Common/3.3/Recommenders/DistanceWeightedRecommender.cs @ 17141

Last change on this file since 17141 was 13861, checked in by abeham, 8 years ago

#2457: added directed walk for qap

File size: 5.2 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using System;
using System.Collections.Generic;
using System.Linq;

namespace HeuristicLab.OptimizationExpertSystem.Common {
  [Item("Distance Weighted Recommender", "Recommends algorithm instances by their expected runtime on known problem instances, weighted by each instance's distance to the target problem.")]
  [StorableClass]
  public class DistanceWeightedRecommender : ParameterizedNamedItem, IAlgorithmInstanceRecommender {

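    // Factor of the exponential distance kernel used in TrainModel; larger values penalize
    // runs from problem instances that lie farther away from the target problem more strongly.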
    private IFixedValueParameter<DoubleValue> NeighborhoodFactorParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters["NeighborhoodFactor"]; }
    }

    public double NeighborhoodFactor {
      get { return NeighborhoodFactorParameter.Value.Value; }
      set { NeighborhoodFactorParameter.Value.Value = value; }
    }

    [StorableConstructor]
    private DistanceWeightedRecommender(bool deserializing) : base(deserializing) { }
    private DistanceWeightedRecommender(DistanceWeightedRecommender original, Cloner cloner)
      : base(original, cloner) { }
    public DistanceWeightedRecommender() {
      Parameters.Add(new FixedValueParameter<DoubleValue>("NeighborhoodFactor", "Penalize neighbors that are far away.", new DoubleValue(5)));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new DistanceWeightedRecommender(this, cloner);
    }

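    // Ranks algorithm instances for the target problem: each algorithm's expected runtime (ERT)
    // on the problem instances it was run on is averaged with distance-based weights, and the
    // algorithms are ordered by this weighted average (lower is better).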
    public IRecommendationModel TrainModel(IRun[] problemInstances, KnowledgeCenter okc, string[] characteristics) {
      var piDistances = okc.GetProblemDistances(characteristics);
      var maxDist = piDistances.Max(x => x.Value);
      var instances = new SortedList<double, IAlgorithm>();
      foreach (var relevantRuns in okc.GetKnowledgeBaseByAlgorithm()) {
        var algorithm = relevantRuns.Key;
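        // Exponential kernel: weight = exp(-NeighborhoodFactor * d / maxDist), so runs on
        // problem instances close to the target count almost fully, distant ones hardly at all.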
        Func<double, double> distFunc = (d) => Math.Exp(NeighborhoodFactor * (-d / maxDist));
        var pis = relevantRuns.Value.Select(x => ((StringValue)x.Parameters["Problem Name"]).Value).Distinct()
                              .Select(x => Tuple.Create(x, okc.ProblemInstances.SingleOrDefault(y => ((StringValue)y.Parameters["Problem Name"]).Value == x)))
                              .Where(x => x.Item2 != null && x.Item2.Parameters.ContainsKey("BestKnownQuality") && piDistances.ContainsKey(x.Item2))
                              .Select(x => Tuple.Create(x.Item1, distFunc(piDistances[x.Item2]), ((DoubleValue)x.Item2.Parameters["BestKnownQuality"]).Value))
                              .ToDictionary(x => x.Item1, x => Tuple.Create(x.Item2, x.Item3));
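        // Per problem instance: the distance weight and the best-known quality; instances
        // without a best-known quality or without a distance to the target are skipped.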
        var sumPis = pis.Sum(x => x.Value.Item1);
        var avgERT = 0.0;
        foreach (var problemRuns in relevantRuns.Value.GroupBy(x => ((StringValue)x.Parameters["Problem Name"]).Value)) {
          Tuple<double, double> info;
          if (!pis.TryGetValue(problemRuns.Key, out info)) continue;
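          // Reconstruct each run's convergence graph (quality over evaluations), truncated at
          // the maximum evaluation budget and padded with the last observed quality.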
          var convGraph = new List<List<Tuple<double, double>>>();
          foreach (var run in problemRuns) {
            var current = new List<Tuple<double, double>>();
            var performanceGraph = ((IndexedDataTable<double>)run.Results["QualityPerEvaluations"]);
            current.AddRange(performanceGraph.Rows.First().Values.TakeWhile(v => v.Item1 < okc.MaximumEvaluations.Value));
            if (current.Count > 0) {
              current.Add(Tuple.Create((double)okc.MaximumEvaluations.Value, current.Last().Item2));
              convGraph.Add(current);
            }
          }
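          // ERT to reach the target quality derived from the best-known quality and the minimum
          // target; if the truncated graphs yield no ERT, recompute it from the runs' full
          // QualityPerEvaluations results, and finally treat the target as never reached.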
          var ert = ExpectedRuntimeHelper.CalculateErt(convGraph, (okc.Maximization ? (1 - okc.MinimumTarget.Value) : (1 + okc.MinimumTarget.Value)) * info.Item2, okc.Maximization).ExpectedRuntime;
          if (double.IsNaN(ert)) {
            ert = ExpectedRuntimeHelper.CalculateErt(problemRuns.ToList(), "QualityPerEvaluations", (okc.Maximization ? (1 - okc.MinimumTarget.Value) : (1 + okc.MinimumTarget.Value)) * info.Item2, okc.Maximization).ExpectedRuntime;
            if (double.IsNaN(ert)) ert = int.MaxValue;
          }
          avgERT += info.Item1 * ert;
        }
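        // Normalize by the total distance weight to obtain the weighted average ERT.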
        avgERT /= sumPis;
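        // SortedList keys must be unique; break exact ties with a small random offset.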
        if (instances.ContainsKey(avgERT)) {
          avgERT += new System.Random().NextDouble();
        }
        instances.Add(avgERT, (IAlgorithm)algorithm.Clone());
      }

      return new FixedRankModel(instances.Select(x => Tuple.Create(x.Value, x.Key)));
    }
  }
}