#region License Information
/* HeuristicLab
* Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.ParameterConfigurationEncoding;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
namespace HeuristicLab.Problems.MetaOptimization {
/// <summary>
/// An operator which analyzes the runs of the algorithms that were executed for a parameter configuration
/// and assigns a quality value to that configuration.
/// </summary>
[Item("AlgorithmRunsAnalyzer", "An operator which analyzes the runs of finished algorithms and assigns a quality value to the evaluated parameter configuration.")]
[StorableClass]
public class AlgorithmRunsAnalyzer : SingleSuccessorOperator {
#region Parameter properties
public ILookupParameter<DoubleValue> QualityParameter {
get { return (ILookupParameter<DoubleValue>)Parameters["Quality"]; }
}
public LookupParameter<IntValue> GenerationsParameter {
get { return (LookupParameter<IntValue>)Parameters["Generations"]; }
}
public LookupParameter<IntValue> RepetitionsParameter {
get { return (LookupParameter<IntValue>)Parameters[MetaOptimizationProblem.RepetitionsParameterName]; }
}
public ILookupParameter<ParameterConfigurationTree> ParameterConfigurationParameter {
get { return (ILookupParameter<ParameterConfigurationTree>)Parameters["ParameterConfigurationTree"]; }
}
public ILookupParameter<IItemList<IProblem>> ProblemsParameter {
get { return (ILookupParameter<IItemList<IProblem>>)Parameters[MetaOptimizationProblem.ProblemsParameterName]; }
}
public LookupParameter<DoubleArray> ReferenceQualityAveragesParameter {
get { return (LookupParameter<DoubleArray>)Parameters["ReferenceQualityAverages"]; }
}
public LookupParameter<DoubleArray> ReferenceQualityDeviationsParameter {
get { return (LookupParameter<DoubleArray>)Parameters["ReferenceQualityDeviations"]; }
}
public LookupParameter<DoubleArray> ReferenceEvaluatedSolutionAveragesParameter {
get { return (LookupParameter<DoubleArray>)Parameters["ReferenceEvaluatedSolutionAverages"]; }
}
public LookupParameter<ResultCollection> ResultsParameter {
get { return (LookupParameter<ResultCollection>)Parameters["Results"]; }
}
public ScopeTreeLookupParameter<IAlgorithm> AlgorithmParameter {
get { return (ScopeTreeLookupParameter<IAlgorithm>)Parameters["Algorithm"]; }
}
public ScopeTreeLookupParameter<IntValue> ProblemIndexParameter {
get { return (ScopeTreeLookupParameter<IntValue>)Parameters["ProblemIndex"]; }
}
public ScopeTreeLookupParameter<IntValue> RepetitionIndexParameter {
get { return (ScopeTreeLookupParameter<IntValue>)Parameters["RepetitionIndex"]; }
}
public LookupParameter<BoolValue> MaximizationParameter {
get { return (LookupParameter<BoolValue>)Parameters["Maximization"]; }
}
public LookupParameter<DoubleValue> QualityWeightParameter {
get { return (LookupParameter<DoubleValue>)Parameters[MetaOptimizationProblem.QualityWeightParameterName]; }
}
public LookupParameter<DoubleValue> StandardDeviationWeightParameter {
get { return (LookupParameter<DoubleValue>)Parameters[MetaOptimizationProblem.StandardDeviationWeightParameterName]; }
}
public LookupParameter<DoubleValue> EvaluatedSolutionsWeightParameter {
get { return (LookupParameter<DoubleValue>)Parameters[MetaOptimizationProblem.EvaluatedSolutionsWeightParameterName]; }
}
private ScopeParameter CurrentScopeParameter {
get { return (ScopeParameter)Parameters["CurrentScope"]; }
}
public IScope CurrentScope {
get { return CurrentScopeParameter.ActualValue; }
}
public LookupParameter<StringValue> QualityMeasureNameParameter {
get { return (LookupParameter<StringValue>)Parameters[MetaOptimizationProblem.QualityMeasureNameName]; }
}
#endregion
#region Constructors and Cloning
public AlgorithmRunsAnalyzer()
: base() {
Parameters.Add(new LookupParameter<IRandom>("Random", "The pseudo random number generator which should be used to initialize the new random permutation."));
Parameters.Add(new LookupParameter<DoubleValue>("Quality", "The evaluated quality of the ParameterVector."));
Parameters.Add(new LookupParameter<IntValue>("Generations", ""));
Parameters.Add(new LookupParameter<IntValue>(MetaOptimizationProblem.RepetitionsParameterName, "Number of evaluations on one problem."));
Parameters.Add(new LookupParameter<ParameterConfigurationTree>("ParameterConfigurationTree", ""));
Parameters.Add(new LookupParameter<IItemList<IProblem>>(MetaOptimizationProblem.ProblemsParameterName, ""));
Parameters.Add(new LookupParameter<DoubleArray>("ReferenceQualityAverages", ""));
Parameters.Add(new LookupParameter<DoubleArray>("ReferenceQualityDeviations", ""));
Parameters.Add(new LookupParameter<DoubleArray>("ReferenceEvaluatedSolutionAverages", ""));
Parameters.Add(new LookupParameter<ResultCollection>("Results", ""));
Parameters.Add(new ScopeTreeLookupParameter<IAlgorithm>("Algorithm", "The finished algorithms containing Runs."));
Parameters.Add(new ScopeTreeLookupParameter<IntValue>("ProblemIndex", "The index of the problem an algorithm was executed with."));
Parameters.Add(new ScopeTreeLookupParameter<IntValue>("RepetitionIndex", "The index of the repetition."));
Parameters.Add(new LookupParameter<BoolValue>("Maximization", "Set to false if the problem should be minimized."));
Parameters.Add(new LookupParameter<DoubleValue>(MetaOptimizationProblem.QualityWeightParameterName));
Parameters.Add(new LookupParameter<DoubleValue>(MetaOptimizationProblem.StandardDeviationWeightParameterName));
Parameters.Add(new LookupParameter<DoubleValue>(MetaOptimizationProblem.EvaluatedSolutionsWeightParameterName));
Parameters.Add(new LookupParameter<StringValue>(MetaOptimizationProblem.QualityMeasureNameName));
Parameters.Add(new ScopeParameter("CurrentScope", "The current scope whose sub-scopes represent the parents."));
}
[StorableConstructor]
protected AlgorithmRunsAnalyzer(bool deserializing) : base(deserializing) { }
protected AlgorithmRunsAnalyzer(AlgorithmRunsAnalyzer original, Cloner cloner) : base(original, cloner) { }
public override IDeepCloneable Clone(Cloner cloner) {
return new AlgorithmRunsAnalyzer(this, cloner);
}
[StorableHook(HookType.AfterDeserialization)]
private void AfterDeserialization() {
if (!Parameters.ContainsKey("CurrentScope")) Parameters.Add(new ScopeParameter("CurrentScope", "The current scope whose sub-scopes represent the parents.")); // backwards compatibility
if (!Parameters.ContainsKey(MetaOptimizationProblem.QualityMeasureNameName)) Parameters.Add(new LookupParameter<StringValue>(MetaOptimizationProblem.QualityMeasureNameName)); // backwards compatibility
}
#endregion
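/// <summary>
/// Collects the runs of the finished algorithms, aggregates their qualities, execution times and evaluated
/// solutions per problem, and assigns a normalized quality (or a penalty value if runs are missing) to the parameter configuration.
/// </summary>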
public override IOperation Apply() {
ParameterConfigurationTree parameterConfiguration = ParameterConfigurationParameter.ActualValue;
ItemArray<IAlgorithm> algorithms = AlgorithmParameter.ActualValue;
ItemArray<IntValue> problemIndices = ProblemIndexParameter.ActualValue;
ItemArray<IntValue> repetitionIndices = RepetitionIndexParameter.ActualValue;
IEnumerable<string> parameterNames = parameterConfiguration.GetOptimizedParameterNames();
IItemList<IProblem> problems = ProblemsParameter.ActualValue;
bool maximization = MaximizationParameter.ActualValue.Value;
int repetitions = RepetitionsParameter.ActualValue.Value;
double qualityWeight = QualityWeightParameter.ActualValue.Value;
double standardDeviationWeight = StandardDeviationWeightParameter.ActualValue.Value;
double evaluatedSolutionsWeight = EvaluatedSolutionsWeightParameter.ActualValue.Value;
string qualityMeasureName = QualityMeasureNameParameter.ActualValue.Value;
var resultNames = new List<string> { qualityMeasureName, "Execution Time", "EvaluatedSolutions" };
int currentGeneration = GenerationsParameter.ActualValue != null ? GenerationsParameter.ActualValue.Value : 0;
double[] referenceQualityAverages;
double[] referenceQualityDeviations;
double[] referenceEvaluatedSolutionAverages;
GetReferenceValues(problems.Count, out referenceQualityAverages, out referenceQualityDeviations, out referenceEvaluatedSolutionAverages);
ResultCollection results = ResultsParameter.ActualValue;
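// every algorithm is expected to contain exactly one run; if any run is missing, the evaluation failed and a penalty is assigned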
if (algorithms.All(x => x.Runs.Count == 1)) {
var runs = new RunCollection();
var qualities = new double[problems.Count][];
var executionTimes = new TimeSpan[problems.Count][];
var evaluatedSolutions = new int[problems.Count][];
for (int i = 0; i < problems.Count; i++) {
qualities[i] = new double[repetitions];
evaluatedSolutions[i] = new int[repetitions];
executionTimes[i] = new TimeSpan[repetitions];
}
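// collect quality, execution time and evaluated solutions of every run, indexed by problem and repetition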
for (int i = 0; i < algorithms.Length; i++) {
int problemIndex = problemIndices[i].Value;
int repetitionIndex = repetitionIndices[i].Value;
IRun run = (IRun)algorithms[i].Runs.Single().Clone();
MetaOptimizationUtil.ClearResults(run, resultNames);
MetaOptimizationUtil.ClearParameters(run, parameterNames);
run.Results.Add("Meta-FromCache", new BoolValue(false));
run.Results.Add("Meta-Generation", new IntValue(currentGeneration));
run.Results.Add("Meta-ProblemIndex", new IntValue(problemIndex));
run.Name = string.Format("{0} Problem {1} Run {2}", parameterConfiguration.ParameterInfoString, problemIndex, repetitionIndex);
DoubleValue quality;
if (TryGetResultValue(run.Results, qualityMeasureName, out quality))
qualities[problemIndex][repetitionIndex] = quality.Value;
TimeSpanValue execTime;
if (TryGetResultValue(run.Results, "Execution Time", out execTime))
executionTimes[problemIndex][repetitionIndex] = execTime.Value;
IntValue evalSolutions;
if (TryGetResultValue(run.Results, "EvaluatedSolutions", out evalSolutions))
evaluatedSolutions[problemIndex][repetitionIndex] = evalSolutions.Value;
runs.Add(run);
}
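// aggregate the collected values per problem and store them in the parameter configuration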
parameterConfiguration.AverageExecutionTimes = new ItemList<TimeSpanValue>(executionTimes.Select(t => new TimeSpanValue(TimeSpan.FromMilliseconds(t.Average(ts => ts.TotalMilliseconds)))));
parameterConfiguration.AverageEvaluatedSolutions = new DoubleArray(evaluatedSolutions.Select(x => x.Average()).ToArray());
parameterConfiguration.Repetitions = new IntValue(repetitions);
parameterConfiguration.AverageQualities = new DoubleArray(qualities.Select(q => q.Average()).ToArray());
if (maximization)
parameterConfiguration.BestQualities = new DoubleArray(qualities.Select(q => q.Max()).ToArray());
else
parameterConfiguration.BestQualities = new DoubleArray(qualities.Select(q => q.Min()).ToArray());
if (maximization)
parameterConfiguration.WorstQualities = new DoubleArray(qualities.Select(q => q.Min()).ToArray());
else
parameterConfiguration.WorstQualities = new DoubleArray(qualities.Select(q => q.Max()).ToArray());
parameterConfiguration.QualityVariances = new DoubleArray(qualities.Select(q => q.Variance()).ToArray());
parameterConfiguration.QualityStandardDeviations = new DoubleArray(qualities.Select(q => q.StandardDeviation()).ToArray());
parameterConfiguration.Runs = runs;
this.QualityParameter.ActualValue = new DoubleValue(MetaOptimizationUtil.Normalize(parameterConfiguration, referenceQualityAverages, referenceQualityDeviations, referenceEvaluatedSolutionAverages, qualityWeight, standardDeviationWeight, evaluatedSolutionsWeight, maximization));
} else {
// something terrible happened -> most probably due to invalid parameters.
// penalty with worst quality from latest generation!
double penaltyValue;
if (maximization)
penaltyValue = results.ContainsKey("CurrentWorstQuality") ? ((DoubleValue)results["CurrentWorstQuality"].Value).Value : referenceQualityAverages.Min();
else
penaltyValue = results.ContainsKey("CurrentWorstQuality") ? ((DoubleValue)results["CurrentWorstQuality"].Value).Value : referenceQualityAverages.Max();
this.QualityParameter.ActualValue = new DoubleValue(penaltyValue);
parameterConfiguration.Quality = new DoubleValue(penaltyValue);
parameterConfiguration.AverageExecutionTimes = new ItemList<TimeSpanValue>(Enumerable.Repeat(new TimeSpanValue(TimeSpan.Zero), problems.Count));
parameterConfiguration.AverageEvaluatedSolutions = new DoubleArray(Enumerable.Repeat(0.0, problems.Count).ToArray());
parameterConfiguration.Repetitions = new IntValue(repetitions);
parameterConfiguration.AverageQualities = new DoubleArray(Enumerable.Repeat(0.0, problems.Count).ToArray());
parameterConfiguration.BestQualities = new DoubleArray(Enumerable.Repeat(0.0, problems.Count).ToArray());
parameterConfiguration.WorstQualities = new DoubleArray(Enumerable.Repeat(0.0, problems.Count).ToArray());
parameterConfiguration.QualityVariances = new DoubleArray(Enumerable.Repeat(0.0, problems.Count).ToArray());
parameterConfiguration.QualityStandardDeviations = new DoubleArray(Enumerable.Repeat(0.0, problems.Count).ToArray());
parameterConfiguration.Runs = null;
}
// in OSGA there are more subscopes, so be careful which to delete
CurrentScope.SubScopes.RemoveAll(x => x.Variables.Count(v => (v.Name == "RepetitionCount")) == 1);
return base.Apply();
}
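// resolves a result value along a dot-separated path (e.g. "Algorithm.BestQuality") through nested result dictionaries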
private bool TryGetResultValue<T>(IDictionary<string, IItem> results, string resultName, out T resultValue) {
IDictionary<string, IItem> currentResults = results;
string[] tokens = resultName.Split('.');
T currentResultValue = default(T);
bool found = true;
foreach (var token in tokens) {
if (currentResults != null && currentResults.ContainsKey(token)) {
currentResultValue = (T)currentResults[token];
currentResults = currentResultValue as IDictionary<string, IItem>;
} else {
found = false;
break;
}
}
resultValue = found ? currentResultValue : default(T);
return found;
}
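// provides the reference values used for normalization; in generation zero no references exist yet, so 1.0 is used for every problem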
private void GetReferenceValues(int problemsCount, out double[] referenceQualityAverages, out double[] referenceQualityDeviations, out double[] referenceEvaluatedSolutionAverages) {
if (ReferenceQualityAveragesParameter.ActualValue == null) {
// this is generation zero. no reference qualities for normalization have been calculated yet. in this special case the ReferenceQualityAnalyzer will do the normalization
referenceQualityAverages = new double[problemsCount];
referenceQualityDeviations = new double[problemsCount];
referenceEvaluatedSolutionAverages = new double[problemsCount];
for (int i = 0; i < referenceQualityAverages.Length; i++) {
referenceQualityAverages[i] = 1;
referenceQualityDeviations[i] = 1;
referenceEvaluatedSolutionAverages[i] = 1;
}
} else {
referenceQualityAverages = ReferenceQualityAveragesParameter.ActualValue.ToArray();
referenceQualityDeviations = ReferenceQualityDeviationsParameter.ActualValue.ToArray();
referenceEvaluatedSolutionAverages = ReferenceEvaluatedSolutionAveragesParameter.ActualValue.ToArray();
}
}
}
}