#region License Information
/* HeuristicLab
* Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Encodings.ParameterConfigurationEncoding;
using HeuristicLab.Optimization;

namespace HeuristicLab.Problems.MetaOptimization {
  public static class MetaOptimizationUtil {
    /// <summary>
    /// Removes those results from the run which are not declared in resultsToKeep.
    /// </summary>
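    /// <example>
    /// A hypothetical call that keeps only a single result entry; the result name "BestQuality" is
    /// illustrative and depends on the algorithm that produced the run:
    /// <code>
    /// MetaOptimizationUtil.ClearResults(run, new[] { "BestQuality" });
    /// </code>
    /// </example>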
    public static void ClearResults(IRun run, IEnumerable<string> resultsToKeep) {
      var resultsToRemove = new List<string>();
      foreach (var result in run.Results) {
        if (!resultsToKeep.Contains(result.Key))
          resultsToRemove.Add(result.Key);
      }
      foreach (var result in resultsToRemove)
        run.Results.Remove(result);
    }
    /// <summary>
    /// Removes those parameters from the run which are not declared in parametersToKeep.
    /// </summary>
    public static void ClearParameters(IRun run, IEnumerable<string> parametersToKeep) {
      var parametersToRemove = new List<string>();
      foreach (var parameter in run.Parameters) {
        if (!parametersToKeep.Contains(parameter.Key))
          parametersToRemove.Add(parameter.Key);
      }
      foreach (var parameter in parametersToRemove)
        run.Parameters.Remove(parameter);
    }
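    /// <summary>
    /// Normalizes the average qualities, quality standard deviations and average evaluated solutions
    /// of the given parameter configuration against the supplied reference values, stores the
    /// normalized arrays on the configuration, and combines their means into a single weighted
    /// quality value which is assigned to parameterConfigurationTree.Quality and returned.
    /// </summary>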
    public static double Normalize(
      ParameterConfigurationTree parameterConfigurationTree,
      double[] referenceQualityAverages,
      double[] referenceQualityDeviations,
      double[] referenceEvaluatedSolutionAverages,
      double qualityAveragesWeight,
      double qualityDeviationsWeight,
      double evaluatedSolutionsWeight,
      bool maximization) {
      double[] qualityAveragesNormalized = new double[referenceQualityAverages.Length];
      double[] qualityDeviationsNormalized = new double[referenceQualityDeviations.Length];
      double[] evaluatedSolutionAveragesNormalized = new double[referenceEvaluatedSolutionAverages.Length];

      for (int i = 0; i < referenceQualityAverages.Length; i++) {
        qualityAveragesNormalized[i] = parameterConfigurationTree.AverageQualities[i] / referenceQualityAverages[i];
        qualityDeviationsNormalized[i] = parameterConfigurationTree.QualityStandardDeviations[i] / referenceQualityDeviations[i];
        evaluatedSolutionAveragesNormalized[i] = parameterConfigurationTree.AverageEvaluatedSolutions[i] / referenceEvaluatedSolutionAverages[i];
        if (double.IsNaN(evaluatedSolutionAveragesNormalized[i])) evaluatedSolutionAveragesNormalized[i] = 0.0;
      }
      parameterConfigurationTree.NormalizedQualityAverages = new DoubleArray(qualityAveragesNormalized);
      parameterConfigurationTree.NormalizedQualityDeviations = new DoubleArray(qualityDeviationsNormalized);
      parameterConfigurationTree.NormalizedEvaluatedSolutions = new DoubleArray(evaluatedSolutionAveragesNormalized);

      double qualityAveragesNormalizedValue = qualityAveragesNormalized.Average();
      double qualityDeviationsNormalizedValue = qualityDeviationsNormalized.Average();
      double evaluatedSolutionAveragesNormalizedValue = evaluatedSolutionAveragesNormalized.Average();

      // Deviations and evaluated solutions are always to be minimized, so if maximization is true,
      // flip these values around 1.0 (e.g. 1.15 -> 0.85).
      if (maximization) {
        qualityDeviationsNormalizedValue -= (qualityDeviationsNormalizedValue - 1) * 2;
        evaluatedSolutionAveragesNormalizedValue -= (evaluatedSolutionAveragesNormalizedValue - 1) * 2;
      }

      // apply weights
      qualityAveragesNormalizedValue *= qualityAveragesWeight;
      qualityDeviationsNormalizedValue *= qualityDeviationsWeight;
      evaluatedSolutionAveragesNormalizedValue *= evaluatedSolutionsWeight;

      double weightSum = qualityAveragesWeight + qualityDeviationsWeight + evaluatedSolutionsWeight;
      parameterConfigurationTree.Quality = new DoubleValue((qualityAveragesNormalizedValue + qualityDeviationsNormalizedValue + evaluatedSolutionAveragesNormalizedValue) / weightSum);
      return parameterConfigurationTree.Quality.Value;
    }
    /// <summary>
    /// Creates a new instance of algorithmType, sets the given problem and parameterizes it with the given configuration.
    /// </summary>
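    /// <example>
    /// A minimal usage sketch; the GeneticAlgorithm type, the problem instance and the IRandom
    /// instance are placeholders that depend on the concrete meta-optimization setup:
    /// <code>
    /// IAlgorithm algorithm = MetaOptimizationUtil.CreateParameterizedAlgorithmInstance(
    ///   parameterConfigurationTree, typeof(GeneticAlgorithm), problem, randomize: true, random: random);
    /// algorithm.Start();
    /// </code>
    /// </example>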
    public static IAlgorithm CreateParameterizedAlgorithmInstance(ParameterConfigurationTree parameterConfigurationTree, Type algorithmType, IProblem problem, bool randomize = false, IRandom random = null) {
      var algorithm = (IAlgorithm)Activator.CreateInstance(algorithmType);
      algorithm.Problem = problem;
      if (algorithm is EngineAlgorithm) {
        ((EngineAlgorithm)algorithm).Engine = new SequentialEngine.SequentialEngine();
      }
      if (randomize) parameterConfigurationTree.Randomize(random);
      parameterConfigurationTree.Parameterize(algorithm);
      return algorithm;
    }
  }
}