#region License Information
/* HeuristicLab
* Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
*
* This file is part of HeuristicLab.
*
* HeuristicLab is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HeuristicLab is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
*/
#endregion
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
using HeuristicLab.Optimization;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Classification;
namespace HeuristicLab.Algorithms.DataAnalysis {
/// <summary>
/// 0R classification algorithm.
/// </summary>
[Item("ZeroR", "0R classification algorithm.")]
[Creatable("Data Analysis")]
[StorableClass]
public sealed class ZeroR : FixedDataAnalysisAlgorithm<IClassificationProblem> {
  [StorableConstructor]
  private ZeroR(bool deserializing) : base(deserializing) { }
  private ZeroR(ZeroR original, Cloner cloner)
    : base(original, cloner) {
  }
  public ZeroR()
    : base() {
    Problem = new ClassificationProblem();
  }

  public override IDeepCloneable Clone(Cloner cloner) {
    return new ZeroR(this, cloner);
  }

  protected override void Run() {
    var solution = CreateZeroRSolution(Problem.ProblemData);
    Results.Add(new Result("ZeroR solution", "The 0R classifier.", solution));
  }

  /// <summary>
  /// Creates a 0R (ZeroR) solution: a constant classifier that always predicts the
  /// class value occurring most frequently in the training partition.
  /// </summary>
  /// <param name="problemData">The classification problem data to build the baseline for.</param>
  /// <returns>A constant classification solution predicting the majority class.</returns>
  public static IClassificationSolution CreateZeroRSolution(IClassificationProblemData problemData) {
    Dataset dataset = problemData.Dataset;
    string target = problemData.TargetVariable;

    // Count the occurrences of each class value among the training samples.
    // Initializing all known classes to zero guarantees that classes absent
    // from the training partition are still represented with count 0.
    var classValuesCount = new Dictionary<double, int>(problemData.ClassValues.Count());
    foreach (double classValue in problemData.ClassValues)
      classValuesCount[classValue] = 0;
    foreach (double value in dataset.GetDoubleValues(target, problemData.TrainingIndices))
      classValuesCount[value] += 1;

    // Pick the most frequent class. Ties are resolved in favor of the class
    // that is enumerated first by problemData.ClassValues.
    // NOTE: the original implementation called IEnumerator.Reset() to re-iterate
    // the class values; iterators produced by LINQ throw NotSupportedException
    // on Reset(), so plain foreach loops are used instead.
    int mostOccurences = -1;
    double bestClass = double.NaN;
    foreach (double classValue in problemData.ClassValues) {
      if (classValuesCount[classValue] > mostOccurences) {
        mostOccurences = classValuesCount[classValue];
        bestClass = classValue;
      }
    }

    var model = new ConstantClassificationModel(bestClass);
    var solution = new ConstantClassificationSolution(model, (IClassificationProblemData)problemData.Clone());
    return solution;
  }

  /// <summary>
  /// Builds a symbolic discriminant-function classification model whose thresholds
  /// lie midway between adjacent class values; the first threshold is negative infinity.
  /// </summary>
  private static SymbolicDiscriminantFunctionClassificationModel CreateDiscriminantFunctionModel(ISymbolicExpressionTree tree,
    ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
    IClassificationProblemData problemData,
    IEnumerable<int> rows,
    IEnumerable<double> classValues) {
    var model = new SymbolicDiscriminantFunctionClassificationModel(tree, interpreter, new AccuracyMaximizationThresholdCalculator());
    IList<double> thresholds = new List<double>();
    double last = 0;
    foreach (double item in classValues) {
      if (thresholds.Count == 0) {
        // No lower bound for the first class interval.
        thresholds.Add(double.NegativeInfinity);
      } else {
        // Boundary halfway between the previous and the current class value.
        thresholds.Add((last + item) / 2);
      }
      last = item;
    }
    model.SetThresholdsAndClassValues(thresholds, classValues);
    return model;
  }
}
}