Timestamp: 08/05/16 17:40:11 (8 years ago)
Author: gkronber
Message: #2650: merged r14234:14236 from trunk to branch
Location: branches/symbreg-factors-2650
Files: 3 edited

Legend:
  (no prefix)  Unmodified
  +            Added
  -            Removed
  • branches/symbreg-factors-2650

  • branches/symbreg-factors-2650/HeuristicLab.Algorithms.DataAnalysis

  • branches/symbreg-factors-2650/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourClassification.cs

    r14185 → r14239

     private const string KParameterName = "K";
     private const string NearestNeighbourClassificationModelResultName = "Nearest neighbour classification solution";
    +    private const string WeightsParameterName = "Weights";
    +

     #region parameter properties
     public IFixedValueParameter<IntValue> KParameter {
       get { return (IFixedValueParameter<IntValue>)Parameters[KParameterName]; }
    +    }
    +    public IValueParameter<DoubleArray> WeightsParameter {
    +      get { return (IValueParameter<DoubleArray>)Parameters[WeightsParameterName]; }
     }
     #endregion
    …
         else KParameter.Value.Value = value;
       }
    +    }
    +    public DoubleArray Weights {
    +      get { return WeightsParameter.Value; }
    +      set { WeightsParameter.Value = value; }
     }
     #endregion
    …
       : base() {
       Parameters.Add(new FixedValueParameter<IntValue>(KParameterName, "The number of nearest neighbours to consider for regression.", new IntValue(3)));
    +      Parameters.Add(new OptionalValueParameter<DoubleArray>(WeightsParameterName, "Optional: use weights to specify individual scaling values for all features. If not set the weights are calculated automatically (each feature is scaled to unit variance)"));
       Problem = new ClassificationProblem();
     }
     [StorableHook(HookType.AfterDeserialization)]
    -    private void AfterDeserialization() { }
    +    private void AfterDeserialization() {
    +      // BackwardsCompatibility3.3
    +      #region Backwards compatible code, remove with 3.4
    +      if (!Parameters.ContainsKey(WeightsParameterName)) {
    +        Parameters.Add(new OptionalValueParameter<DoubleArray>(WeightsParameterName, "Optional: use weights to specify individual scaling values for all features. If not set the weights are calculated automatically (each feature is scaled to unit variance)"));
    +      }
    +      #endregion
    +    }

     public override IDeepCloneable Clone(Cloner cloner) {
    …
     #region nearest neighbour
     protected override void Run() {
    -      var solution = CreateNearestNeighbourClassificationSolution(Problem.ProblemData, K);
    +      double[] weights = null;
    +      if (Weights != null) weights = Weights.CloneAsArray();
    +      var solution = CreateNearestNeighbourClassificationSolution(Problem.ProblemData, K, weights);
       Results.Add(new Result(NearestNeighbourClassificationModelResultName, "The nearest neighbour classification solution.", solution));
     }

    -    public static IClassificationSolution CreateNearestNeighbourClassificationSolution(IClassificationProblemData problemData, int k) {
    +    public static IClassificationSolution CreateNearestNeighbourClassificationSolution(IClassificationProblemData problemData, int k, double[] weights = null) {
       var problemDataClone = (IClassificationProblemData)problemData.Clone();
    -      return new NearestNeighbourClassificationSolution(Train(problemDataClone, k), problemDataClone);
    +      return new NearestNeighbourClassificationSolution(Train(problemDataClone, k, weights), problemDataClone);
     }

    -    public static INearestNeighbourModel Train(IClassificationProblemData problemData, int k) {
    +    public static INearestNeighbourModel Train(IClassificationProblemData problemData, int k, double[] weights = null) {
       return new NearestNeighbourModel(problemData.Dataset,
         problemData.TrainingIndices,
    …
         problemData.TargetVariable,
         problemData.AllowedInputVariables,
    +        weights,
         problemData.ClassValues.ToArray());
     }
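
For reference, a minimal usage sketch of the extended API after this change (assumptions: an already loaded IClassificationProblemData instance named problemData; the weight values are made up for illustration):

    // Omitting the weights keeps the previous behaviour:
    // each feature is scaled to unit variance automatically.
    var unweighted = NearestNeighbourClassification
      .CreateNearestNeighbourClassificationSolution(problemData, k: 3);

    // Explicit per-feature scaling values, one entry per allowed input variable
    // (the values here are hypothetical).
    var weights = new double[] { 1.0, 0.5, 2.0 };
    var weighted = NearestNeighbourClassification
      .CreateNearestNeighbourClassificationSolution(problemData, k: 3, weights: weights);

When the algorithm is executed as an optimizer, the optional Weights parameter (a DoubleArray) is cloned to a double[] in Run() and forwarded in the same way.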