Changeset 16723 for branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegression.cs
- Timestamp: 03/28/19 16:54:20 (6 years ago)
- Location: branches/2521_ProblemRefactoring
- Files: 4 edited
- branches/2521_ProblemRefactoring (Property svn:mergeinfo changed)
- branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis (Property svn:mergeinfo changed)
- branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4 (Property svn:mergeinfo changed)
branches/2521_ProblemRefactoring/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourRegression.cs
Diff from r16692 to r16723 (lines prefixed with - were removed, + were added, unprefixed lines are unchanged context, … marks elided context):

  #region License Information
  /* HeuristicLab
-  * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
+  * Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
   *
   * This file is part of HeuristicLab.
  …
  using HeuristicLab.Optimization;
  using HeuristicLab.Parameters;
- using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
+ using HEAL.Attic;
  using HeuristicLab.Problems.DataAnalysis;
  …
  [Item("Nearest Neighbour Regression (kNN)", "Nearest neighbour regression data analysis algorithm (wrapper for ALGLIB).")]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 150)]
- [StorableClass]
+ [StorableType("3F940BE0-4F44-4F7F-A3EE-E47423C7F22D")]
  public sealed class NearestNeighbourRegression : FixedDataAnalysisAlgorithm<IRegressionProblem> {
    private const string KParameterName = "K";
    private const string NearestNeighbourRegressionModelResultName = "Nearest neighbour regression solution";
    private const string WeightsParameterName = "Weights";
+   private const string SelfMatchParameterName = "SelfMatch";

    #region parameter properties
  …
      get { return (IFixedValueParameter<IntValue>)Parameters[KParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> SelfMatchParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[SelfMatchParameterName]; }
+   }
    public IValueParameter<DoubleArray> WeightsParameter {
      get { return (IValueParameter<DoubleArray>)Parameters[WeightsParameterName]; }
  …
      }
    }
+   public bool SelfMatch {
+     get { return SelfMatchParameter.Value.Value; }
+     set { SelfMatchParameter.Value.Value = value; }
+   }
    public DoubleArray Weights {
      get { return WeightsParameter.Value; }
  …

    [StorableConstructor]
-   private NearestNeighbourRegression(bool deserializing) : base(deserializing) { }
+   private NearestNeighbourRegression(StorableConstructorFlag _) : base(_) { }
    private NearestNeighbourRegression(NearestNeighbourRegression original, Cloner cloner)
      : base(original, cloner) {
  …
      Parameters.Add(new FixedValueParameter<IntValue>(KParameterName, "The number of nearest neighbours to consider for regression.", new IntValue(3)));
      Parameters.Add(new OptionalValueParameter<DoubleArray>(WeightsParameterName, "Optional: use weights to specify individual scaling values for all features. If not set the weights are calculated automatically (each feature is scaled to unit variance)"));
+     Parameters.Add(new FixedValueParameter<BoolValue>(SelfMatchParameterName, "Should we use equal points for classification?", new BoolValue(false)));
      Problem = new RegressionProblem();
    }
  …
      if (!Parameters.ContainsKey(WeightsParameterName)) {
        Parameters.Add(new OptionalValueParameter<DoubleArray>(WeightsParameterName, "Optional: use weights to specify individual scaling values for all features. If not set the weights are calculated automatically (each feature is scaled to unit variance)"));
+     }
+     if (!Parameters.ContainsKey(SelfMatchParameterName)) {
+       Parameters.Add(new FixedValueParameter<BoolValue>(SelfMatchParameterName, "Should we use equal points for classification?", new BoolValue(false)));
      }
      #endregion
  …
      double[] weights = null;
      if (Weights != null) weights = Weights.CloneAsArray();
-     var solution = CreateNearestNeighbourRegressionSolution(Problem.ProblemData, K, weights);
+     var solution = CreateNearestNeighbourRegressionSolution(Problem.ProblemData, K, SelfMatch, weights);
      Results.Add(new Result(NearestNeighbourRegressionModelResultName, "The nearest neighbour regression solution.", solution));
    }

-   public static IRegressionSolution CreateNearestNeighbourRegressionSolution(IRegressionProblemData problemData, int k, double[] weights = null) {
+   public static IRegressionSolution CreateNearestNeighbourRegressionSolution(IRegressionProblemData problemData, int k, bool selfMatch = false, double[] weights = null) {
      var clonedProblemData = (IRegressionProblemData)problemData.Clone();
-     return new NearestNeighbourRegressionSolution(Train(problemData, k, weights), clonedProblemData);
+     return new NearestNeighbourRegressionSolution(Train(problemData, k, selfMatch, weights), clonedProblemData);
    }

-   public static INearestNeighbourModel Train(IRegressionProblemData problemData, int k, double[] weights = null) {
+   public static INearestNeighbourModel Train(IRegressionProblemData problemData, int k, bool selfMatch = false, double[] weights = null) {
      return new NearestNeighbourModel(problemData.Dataset,
        problemData.TrainingIndices,
        k,
+       selfMatch,
        problemData.TargetVariable,
        problemData.AllowedInputVariables,
  …
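In summary, this changeset adds an optional SelfMatch flag to the algorithm and threads it through the static helpers CreateNearestNeighbourRegressionSolution and Train, and it migrates the persistence attributes from the old StorableClass infrastructure to HEAL.Attic's StorableType. Existing callers keep compiling because selfMatch defaults to false. The sketch below shows how the updated static API might be called after r16723; only the CreateNearestNeighbourRegressionSolution call reflects this changeset, while the class name KnnSelfMatchExample, the variable names, and the small in-memory Dataset/RegressionProblemData setup are assumptions about the surrounding HeuristicLab 3.4 API, not part of the diff.

using System;
using HeuristicLab.Algorithms.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis;

// Hypothetical example class, not contained in the changeset.
public static class KnnSelfMatchExample {
  public static void Run() {
    // Tiny in-memory dataset with two inputs (x1, x2) and a target (y);
    // the Dataset/RegressionProblemData constructors are assumed from HeuristicLab 3.4.
    var dataset = new Dataset(
      new[] { "x1", "x2", "y" },
      new double[,] {
        { 1.0, 2.0, 3.0 },
        { 2.0, 1.0, 3.1 },
        { 3.0, 4.0, 7.2 },
        { 4.0, 3.0, 6.9 },
        { 5.0, 5.0, 10.1 },
        { 6.0, 4.0, 9.8 },
      });
    var problemData = new RegressionProblemData(dataset, new[] { "x1", "x2" }, "y");

    // New optional selfMatch argument introduced in r16723: when false (the default),
    // a training point is not matched against itself during prediction.
    var solution = NearestNeighbourRegression.CreateNearestNeighbourRegressionSolution(
      problemData, k: 3, selfMatch: false);

    Console.WriteLine(solution.Name);
  }
}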