Changeset 8681
Timestamp: 09/23/12 00:31:49
Location: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca
Files: 2 edited
Legend: unchanged lines are shown indented, removed lines are prefixed with -, added lines with +.
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Matrix.cs (r8471 → r8681)

    public Matrix Transpose() {
  -   var result = new Matrix(Transpose(values, Columns, Rows), Columns, Rows);
  -   return result;
  +   return new Matrix(Transpose(values, Columns, Rows), Columns, Rows);
    }
  ...
  - public double VectorLength() {
  -   return Math.Sqrt(SquaredVectorLength());
  - }
  -
  - public double SquaredVectorLength() {
  -   if (Rows != 1) throw new ArgumentException("Length only works on vectors.");
  + public double EuclideanNorm() {
  +   return Math.Sqrt(SumOfSquares());
  + }
  +
  + public double SumOfSquares() {
      return values.Sum(x => x * x);
    }
  ...
    public Matrix OuterProduct(Matrix other) {
      if (Rows != 1 || other.Rows != 1) throw new ArgumentException("OuterProduct can only be applied to vectors.");
      return Transpose().Multiply(other);
    }
  +
  + public IEnumerable<double> ColumnSums() {
  +   return Transpose().RowSums();
  + }
  +
  + public IEnumerable<double> RowSums() {
  +   var sum = 0.0;
  +   int counter = 0;
  +   foreach (var v in values) {
  +     sum += v;
  +     counter++;
  +     if (counter == Rows) {
  +       yield return sum;
  +       sum = 0.0;
  +       counter = 0;
  +     }
  +   }
  + }
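The renamed helpers also drop the old single-row restriction: SumOfSquares now sums the squares of all entries, which is the squared Euclidean length of a 1×n row vector and, for a full matrix, the squared Frobenius norm (the quantity the new regularization term in NcaAlgorithm.cs below relies on). A minimal standalone sketch of these semantics, for illustration only and independent of the internal Matrix class:

    using System;
    using System.Linq;

    static class NormSketch {
      // Sum of squared entries of a flattened matrix: the squared Euclidean
      // length for a 1xN row vector, the squared Frobenius norm in general.
      static double SumOfSquares(double[] values) {
        return values.Sum(x => x * x);
      }

      static double EuclideanNorm(double[] values) {
        return Math.Sqrt(SumOfSquares(values));
      }

      static void Main() {
        Console.WriteLine(EuclideanNorm(new[] { 3.0, 4.0 }));          // 5 (row vector length)
        Console.WriteLine(SumOfSquares(new[] { 1.0, 2.0, 3.0, 4.0 })); // 30 (2x2 matrix, flattened)
      }
    }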
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaAlgorithm.cs (r8523 → r8681)

  namespace HeuristicLab.Algorithms.DataAnalysis {
  - internal delegate void Reporter(double quality, double[] coefficients);
  + internal delegate void Reporter(double quality, double[] coefficients, double[] gradients);
    /// <summary>
    /// Neighborhood Components Analysis
    /// </summary>
  - [Item("Neighborhood Components Analysis (NCA)", "Implementation of Neighborhood Components Analysis based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005. Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520.")]
  + [Item("Neighborhood Components Analysis (NCA)", @"Implementation of Neighborhood Components Analysis
  + based on the description of J. Goldberger, S. Roweis, G. Hinton, R. Salakhutdinov. 2005.
  + Neighbourhood Component Analysis. Advances in Neural Information Processing Systems, 17. pp. 513-520
  + with additional regularizations described in Z. Yang, J. Laaksonen. 2007.
  + Regularized Neighborhood Component Analysis. Lecture Notes in Computer Science, 4522. pp. 253-262.")]
    [Creatable("Data Analysis")]
    [StorableClass]
  ...
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters["Iterations"]; }
    }
  + public IFixedValueParameter<DoubleValue> RegularizationParameter {
  +   get { return (IFixedValueParameter<DoubleValue>)Parameters["Regularization"]; }
  + }
    #endregion

    #region Properties
  - private int K {
  + public int K {
      get { return KParameter.Value.Value; }
      set { KParameter.Value.Value = value; }
    }
  - private int Dimensions {
  + public int Dimensions {
      get { return DimensionsParameter.Value.Value; }
      set { DimensionsParameter.Value.Value = value; }
    }
  - private int NeighborSamples {
  + public int NeighborSamples {
      get { return NeighborSamplesParameter.Value.Value; }
      set { NeighborSamplesParameter.Value.Value = value; }
    }
  - private int Iterations {
  + public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
  + }
  + public double Regularization {
  +   get { return RegularizationParameter.Value.Value; }
  +   set { RegularizationParameter.Value.Value = value; }
    }
    #endregion
  ...
    public NcaAlgorithm()
      : base() {
  -   Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(1)));
  +   Parameters.Add(new FixedValueParameter<IntValue>("K", "The K for the nearest neighbor.", new IntValue(3)));
      Parameters.Add(new FixedValueParameter<IntValue>("Dimensions", "The number of dimensions that NCA should reduce the data to.", new IntValue(2)));
      Parameters.Add(new ConstrainedValueParameter<INCAInitializer>("Initialization", "Which method should be used to initialize the matrix. Typically LDA (linear discriminant analysis) should provide a good estimate."));
  -   Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(50)));
  -   Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(20)));
  +   Parameters.Add(new FixedValueParameter<IntValue>("NeighborSamples", "How many of the neighbors should be sampled in order to speed up the calculation. This should be at least the value of k and at most the number of training instances minus one.", new IntValue(60)));
  +   Parameters.Add(new FixedValueParameter<IntValue>("Iterations", "How many iterations the conjugate gradient (CG) method should be allowed to perform. The method might still terminate earlier if a local optimum has already been reached.", new IntValue(50)));
  +   Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));

      INCAInitializer defaultInitializer = null;
  ...
    }

  + [StorableHook(HookType.AfterDeserialization)]
  + private void AfterDeserialization() {
  +   if (!Parameters.ContainsKey("Regularization")) {
  +     Parameters.Add(new FixedValueParameter<DoubleValue>("Regularization", "A non-negative parameter which can be set to increase generalization and avoid overfitting. If set to 0 the algorithm is similar to NCA as proposed by Goldberger et al.", new DoubleValue(0)));
  +   }
  + }
  +
    public override void Prepare() {
      if (Problem != null) base.Prepare();
  ...
      var clonedProblem = (IClassificationProblemData)Problem.ProblemData.Clone();
  -   var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
  -   Results.Add(new Result("ClassificationSolution", "The classification solution.", model.CreateClassificationSolution(clonedProblem)));
  - }
  -
  - public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
  +   var model = Train(clonedProblem, K, Dimensions, NeighborSamples, Regularization, Iterations, initializer.Initialize(clonedProblem, Dimensions), ReportQuality, CancellationToken.None);
  +   var solution = model.CreateClassificationSolution(clonedProblem);
  +   if (!Results.ContainsKey("ClassificationSolution"))
  +     Results.Add(new Result("ClassificationSolution", "The classification solution.", solution));
  +   else Results["ClassificationSolution"].Value = solution;
  + }
  +
  + public static INcaClassificationSolution CreateClassificationSolution(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
      var clonedProblem = (IClassificationProblemData)data.Clone();
  -   var model = Train(clonedProblem, k, dimensions, neighborSamples, iterations, initializer);
  +   var model = Train(clonedProblem, k, dimensions, neighborSamples, regularization, iterations, initializer);
      return model.CreateClassificationSolution(clonedProblem);
    }

  - public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, int iterations, INCAInitializer initializer) {
  -   return Train(problemData, k, dimensions, neighborSamples, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
  - }
  -
  - public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, int iterations, double[,] initalMatrix) {
  + public static INcaModel Train(IClassificationProblemData problemData, int k, int dimensions, int neighborSamples, double regularization, int iterations, INCAInitializer initializer) {
  +   return Train(problemData, k, dimensions, neighborSamples, regularization, iterations, initializer.Initialize(problemData, dimensions), null, CancellationToken.None);
  + }
  +
  + public static INcaModel Train(IClassificationProblemData problemData, int k, int neighborSamples, double regularization, int iterations, double[,] initalMatrix) {
      var matrix = new double[initalMatrix.Length];
      for (int i = 0; i < initalMatrix.GetLength(0); i++)
        for (int j = 0; j < initalMatrix.GetLength(1); j++)
          matrix[i * initalMatrix.GetLength(1) + j] = initalMatrix[i, j];
  -   return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, iterations, matrix, null, CancellationToken.None);
  - }
  -
  - private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
  +   return Train(problemData, k, initalMatrix.GetLength(1), neighborSamples, regularization, iterations, matrix, null, CancellationToken.None);
  + }
  +
  + private static INcaModel Train(IClassificationProblemData data, int k, int dimensions, int neighborSamples, double regularization, int iterations, double[] matrix, Reporter reporter, CancellationToken cancellation) {
      var scaling = new Scaling(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
      var scaledData = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);
      var classes = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).ToArray();
      var attributes = scaledData.GetLength(1);
  -
  -   var penalties = new Dictionary<double, Dictionary<double, double>>();
  -   foreach (var c in data.ClassValues) {
  -     penalties[c] = new Dictionary<double, double>();
  -     foreach (var r in data.ClassValues)
  -       penalties[c][r] = data.GetClassificationPenalty(c, r);
  -   }

      alglib.mincgstate state;
  ...
      alglib.mincgsetcond(state, 0, 0, 0, iterations);
      alglib.mincgsetxrep(state, true);
  +   //alglib.mincgsetgradientcheck(state, 0.01);
      int neighborSampleSize = neighborSamples;
  -   Optimize(state, scaledData, classes, penalties, dimensions, neighborSampleSize, cancellation, reporter);
  +   Optimize(state, scaledData, classes, dimensions, neighborSampleSize, regularization, cancellation, reporter);
      alglib.mincgresults(state, out matrix, out rep);
  +   if (rep.terminationtype == -7) throw new InvalidOperationException("Gradient verification failed.");

      var transformationMatrix = new double[attributes, dimensions];
  ...
    }

  - private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize, CancellationToken cancellation, Reporter reporter) {
  + private static void Optimize(alglib.mincgstate state, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda, CancellationToken cancellation, Reporter reporter) {
      while (alglib.mincgiteration(state)) {
        if (cancellation.IsCancellationRequested) break;
        if (state.needfg) {
  -       Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, penalties, dimensions, neighborSampleSize);
  +       Gradient(state.x, ref state.innerobj.f, state.innerobj.g, data, classes, dimensions, neighborSampleSize, lambda);
          continue;
        }
        if (state.innerobj.xupdated) {
          if (reporter != null)
  -         reporter(state.innerobj.f, state.innerobj.x);
  +         reporter(state.innerobj.f, state.innerobj.x, state.innerobj.g);
          continue;
        }
  ...
    }

  - private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, Dictionary<double, Dictionary<double, double>> penalties, int dimensions, int neighborSampleSize) {
  + private static void Gradient(double[] A, ref double func, double[] grad, double[,] data, double[] classes, int dimensions, int neighborSampleSize, double lambda) {
      var instances = data.GetLength(0);
      var attributes = data.GetLength(1);
  ...
        }
        var kVector = new Matrix(GetRow(data, k));
  -     transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SquaredVectorLength());
  -   }
  -   var sample = transformedDistances.OrderByDescending(x => x.Value).Take(neighborSampleSize).ToArray();
  -   var normalization = sample.Sum(x => x.Value);
  -   if (normalization > 0) {
  -     foreach (var s in sample) {
  -       if (s.Value <= 0) break;
  -       alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
  -     }
  +     transformedDistances[k] = Math.Exp(-iVector.Multiply(AMatrix).Subtract(kVector.Multiply(AMatrix)).SumOfSquares());
  +   }
  +   var normalization = transformedDistances.Sum(x => x.Value);
  +   if (normalization <= 0) continue;
  +   foreach (var s in transformedDistances.Where(x => x.Value > 0).OrderByDescending(x => x.Value).Take(neighborSampleSize)) {
  +     alglib.sparseset(probabilities, i, s.Key, s.Value / normalization);
      }
    }
  ...
      double val;
      var pi = new double[instances];
  -   func = 0;
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
  -     double vp = val * penalties[classes[r]][classes[c]];
  -     pi[r] += vp;
  -     func += vp;
  -   }
  -
  -   t0 = 0; t1 = 0;
  +     if (classes[r].IsAlmost(classes[c])) {
  +       pi[r] += val;
  +     }
  +   }
  +
      var innerSum = new double[attributes, attributes];
      while (alglib.sparseenumerate(probabilities, ref t0, ref t1, out r, out c, out val)) {
        var vector = new Matrix(GetRow(data, r)).Subtract(new Matrix(GetRow(data, c)));
        vector.OuterProduct(vector).Multiply(val * pi[r]).AddTo(innerSum);
  -     vector.OuterProduct(vector).Multiply(-val * penalties[classes[r]][classes[c]]).AddTo(innerSum);
  -   }
  +
  +     if (classes[r].IsAlmost(classes[c])) {
  +       vector.OuterProduct(vector).Multiply(-val).AddTo(innerSum);
  +     }
  +   }
  +
  +   func = -pi.Sum() + lambda * AMatrix.SumOfSquares();

      r = 0;
  -   var newGrad = AMatrix.Multiply(2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
  +   var newGrad = AMatrix.Multiply(-2.0).Transpose().Multiply(new Matrix(innerSum)).Transpose();
      foreach (var g in newGrad) {
  -     grad[r++] = g;
  +     grad[r] = g + lambda * 2 * A[r];
  +     r++;
      }
    }

  - private void ReportQuality(double func, double[] coefficients) {
  + private void ReportQuality(double func, double[] coefficients, double[] gradients) {
      var instances = Problem.ProblemData.TrainingIndices.Count();
      DataTable qualities;
      if (!Results.ContainsKey("Optimization")) {
        qualities = new DataTable("Optimization");
  -     qualities.Rows.Add(new DataRow("Penalty", string.Empty));
  +     qualities.Rows.Add(new DataRow("Quality", string.Empty));
        Results.Add(new Result("Optimization", qualities));
      } else qualities = (DataTable)Results["Optimization"].Value;
  -   qualities.Rows["Penalty"].Values.Add(func / instances);
  -
  -   if (!Results.ContainsKey("Penalty")) {
  -     Results.Add(new Result("Penalty", new DoubleValue(func / instances)));
  -   } else ((DoubleValue)Results["Penalty"].Value).Value = func / instances;
  +   qualities.Rows["Quality"].Values.Add(-func / instances);
  +
  +   string[] attributNames = Problem.ProblemData.AllowedInputVariables.ToArray();
  +   if (gradients != null) {
  +     DataTable grads;
  +     if (!Results.ContainsKey("Gradients")) {
  +       grads = new DataTable("Gradients");
  +       for (int i = 0; i < gradients.Length; i++)
  +         grads.Rows.Add(new DataRow(attributNames[i / Dimensions] + "-" + (i % Dimensions), string.Empty));
  +       Results.Add(new Result("Gradients", grads));
  +     } else grads = (DataTable)Results["Gradients"].Value;
  +     for (int i = 0; i < gradients.Length; i++)
  +       grads.Rows[attributNames[i / Dimensions] + "-" + (i % Dimensions)].Values.Add(gradients[i]);
  +   }
  +
  +   if (!Results.ContainsKey("Quality")) {
  +     Results.Add(new Result("Quality", new DoubleValue(-func / instances)));
  +   } else ((DoubleValue)Results["Quality"].Value).Value = -func / instances;
  +
  +   var attributes = attributNames.Length;
  +   var transformationMatrix = new double[attributes, Dimensions];
  +   var counter = 0;
  +   for (var i = 0; i < attributes; i++)
  +     for (var j = 0; j < Dimensions; j++)
  +       transformationMatrix[i, j] = coefficients[counter++];
  +
  +   var scaling = new Scaling(Problem.ProblemData.Dataset, attributNames, Problem.ProblemData.TrainingIndices);
  +   var model = new NcaModel(K, transformationMatrix, Problem.ProblemData.Dataset, Problem.ProblemData.TrainingIndices, Problem.ProblemData.TargetVariable, attributNames, scaling, Problem.ProblemData.ClassValues.ToArray());
  +
  +   IClassificationSolution solution = model.CreateClassificationSolution(Problem.ProblemData);
  +   if (!Results.ContainsKey("ClassificationSolution")) {
  +     Results.Add(new Result("ClassificationSolution", solution));
  +   } else {
  +     Results["ClassificationSolution"].Value = solution;
  +   }
    }
  ...
        yield return data[row, i];
      }
  +
    }
  }
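Background on the new objective (reconstructed from the two papers cited in the Item description; the changeset itself only records the code): writing x_ij = x_i - x_j and c_i for the class of instance i, Gradient now minimizes the negative of the regularized NCA objective,

\[
p_{ij} = \frac{\exp(-\lVert Ax_i - Ax_j \rVert^2)}{\sum_{k \neq i} \exp(-\lVert Ax_i - Ax_k \rVert^2)}, \qquad
p_i = \sum_{j:\,c_j = c_i} p_{ij},
\]
\[
f(A) = -\sum_i p_i + \lambda \lVert A \rVert_F^2, \qquad
\frac{\partial f}{\partial A} = -2A \sum_i \Bigl( p_i \sum_k p_{ik}\, x_{ik} x_{ik}^\top - \sum_{j:\,c_j = c_i} p_{ij}\, x_{ij} x_{ij}^\top \Bigr) + 2\lambda A.
\]

This maps directly onto the code: pi[r] accumulates the same-class probabilities (replacing the removed penalty matrix), innerSum is the bracketed term, func is f(A), and grad[r] = g + lambda * 2 * A[r] adds the derivative of the regularization term. With Regularization (lambda) set to 0 the objective reduces to plain NCA as proposed by Goldberger et al.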