Timestamp: 03/01/13 18:32:26
Location: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization
Files: 1 added, 4 edited
Legend:
- Unmodified (context lines, shown below with a leading space)
- Added (shown with a leading +)
- Removed (shown with a leading -)
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/INcaInitializer.cs
r8471 → r9270

 namespace HeuristicLab.Algorithms.DataAnalysis {
-  public interface INCAInitializer : IItem {
+  public interface INcaInitializer : IOperator {
     /// <summary>
     /// Calculates an initial projection for the NCA to start from.
…
     /// <param name="data">The problem data that contains the AllowedInputVariables and TrainingIndices.</param>
     /// <param name="dimensions">The amount of columns in the matrix</param>
-    /// <returns>A flat representation of a matrix that is read row-wise and contains AllowedInputVariables * TrainingIndices numbers.</returns>
-    double[] Initialize(IClassificationProblemData data, int dimensions);
+    /// <returns>The matrix that projects the input variables into a lower dimensional space.</returns>
+    double[,] Initialize(IClassificationProblemData data, Scaling scaling, int dimensions);
   }
 }
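Below is a minimal sketch (not part of this changeset) of a concrete initializer under the revised contract: Initialize now receives the Scaling applied to the inputs and returns an [attributes, dimensions] matrix instead of a flattened array. The class name IdentityInitializer is hypothetical; the boilerplate mirrors the Lda/Pca initializers below, and it assumes the new NcaInitializer base class declares Initialize as an overridable member, as the other diffs suggest.

// Hypothetical example, not from r9270: a trivial initializer under the new interface.
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  [Item("Identity", "Initializes the matrix with ones on the main diagonal.")]
  [StorableClass]
  public sealed class IdentityInitializer : NcaInitializer {
    [StorableConstructor]
    private IdentityInitializer(bool deserializing) : base(deserializing) { }
    private IdentityInitializer(IdentityInitializer original, Cloner cloner) : base(original, cloner) { }
    public IdentityInitializer() : base() { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new IdentityInitializer(this, cloner);
    }

    public override double[,] Initialize(IClassificationProblemData data, Scaling scaling, int dimensions) {
      var attributes = data.AllowedInputVariables.Count();
      var matrix = new double[attributes, dimensions];   // one row per allowed input variable
      for (int i = 0; i < attributes && i < dimensions; i++)
        matrix[i, i] = 1.0;                              // project each of the first inputs onto its own axis
      return matrix;
    }
  }
}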
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/LdaInitializer.cs
r8471 → r9270

 #endregion

-using System.Collections.Generic;
 using System.Linq;
 using HeuristicLab.Common;
…
 [Item("LDA", "Initializes the matrix by performing a linear discriminant analysis.")]
 [StorableClass]
-public class LDAInitializer : Item, INCAInitializer {
+public class LdaInitializer : NcaInitializer {

   [StorableConstructor]
-  protected LDAInitializer(bool deserializing) : base(deserializing) { }
-  protected LDAInitializer(LDAInitializer original, Cloner cloner) : base(original, cloner) { }
-  public LDAInitializer() : base() { }
+  protected LdaInitializer(bool deserializing) : base(deserializing) { }
+  protected LdaInitializer(LdaInitializer original, Cloner cloner) : base(original, cloner) { }
+  public LdaInitializer() : base() { }

   public override IDeepCloneable Clone(Cloner cloner) {
-    return new LDAInitializer(this, cloner);
+    return new LdaInitializer(this, cloner);
   }

-  public double[] Initialize(IClassificationProblemData data, int dimensions) {
+  public override double[,] Initialize(IClassificationProblemData data, Scaling scaling, int dimensions) {
     var instances = data.TrainingIndices.Count();
     var attributes = data.AllowedInputVariables.Count();

     var ldaDs = new double[instances, attributes + 1];
-    int row, col = 0;
-    foreach (var variable in data.AllowedInputVariables) {
-      row = 0;
-      foreach (var value in data.Dataset.GetDoubleValues(variable, data.TrainingIndices)) {
-        ldaDs[row, col] = value;
-        row++;
+    int j = 0;
+    foreach (var a in data.AllowedInputVariables) {
+      int i = 0;
+      var sv = scaling.GetScaledValues(data.Dataset, a, data.TrainingIndices);
+      foreach (var v in sv) {
+        ldaDs[i++, j] = v;
       }
-      col++;
+      j++;
     }
-    row = 0;
-    var uniqueClasses = new Dictionary<double, int>();
-    foreach (var label in data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices)) {
-      if (!uniqueClasses.ContainsKey(label))
-        uniqueClasses[label] = uniqueClasses.Count;
-      ldaDs[row++, attributes] = label;
-    }
-    for (row = 0; row < instances; row++)
-      ldaDs[row, attributes] = uniqueClasses[ldaDs[row, attributes]];
+    j = 0;
+    foreach (var tv in data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices))
+      ldaDs[j++, attributes] = tv;
+
+    var uniqueClasses = data.Dataset.GetDoubleValues(data.TargetVariable, data.TrainingIndices).Distinct().Count();

     int info;
     double[,] matrix;
-    alglib.fisherldan(ldaDs, instances, attributes, uniqueClasses.Count, out info, out matrix);
+    alglib.fisherldan(ldaDs, instances, attributes, uniqueClasses, out info, out matrix);

-    var result = new double[attributes * dimensions];
-    for (int i = 0; i < attributes; i++)
-      for (int j = 0; j < dimensions; j++)
-        result[i * dimensions + j] = matrix[i, j];
-
-    return result;
+    return matrix;
   }
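The LDA initializer now hands the scaled inputs, the raw target values and the distinct class count straight to ALGLIB. A standalone sketch of that call follows; the four-instance, two-class data set is made up, and it assumes, per ALGLIB's documented contract, that the last column holds class labels encoded as 0..nclasses-1.

using System;

// Toy illustration of the fisherldan call that LdaInitializer delegates to.
public static class FisherLdaSketch {
  public static void Main() {
    // Rows: scaled training instances; last column: class label in 0..nclasses-1.
    double[,] ldaDs = new double[,] {
      { 1.0, 2.0, 0 },
      { 1.2, 1.9, 0 },
      { 3.0, 0.5, 1 },
      { 3.2, 0.4, 1 }
    };
    int info;
    double[,] w;                                  // columns span the discriminant directions
    alglib.fisherldan(ldaDs, 4, 2, 2, out info, out w);
    Console.WriteLine("info = " + info);          // info >= 1 means the decomposition succeeded
    // LdaInitializer returns w as the starting projection for NCA.
  }
}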
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/PcaInitializer.cs
r8471 → r9270

 [Item("PCA", "Initializes the matrix by performing a principal components analysis.")]
 [StorableClass]
-public sealed class PCAInitializer : Item, INCAInitializer {
+public sealed class PcaInitializer : NcaInitializer {

   [StorableConstructor]
-  private PCAInitializer(bool deserializing) : base(deserializing) { }
-  private PCAInitializer(PCAInitializer original, Cloner cloner) : base(original, cloner) { }
-  public PCAInitializer() : base() { }
+  private PcaInitializer(bool deserializing) : base(deserializing) { }
+  private PcaInitializer(PcaInitializer original, Cloner cloner) : base(original, cloner) { }
+  public PcaInitializer() : base() { }

   public override IDeepCloneable Clone(Cloner cloner) {
-    return new PCAInitializer(this, cloner);
+    return new PcaInitializer(this, cloner);
   }

-  public double[] Initialize(IClassificationProblemData data, int dimensions) {
+  public override double[,] Initialize(IClassificationProblemData data, Scaling scaling, int dimensions) {
     var instances = data.TrainingIndices.Count();
     var attributes = data.AllowedInputVariables.Count();

-    var pcaDs = new double[instances, attributes];
-    int col = 0;
-    foreach (var variable in data.AllowedInputVariables) {
-      int row = 0;
-      foreach (var value in data.Dataset.GetDoubleValues(variable, data.TrainingIndices)) {
-        pcaDs[row, col] = value;
-        row++;
-      }
-      col++;
-    }
+    var pcaDs = AlglibUtil.PrepareAndScaleInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices, scaling);

     int info;
…
     alglib.pcabuildbasis(pcaDs, instances, attributes, out info, out varianceValues, out matrix);

-    var result = new double[attributes * dimensions];
-    for (int i = 0; i < attributes; i++)
-      for (int j = 0; j < dimensions; j++)
-        result[i * dimensions + j] = matrix[i, j];
-
-    return result;
+    return matrix;
   }
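Likewise, the PCA variant now returns ALGLIB's full principal-component basis instead of copying it into a flat array. A standalone sketch of the underlying call, with a made-up 5x2 sample matrix:

using System;

// Toy illustration of the pcabuildbasis call that PcaInitializer delegates to.
public static class PcaBasisSketch {
  public static void Main() {
    // Rows: scaled training instances; columns: input variables (values are made up).
    double[,] pcaDs = new double[,] {
      { 2.5, 2.4 },
      { 0.5, 0.7 },
      { 2.2, 2.9 },
      { 1.9, 2.2 },
      { 3.1, 3.0 }
    };
    int info;
    double[] varianceValues;                      // variances along the principal axes, descending
    double[,] matrix;                             // attributes x attributes; columns are the principal axes
    alglib.pcabuildbasis(pcaDs, 5, 2, out info, out varianceValues, out matrix);
    Console.WriteLine("info = " + info + ", first axis variance = " + varianceValues[0]);
    // PcaInitializer returns 'matrix'; presumably only its leading 'dimensions'
    // columns are used as the starting projection.
  }
}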
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/RandomInitializer.cs
r8471 → r9270

 using HeuristicLab.Common;
 using HeuristicLab.Core;
-using HeuristicLab.Data;
+using HeuristicLab.Optimization;
 using HeuristicLab.Parameters;
 using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
 using HeuristicLab.Problems.DataAnalysis;
-using HeuristicLab.Random;

 namespace HeuristicLab.Algorithms.DataAnalysis {
   [Item("Random", "Initializes the matrix randomly.")]
   [StorableClass]
-  public class RandomInitializer : ParameterizedNamedItem, INCAInitializer {
-    private IValueParameter<IntValue> RandomParameter {
-      get { return (IValueParameter<IntValue>)Parameters["Seed"]; }
-    }
-    private IValueParameter<BoolValue> SetSeedRandomlyParameter {
-      get { return (IValueParameter<BoolValue>)Parameters["SetSeedRandomly"]; }
-    }
-
-    public int Seed {
-      get { return RandomParameter.Value.Value; }
-      set { RandomParameter.Value.Value = value; }
-    }
-
-    public bool SetSeedRandomly {
-      get { return SetSeedRandomlyParameter.Value.Value; }
-      set { SetSeedRandomlyParameter.Value.Value = value; }
+  public sealed class RandomInitializer : NcaInitializer, IStochasticOperator {
+    public ILookupParameter<IRandom> RandomParameter {
+      get { return (ILookupParameter<IRandom>)Parameters["Random"]; }
     }

     [StorableConstructor]
-    protected RandomInitializer(bool deserializing) : base(deserializing) { }
-    protected RandomInitializer(RandomInitializer original, Cloner cloner) : base(original, cloner) { }
+    private RandomInitializer(bool deserializing) : base(deserializing) { }
+    private RandomInitializer(RandomInitializer original, Cloner cloner) : base(original, cloner) { }
     public RandomInitializer()
       : base() {
-      Parameters.Add(new ValueParameter<IntValue>("Seed", "The seed for the random number generator.", new IntValue(0)));
-      Parameters.Add(new ValueParameter<BoolValue>("SetSeedRandomly", "Whether the seed should be randomized for each call.", new BoolValue(true)));
+      Parameters.Add(new LookupParameter<IRandom>("Random", "The random number generator to use."));
     }
…
     }

-    public double[] Initialize(IClassificationProblemData data, int dimensions) {
-      var instances = data.TrainingIndices.Count();
+    public override double[,] Initialize(IClassificationProblemData data, Scaling scaling, int dimensions) {
       var attributes = data.AllowedInputVariables.Count();

-      var random = new MersenneTwister();
-      if (SetSeedRandomly) Seed = random.Next();
-      random.Reset(Seed);
-
-      var range = data.AllowedInputVariables.Select(x => data.Dataset.GetDoubleValues(x).Max() - data.Dataset.GetDoubleValues(x).Min()).ToArray();
-      var matrix = new double[attributes * dimensions];
-      for (int i = 0; i < matrix.Length; i++)
-        matrix[i] = random.NextDouble() / range[i / dimensions];
+      var random = RandomParameter.ActualValue;
+      var matrix = new double[attributes, dimensions];
+      for (int i = 0; i < attributes; i++)
+        for (int j = 0; j < dimensions; j++)
+          matrix[i, j] = random.NextDouble();

       return matrix;
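The random initializer no longer manages its own seed parameters; it resolves an IRandom through the new "Random" lookup parameter at execution time. A minimal sketch of what Initialize now produces, driving it with a concrete generator directly (MersenneTwister and Reset are taken from the removed code; the seed 42 and the matrix sizes are arbitrary):

using System;
using HeuristicLab.Random;

// Toy illustration of the new random projection matrix, outside an operator scope.
public static class RandomInitSketch {
  public static void Main() {
    var random = new MersenneTwister();
    random.Reset(42);                             // inside the operator the generator arrives via the scope

    int attributes = 4, dimensions = 2;
    var matrix = new double[attributes, dimensions];
    for (int i = 0; i < attributes; i++)
      for (int j = 0; j < dimensions; j++)
        matrix[i, j] = random.NextDouble();       // uniform in [0,1); the old per-variable range scaling was dropped

    Console.WriteLine(matrix[0, 0]);
  }
}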