Changeset 14843 for trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
- Timestamp: 04/11/17 15:55:44
- Location: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
- Files: 1 deleted, 21 edited
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r14400 → r14843

      try {
        CalculateModel(ds, rows, scaleInputs);
-     }
-     catch (alglib.alglibexception ae) {
+     } catch (alglib.alglibexception ae) {
        // wrap exception so that calling code doesn't have to know about alglib implementation
        throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
…
      private static double[,] GetData(IDataset ds, IEnumerable<string> allowedInputs, IEnumerable<int> rows, Scaling scaling) {
        if (scaling != null) {
-         return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputs, rows, scaling);
+         // TODO: completely remove Scaling class
+         List<ITransformation<double>> transformations = new List<ITransformation<double>>();
+
+         foreach (var varName in allowedInputs) {
+           double min;
+           double max;
+           scaling.GetScalingParameters(varName, out min, out max);
+           var add = -min / (max - min);
+           var mult = 1.0 / (max - min);
+           transformations.Add(new LinearTransformation(allowedInputs) { Addend = add, Multiplier = mult });
+         }
+         return ds.ToArray(allowedInputs, transformations, rows);
        } else {
-         return AlglibUtil.PrepareInputMatrix(ds, allowedInputs, rows);
+         return ds.ToArray(allowedInputs, rows);
        }
      }
…
      return Enumerable.Range(0, newN)
        .Select(i => ms[i] + Util.ScalarProd(Ks[i], alpha));
-     }
-     catch (alglib.alglibexception ae) {
+     } catch (alglib.alglibexception ae) {
        // wrap exception so that calling code doesn't have to know about alglib implementation
        throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
…
      }
      return kss;
-     }
-     catch (alglib.alglibexception ae) {
+     } catch (alglib.alglibexception ae) {
        // wrap exception so that calling code doesn't have to know about alglib implementation
        throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
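The new scaling code above encodes min/max normalization x' = (x - min) / (max - min) as a LinearTransformation with Multiplier = 1 / (max - min) and Addend = -min / (max - min), since x * mult + add expands to exactly that formula. A minimal standalone sketch of the arithmetic only (the values below are illustrative and not part of the changeset):

  using System;

  // Hedged sketch: checks that the Multiplier/Addend pair used in GetData reproduces
  // the usual (x - min) / (max - min) scaling. Plain C#, no HeuristicLab types.
  class ScalingSketch {
    static void Main() {
      double min = 2.0, max = 10.0;               // per-variable parameters formerly held by Scaling
      double mult = 1.0 / (max - min);            // Multiplier of the LinearTransformation
      double add = -min / (max - min);            // Addend of the LinearTransformation
      double x = 6.0;                             // a raw input value
      double scaledOld = (x - min) / (max - min); // old formula
      double scaledNew = x * mult + add;          // what ds.ToArray applies per transformation
      Console.WriteLine($"{scaledOld} == {scaledNew}"); // both print 0.5
    }
  }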
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj
r14826 → r14843

        <SubType>Code</SubType>
      </Compile>
-     <Compile Include="Linear\AlglibUtil.cs" />
-     <Compile Include="Linear\Scaling.cs" />
      <Compile Include="Linear\LinearDiscriminantAnalysis.cs" />
      <Compile Include="Linear\LinearRegression.cs">
…
      <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" />
      <Compile Include="Linear\MultinomialLogitModel.cs" />
+     <Compile Include="Linear\Scaling.cs" />
      <Compile Include="MctsSymbolicRegression\Automaton.cs" />
      <Compile Include="MctsSymbolicRegression\CodeGenerator.cs" />
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs
r14826 → r14843

      var doubleVariableNames = allowedInputVariables.Where(dataset.VariableHasType<double>).ToArray();
      var factorVariableNames = allowedInputVariables.Where(dataset.VariableHasType<string>).ToArray();
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, doubleVariableNames.Concat(new string[] { targetVariable }), rows);
+     double[,] inputMatrix = dataset.ToArray(doubleVariableNames.Concat(new string[] { targetVariable }), rows);

-     var factorVariables = AlglibUtil.GetFactorVariableValues(dataset, factorVariableNames, rows);
-     double[,] factorMatrix = AlglibUtil.PrepareInputMatrix(dataset, factorVariables, rows);
+     var factorVariables = dataset.GetFactorVariableValues(factorVariableNames, rows);
+     var factorMatrix = dataset.ToArray(factorVariables, rows);

      inputMatrix = factorMatrix.HorzCat(inputMatrix);
…
      if (info < 1) throw new ArgumentException("Error in calculation of linear discriminant analysis solution");

-     ISymbolicExpressionTree tree = new SymbolicExpressionTree(new ProgramRootSymbol().CreateTreeNode());
-     ISymbolicExpressionTreeNode startNode = new StartSymbol().CreateTreeNode();
-     tree.Root.AddSubtree(startNode);
-     ISymbolicExpressionTreeNode addition = new Addition().CreateTreeNode();
-     startNode.AddSubtree(addition);
-
-     int col = 0;
-     foreach (var kvp in factorVariables) {
-       var varName = kvp.Key;
-       foreach (var cat in kvp.Value) {
-         BinaryFactorVariableTreeNode vNode =
-           (BinaryFactorVariableTreeNode)new HeuristicLab.Problems.DataAnalysis.Symbolic.BinaryFactorVariable().CreateTreeNode();
-         vNode.VariableName = varName;
-         vNode.VariableValue = cat;
-         vNode.Weight = w[col];
-         addition.AddSubtree(vNode);
-         col++;
-       }
-     }
-     foreach (string column in doubleVariableNames) {
-       VariableTreeNode vNode = (VariableTreeNode)new HeuristicLab.Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
-       vNode.VariableName = column;
-       vNode.Weight = w[col];
-       addition.AddSubtree(vNode);
-       col++;
-     }
+     var nFactorCoeff = factorMatrix.GetLength(1);
+     var tree = LinearModelToTreeConverter.CreateTree(factorVariables, w.Take(nFactorCoeff).ToArray(),
+       doubleVariableNames, w.Skip(nFactorCoeff).Take(doubleVariableNames.Length).ToArray());

      var model = CreateDiscriminantFunctionModel(tree, new SymbolicDataAnalysisExpressionTreeLinearInterpreter(), problemData, rows);
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearRegression.cs
r14826 → r14843

      var doubleVariables = allowedInputVariables.Where(dataset.VariableHasType<double>);
      var factorVariableNames = allowedInputVariables.Where(dataset.VariableHasType<string>);
-     var factorVariables = AlglibUtil.GetFactorVariableValues(dataset, factorVariableNames, rows);
-     double[,] binaryMatrix = AlglibUtil.PrepareInputMatrix(dataset, factorVariables, rows);
-     double[,] doubleVarMatrix = AlglibUtil.PrepareInputMatrix(dataset, doubleVariables.Concat(new string[] { targetVariable }), rows);
+     var factorVariables = dataset.GetFactorVariableValues(factorVariableNames, rows);
+     double[,] binaryMatrix = dataset.ToArray(factorVariables, rows);
+     double[,] doubleVarMatrix = dataset.ToArray(doubleVariables.Concat(new string[] { targetVariable }), rows);
      var inputMatrix = binaryMatrix.HorzCat(doubleVarMatrix);

…
      alglib.lrunpack(lm, out coefficients, out nFeatures);

-     ISymbolicExpressionTree tree = new SymbolicExpressionTree(new ProgramRootSymbol().CreateTreeNode());
-     ISymbolicExpressionTreeNode startNode = new StartSymbol().CreateTreeNode();
-     tree.Root.AddSubtree(startNode);
-     ISymbolicExpressionTreeNode addition = new Addition().CreateTreeNode();
-     startNode.AddSubtree(addition);
-
-     int col = 0;
-     foreach (var kvp in factorVariables) {
-       var varName = kvp.Key;
-       foreach (var cat in kvp.Value) {
-         BinaryFactorVariableTreeNode vNode =
-           (BinaryFactorVariableTreeNode)new HeuristicLab.Problems.DataAnalysis.Symbolic.BinaryFactorVariable().CreateTreeNode();
-         vNode.VariableName = varName;
-         vNode.VariableValue = cat;
-         vNode.Weight = coefficients[col];
-         addition.AddSubtree(vNode);
-         col++;
-       }
-     }
-     foreach (string column in doubleVariables) {
-       VariableTreeNode vNode = (VariableTreeNode)new HeuristicLab.Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode();
-       vNode.VariableName = column;
-       vNode.Weight = coefficients[col];
-       addition.AddSubtree(vNode);
-       col++;
-     }
-
-     ConstantTreeNode cNode = (ConstantTreeNode)new Constant().CreateTreeNode();
-     cNode.Value = coefficients[coefficients.Length - 1];
-     addition.AddSubtree(cNode);
-
+     int nFactorCoeff = binaryMatrix.GetLength(1);
+     int nVarCoeff = doubleVariables.Count();
+     var tree = LinearModelToTreeConverter.CreateTree(factorVariables, coefficients.Take(nFactorCoeff).ToArray(),
+       doubleVariables.ToArray(), coefficients.Skip(nFactorCoeff).Take(nVarCoeff).ToArray(),
+       @const: coefficients[nFeatures]);
+
      SymbolicRegressionSolution solution = new SymbolicRegressionSolution(new SymbolicRegressionModel(problemData.TargetVariable, tree, new SymbolicDataAnalysisExpressionTreeLinearInterpreter()), (IRegressionProblemData)problemData.Clone());
      solution.Model.Name = "Linear Regression Model";
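Both the removed hand-built expression tree and the new LinearModelToTreeConverter.CreateTree call assume the coefficient layout used in this changeset: one weight per binary factor column first, then one weight per double input variable, and the intercept at index nFeatures. A small standalone sketch of that slicing with plain arrays (illustrative values only, not HeuristicLab API):

  using System;
  using System.Linq;

  // Hedged sketch: the Take/Skip slicing of the unpacked coefficient vector
  // as used when building the linear regression tree.
  class CoefficientLayoutSketch {
    static void Main() {
      // layout assumed above: factor weights, then variable weights, then intercept
      double[] coefficients = { 0.5, -0.5, 1.2, 3.4, 0.7 };
      int nFactorCoeff = 2;   // number of binary factor columns (binaryMatrix.GetLength(1))
      int nVarCoeff = 2;      // number of double input variables
      int nFeatures = nFactorCoeff + nVarCoeff;

      double[] factorWeights = coefficients.Take(nFactorCoeff).ToArray();                   // {0.5, -0.5}
      double[] variableWeights = coefficients.Skip(nFactorCoeff).Take(nVarCoeff).ToArray(); // {1.2, 3.4}
      double intercept = coefficients[nFeatures];                                           // 0.7

      Console.WriteLine(string.Join(", ", factorWeights));
      Console.WriteLine(string.Join(", ", variableWeights));
      Console.WriteLine(intercept);
    }
  }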
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitClassification.cs
r14826 → r14843

      var factorVariableNames = problemData.AllowedInputVariables.Where(dataset.VariableHasType<string>);
      IEnumerable<int> rows = problemData.TrainingIndices;
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, doubleVariableNames.Concat(new string[] { targetVariable }), rows);
+     double[,] inputMatrix = dataset.ToArray(doubleVariableNames.Concat(new string[] { targetVariable }), rows);

-     var factorVariableValues = AlglibUtil.GetFactorVariableValues(dataset, factorVariableNames, rows);
-     var factorMatrix = AlglibUtil.PrepareInputMatrix(dataset, factorVariableValues, rows);
+     var factorVariableValues = dataset.GetFactorVariableValues(factorVariableNames, rows);
+     var factorMatrix = dataset.ToArray(factorVariableValues, rows);
      inputMatrix = factorMatrix.HorzCat(inputMatrix);
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs
r14826 → r14843

      public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {

-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
-       double[,] factorData = AlglibUtil.PrepareInputMatrix(dataset, factorVariables, rows);
+       double[,] inputData = dataset.ToArray(allowedInputVariables, rows);
+       double[,] factorData = dataset.ToArray(factorVariables, rows);

        inputData = factorData.HorzCat(inputData);
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/Scaling.cs
r14400 → r14843

    namespace HeuristicLab.Algorithms.DataAnalysis {
+     [Obsolete("Use transformation classes in Problems.DataAnalysis instead")]
      [StorableClass]
      [Item(Name = "Scaling", Description = "Contains information about scaling of variables for data-analysis algorithms.")]
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/LdaInitializer.cs
r14400 → r14843

      var attributes = data.AllowedInputVariables.Count();

-     var ldaDs = AlglibUtil.PrepareInputMatrix(data.Dataset,
+     var ldaDs = data.Dataset.ToArray(
        data.AllowedInputVariables.Concat(data.TargetVariable.ToEnumerable()),
        data.TrainingIndices);

      // map class values to sequential natural numbers (required by alglib)
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/PcaInitializer.cs
r14400 → r14843

      var attributes = data.AllowedInputVariables.Count();

-     var pcaDs = AlglibUtil.PrepareInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices);
+     var pcaDs = data.Dataset.ToArray(data.AllowedInputVariables, data.TrainingIndices);

      int info;
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaGradientCalculator.cs
r14400 → r14843

      }

-     var data = AlglibUtil.PrepareInputMatrix(problemData.Dataset, problemData.AllowedInputVariables,
+     var data = problemData.Dataset.ToArray(problemData.AllowedInputVariables,
        problemData.TrainingIndices);
      var classes = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).ToArray();
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaModel.cs
r14400 → r14843

      public double[,] Reduce(IDataset dataset, IEnumerable<int> rows) {
-       var data = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+       var data = dataset.ToArray(allowedInputVariables, rows);

        var targets = dataset.GetDoubleValues(TargetVariable, rows).ToArray();
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs
r14826 → r14843

      if (IsCompatibilityLoaded) {
        // no scaling
-       inputMatrix = AlglibUtil.PrepareInputMatrix(dataset,
+       inputMatrix = dataset.ToArray(
          this.allowedInputVariables.Concat(new string[] { targetVariable }),
          rows);
…
      private static double[,] CreateScaledData(IDataset dataset, IEnumerable<string> variables, IEnumerable<int> rows, double[] offsets, double[] factors) {
-       var x = new double[rows.Count(), variables.Count()];
-       var colIdx = 0;
-       foreach (var variableName in variables) {
-         var rowIdx = 0;
-         foreach (var val in dataset.GetDoubleValues(variableName, rows)) {
-           x[rowIdx, colIdx] = (val + offsets[colIdx]) * factors[colIdx];
-           rowIdx++;
-         }
-         colIdx++;
-       }
-       return x;
+       var transforms =
+         variables.Select(
+           (_, colIdx) =>
+             new LinearTransformation(variables) { Addend = offsets[colIdx] * factors[colIdx], Multiplier = factors[colIdx] });
+       return dataset.ToArray(variables, transforms, rows);
      }
…
        double[,] inputData;
        if (IsCompatibilityLoaded) {
-         inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+         inputData = dataset.ToArray(allowedInputVariables, rows);
        } else {
          inputData = CreateScaledData(dataset, allowedInputVariables, rows, offsets, weights);
…
        double[,] inputData;
        if (IsCompatibilityLoaded) {
-         inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+         inputData = dataset.ToArray(allowedInputVariables, rows);
        } else {
          inputData = CreateScaledData(dataset, allowedInputVariables, rows, offsets, weights);
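In the rewritten CreateScaledData, each LinearTransformation uses Multiplier = factors[colIdx] and Addend = offsets[colIdx] * factors[colIdx], which reproduces the removed per-value loop because (val + offset) * factor = val * factor + offset * factor. A tiny sketch checking that identity (illustrative values only, no HeuristicLab types):

  using System;

  // Hedged sketch: the old loop formula and the new Multiplier/Addend pair agree by distributivity.
  class ScaledDataSketch {
    static void Main() {
      double offset = -3.0, factor = 0.25, val = 7.0;
      double oldStyle = (val + offset) * factor;        // formula from the removed loop
      double newStyle = val * factor + offset * factor; // val * Multiplier + Addend
      Console.WriteLine($"{oldStyle} == {newStyle}");   // both print 1
    }
  }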
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs
r14523 → r14843

      IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
      IEnumerable<int> rows = problemData.TrainingIndices;
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
+     double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Neural network classification does not support NaN or infinity values in the input dataset.");
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassification.cs
r14523 → r14843

      public NeuralNetworkEnsembleClassification()
        : base() {
-       var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { 
-         (IntValue)new IntValue(0).AsReadOnly(), 
-         (IntValue)new IntValue(1).AsReadOnly(), 
+       var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] {
+         (IntValue)new IntValue(0).AsReadOnly(),
+         (IntValue)new IntValue(1).AsReadOnly(),
          (IntValue)new IntValue(2).AsReadOnly() });
        var selectedHiddenLayerValue = (from v in validHiddenLayerValues
…
      IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
      IEnumerable<int> rows = problemData.TrainingIndices;
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
+     double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Neural network ensemble classification does not support NaN or infinity values in the input dataset.");
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs
r14400 → r14843

      public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

        int n = inputData.GetLength(0);
…
      public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

        int n = inputData.GetLength(0);
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegression.cs
r14523 → r14843

      IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
      IEnumerable<int> rows = problemData.TrainingIndices;
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
+     double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Neural network ensemble regression does not support NaN or infinity values in the input dataset.");
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs
r14400 → r14843

      public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

        int n = inputData.GetLength(0);
…
      public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(allowedInputVariables, rows);

        int n = inputData.GetLength(0);
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs
r14523 → r14843

      IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
      IEnumerable<int> rows = problemData.TrainingIndices;
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows);
+     double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset.");
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs
r14400 → r14843

      public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(AllowedInputVariables, rows);
        AssertInputMatrix(inputData);
…
      public IEnumerable<double> GetEstimatedVariances(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(AllowedInputVariables, rows);
        AssertInputMatrix(inputData);
…
      public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) {
-       double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows);
+       double[,] inputData = dataset.ToArray(AllowedInputVariables, rows);
        AssertInputMatrix(inputData);
…
        out double rmsError, out double outOfBagRmsError, out double avgRelError, out double outOfBagAvgRelError) {
        var variables = problemData.AllowedInputVariables.Concat(new string[] { problemData.TargetVariable });
-       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(problemData.Dataset, variables, trainingIndices);
+       double[,] inputMatrix = problemData.Dataset.ToArray(variables, trainingIndices);

        alglib.dfreport rep;
…
        var variables = problemData.AllowedInputVariables.Concat(new string[] { problemData.TargetVariable });
-       double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(problemData.Dataset, variables, trainingIndices);
+       double[,] inputMatrix = problemData.Dataset.ToArray(variables, trainingIndices);

        var classValues = problemData.ClassValues.ToArray();
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/TimeSeries/AutoregressiveModeling.cs
r14523 → r14843

      alglib.lrunpack(lm, out coefficients, out nFeatures);

-
-     ISymbolicExpressionTree tree = new SymbolicExpressionTree(new ProgramRootSymbol().CreateTreeNode());
-     ISymbolicExpressionTreeNode startNode = new StartSymbol().CreateTreeNode();
-     tree.Root.AddSubtree(startNode);
-     ISymbolicExpressionTreeNode addition = new Addition().CreateTreeNode();
-     startNode.AddSubtree(addition);
-
-     for (int i = 0; i < timeOffset; i++) {
-       LaggedVariableTreeNode node = (LaggedVariableTreeNode)new LaggedVariable().CreateTreeNode();
-       node.VariableName = targetVariable;
-       node.Weight = coefficients[i];
-       node.Lag = (i + 1) * -1;
-       addition.AddSubtree(node);
-     }
-
-     ConstantTreeNode cNode = (ConstantTreeNode)new Constant().CreateTreeNode();
-     cNode.Value = coefficients[coefficients.Length - 1];
-     addition.AddSubtree(cNode);
+     var tree = LinearModelToTreeConverter.CreateTree(
+       variableNames: Enumerable.Repeat(problemData.TargetVariable, nFeatures).ToArray(),
+       lags: Enumerable.Range(0, timeOffset).Select(i => (i + 1) * -1).ToArray(),
+       coefficients: coefficients.Take(nFeatures).ToArray(),
+       @const: coefficients[nFeatures]
+     );

      var interpreter = new SymbolicTimeSeriesPrognosisExpressionTreeInterpreter(problemData.TargetVariable);
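The converted autoregressive model is a linear model over lagged copies of the target, y(t) = c1*y(t-1) + ... + ck*y(t-k) + const with k = timeOffset, and the lags are encoded as -1, -2, ..., -timeOffset while the constant sits at coefficients[nFeatures]. A standalone sketch of the lag convention and the resulting prediction (coefficients and series values are made up for illustration; no HeuristicLab types):

  using System;
  using System.Linq;

  // Hedged sketch: lag encoding (i + 1) * -1 and the prediction it implies.
  class ArSketch {
    static void Main() {
      int timeOffset = 3;
      int[] lags = Enumerable.Range(0, timeOffset).Select(i => (i + 1) * -1).ToArray(); // {-1, -2, -3}

      double[] coefficients = { 0.6, 0.3, 0.1 }; // one weight per lag
      double constant = 0.05;
      double[] y = { 1.0, 1.2, 1.1, 1.3 };       // y[3] is the current time step

      int t = y.Length - 1;
      double prediction = constant;
      for (int i = 0; i < timeOffset; i++)
        prediction += coefficients[i] * y[t + lags[i]]; // y[t-1], y[t-2], y[t-3]

      Console.WriteLine(string.Join(", ", lags)); // -1, -2, -3
      Console.WriteLine(prediction);              // 1.17
    }
  }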
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/kMeans/KMeansClustering.cs
r14523 → r14843

      double[,] centers;
      int[] xyc;
-     double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows);
+     double[,] inputMatrix = dataset.ToArray(allowedInputVariables, rows);
      if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x)))
        throw new NotSupportedException("k-Means clustering does not support NaN or infinity values in the input dataset.");