Changeset 14400
- Timestamp:
- 11/17/16 15:41:33 (8 years ago)
- Location:
- trunk/sources
- Files:
- 2 deleted
- 41 edited
- 4 copied
Legend:
- Unmodified
- Added
- Removed
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs
r14393 r14400 165 165 try { 166 166 CalculateModel(ds, rows, scaleInputs); 167 } catch (alglib.alglibexception ae) { 167 } 168 catch (alglib.alglibexception ae) { 168 169 // wrap exception so that calling code doesn't have to know about alglib implementation 169 170 throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae); … … 259 260 private static double[,] GetData(IDataset ds, IEnumerable<string> allowedInputs, IEnumerable<int> rows, Scaling scaling) { 260 261 if (scaling != null) { 261 // TODO: completely remove Scaling class 262 List<ITransformation<double>> transformations = new List<ITransformation<double>>(); 263 264 foreach (var varName in allowedInputs) { 265 double min; 266 double max; 267 scaling.GetScalingParameters(varName, out min, out max); 268 var add = -min / (max - min); 269 var mult = 1.0 / (max - min); 270 transformations.Add(new LinearTransformation(allowedInputs) { Addend = add, Multiplier = mult }); 271 } 272 return ds.ToArray(allowedInputs, transformations, rows); 262 return AlglibUtil.PrepareAndScaleInputMatrix(ds, allowedInputs, rows, scaling); 273 263 } else { 274 return ds.ToArray(allowedInputs, rows);264 return AlglibUtil.PrepareInputMatrix(ds, allowedInputs, rows); 275 265 } 276 266 } … … 344 334 return Enumerable.Range(0, newN) 345 335 .Select(i => ms[i] + Util.ScalarProd(Ks[i], alpha)); 346 } catch (alglib.alglibexception ae) { 336 } 337 catch (alglib.alglibexception ae) { 347 338 // wrap exception so that calling code doesn't have to know about alglib implementation 348 339 throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae); … … 390 381 } 391 382 return kss; 392 } catch (alglib.alglibexception ae) { 383 } 384 catch (alglib.alglibexception ae) { 393 385 // wrap exception so that calling code doesn't have to know about alglib implementation 394 386 throw new ArgumentException("There was a problem in the calculation of the Gaussian 
process model", ae); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj
r14393 r14400 244 244 <SubType>Code</SubType> 245 245 </Compile> 246 <Compile Include="Linear\AlglibUtil.cs" /> 247 <Compile Include="Linear\Scaling.cs" /> 246 248 <Compile Include="Linear\LinearDiscriminantAnalysis.cs" /> 247 249 <Compile Include="Linear\LinearRegression.cs"> … … 251 253 <Compile Include="Linear\MultinomialLogitClassificationSolution.cs" /> 252 254 <Compile Include="Linear\MultinomialLogitModel.cs" /> 253 <Compile Include="Linear\Scaling.cs" />254 255 <Compile Include="MctsSymbolicRegression\Automaton.cs" /> 255 256 <Compile Include="MctsSymbolicRegression\CodeGenerator.cs" /> -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearDiscriminantAnalysis.cs
r14393 r14400 70 70 IEnumerable<int> rows = problemData.TrainingIndices; 71 71 int nClasses = problemData.ClassNames.Count(); 72 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);72 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 73 73 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 74 74 throw new NotSupportedException("Linear discriminant analysis does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/LinearRegression.cs
r14393 r14400 73 73 IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables; 74 74 IEnumerable<int> rows = problemData.TrainingIndices; 75 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);75 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 76 76 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 77 77 throw new NotSupportedException("Linear regression does not support NaN or infinity values in the input dataset."); … … 81 81 int nRows = inputMatrix.GetLength(0); 82 82 int nFeatures = inputMatrix.GetLength(1) - 1; 83 double[] coefficients ;83 double[] coefficients = new double[nFeatures + 1]; // last coefficient is for the constant 84 84 85 85 int retVal = 1; … … 91 91 alglib.lrunpack(lm, out coefficients, out nFeatures); 92 92 93 var tree = LinearModelToTreeConverter.CreateTree(allowedInputVariables.ToArray(), 94 coefficients.Take(nFeatures).ToArray(), @const: coefficients[nFeatures]); 93 ISymbolicExpressionTree tree = new SymbolicExpressionTree(new ProgramRootSymbol().CreateTreeNode()); 94 ISymbolicExpressionTreeNode startNode = new StartSymbol().CreateTreeNode(); 95 tree.Root.AddSubtree(startNode); 96 ISymbolicExpressionTreeNode addition = new Addition().CreateTreeNode(); 97 startNode.AddSubtree(addition); 98 99 int col = 0; 100 foreach (string column in allowedInputVariables) { 101 VariableTreeNode vNode = (VariableTreeNode)new HeuristicLab.Problems.DataAnalysis.Symbolic.Variable().CreateTreeNode(); 102 vNode.VariableName = column; 103 vNode.Weight = coefficients[col]; 104 addition.AddSubtree(vNode); 105 col++; 106 } 107 108 ConstantTreeNode cNode = (ConstantTreeNode)new Constant().CreateTreeNode(); 109 cNode.Value = coefficients[coefficients.Length - 1]; 110 addition.AddSubtree(cNode); 95 111 96 112 SymbolicRegressionSolution solution = new 
SymbolicRegressionSolution(new SymbolicRegressionModel(problemData.TargetVariable, tree, new SymbolicDataAnalysisExpressionTreeInterpreter()), (IRegressionProblemData)problemData.Clone()); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitClassification.cs
r14393 r14400 70 70 IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables; 71 71 IEnumerable<int> rows = problemData.TrainingIndices; 72 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);72 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 73 73 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 74 74 throw new NotSupportedException("Multinomial logit classification does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/MultinomialLogitModel.cs
r14393 r14400 83 83 84 84 public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) { 85 double[,] inputData = dataset.ToArray(allowedInputVariables, rows);85 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 86 86 87 87 int n = inputData.GetLength(0); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Linear/Scaling.cs
r14393 r14400 29 29 30 30 namespace HeuristicLab.Algorithms.DataAnalysis { 31 [Obsolete("Use transformation classes in Problems.DataAnalysis instead")]32 31 [StorableClass] 33 32 [Item(Name = "Scaling", Description = "Contains information about scaling of variables for data-analysis algorithms.")] -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/LdaInitializer.cs
r14393 r14400 44 44 var attributes = data.AllowedInputVariables.Count(); 45 45 46 var ldaDs = data.Dataset.ToArray(47 data.AllowedInputVariables.Concat(data.TargetVariable.ToEnumerable()),48 data.TrainingIndices);46 var ldaDs = AlglibUtil.PrepareInputMatrix(data.Dataset, 47 data.AllowedInputVariables.Concat(data.TargetVariable.ToEnumerable()), 48 data.TrainingIndices); 49 49 50 50 // map class values to sequential natural numbers (required by alglib) -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/Initialization/PcaInitializer.cs
r14393 r14400 44 44 var attributes = data.AllowedInputVariables.Count(); 45 45 46 var pcaDs = data.Dataset.ToArray(data.AllowedInputVariables, data.TrainingIndices);46 var pcaDs = AlglibUtil.PrepareInputMatrix(data.Dataset, data.AllowedInputVariables, data.TrainingIndices); 47 47 48 48 int info; -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaGradientCalculator.cs
r14393 r14400 99 99 } 100 100 101 var data = problemData.Dataset.ToArray(problemData.AllowedInputVariables,102 problemData.TrainingIndices);101 var data = AlglibUtil.PrepareInputMatrix(problemData.Dataset, problemData.AllowedInputVariables, 102 problemData.TrainingIndices); 103 103 var classes = problemData.Dataset.GetDoubleValues(problemData.TargetVariable, problemData.TrainingIndices).ToArray(); 104 104 -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Nca/NcaModel.cs
r14393 r14400 86 86 87 87 public double[,] Reduce(IDataset dataset, IEnumerable<int> rows) { 88 var data = dataset.ToArray(allowedInputVariables, rows);88 var data = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 89 89 90 90 var targets = dataset.GetDoubleValues(TargetVariable, rows).ToArray(); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NearestNeighbour/NearestNeighbourModel.cs
r14393 r14400 119 119 if (IsCompatibilityLoaded) { 120 120 // no scaling 121 inputMatrix = dataset.ToArray(121 inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, 122 122 this.allowedInputVariables.Concat(new string[] { targetVariable }), 123 123 rows); … … 167 167 168 168 private static double[,] CreateScaledData(IDataset dataset, IEnumerable<string> variables, IEnumerable<int> rows, double[] offsets, double[] factors) { 169 var transforms = 170 variables.Select( 171 (_, colIdx) => 172 new LinearTransformation(variables) { Addend = offsets[colIdx] * factors[colIdx], Multiplier = factors[colIdx] }); 173 return dataset.ToArray(variables, transforms, rows); 169 var x = new double[rows.Count(), variables.Count()]; 170 var colIdx = 0; 171 foreach (var variableName in variables) { 172 var rowIdx = 0; 173 foreach (var val in dataset.GetDoubleValues(variableName, rows)) { 174 x[rowIdx, colIdx] = (val + offsets[colIdx]) * factors[colIdx]; 175 rowIdx++; 176 } 177 colIdx++; 178 } 179 return x; 174 180 } 175 181 … … 181 187 double[,] inputData; 182 188 if (IsCompatibilityLoaded) { 183 inputData = dataset.ToArray(allowedInputVariables, rows);189 inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 184 190 } else { 185 191 inputData = CreateScaledData(dataset, allowedInputVariables, rows, offsets, weights); … … 217 223 double[,] inputData; 218 224 if (IsCompatibilityLoaded) { 219 inputData = dataset.ToArray(allowedInputVariables, rows);225 inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 220 226 } else { 221 227 inputData = CreateScaledData(dataset, allowedInputVariables, rows, offsets, weights); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkClassification.cs
r14393 r14400 183 183 IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables; 184 184 IEnumerable<int> rows = problemData.TrainingIndices; 185 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);185 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 186 186 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 187 187 throw new NotSupportedException("Neural network classification does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleClassification.cs
r14393 r14400 124 124 public NeuralNetworkEnsembleClassification() 125 125 : base() { 126 var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { 127 (IntValue)new IntValue(0).AsReadOnly(), 128 (IntValue)new IntValue(1).AsReadOnly(), 126 var validHiddenLayerValues = new ItemSet<IntValue>(new IntValue[] { 127 (IntValue)new IntValue(0).AsReadOnly(), 128 (IntValue)new IntValue(1).AsReadOnly(), 129 129 (IntValue)new IntValue(2).AsReadOnly() }); 130 130 var selectedHiddenLayerValue = (from v in validHiddenLayerValues … … 169 169 IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables; 170 170 IEnumerable<int> rows = problemData.TrainingIndices; 171 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);171 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 172 172 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 173 173 throw new NotSupportedException("Neural network ensemble classification does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleModel.cs
r14393 r14400 91 91 92 92 public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) { 93 double[,] inputData = dataset.ToArray(allowedInputVariables, rows);93 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 94 94 95 95 int n = inputData.GetLength(0); … … 108 108 109 109 public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) { 110 double[,] inputData = dataset.ToArray(allowedInputVariables, rows);110 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 111 111 112 112 int n = inputData.GetLength(0); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkEnsembleRegression.cs
r14393 r14400 168 168 IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables; 169 169 IEnumerable<int> rows = problemData.TrainingIndices; 170 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);170 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 171 171 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 172 172 throw new NotSupportedException("Neural network ensemble regression does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkModel.cs
r14393 r14400 95 95 96 96 public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) { 97 double[,] inputData = dataset.ToArray(allowedInputVariables, rows);97 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 98 98 99 99 int n = inputData.GetLength(0); … … 112 112 113 113 public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) { 114 double[,] inputData = dataset.ToArray(allowedInputVariables, rows);114 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 115 115 116 116 int n = inputData.GetLength(0); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/NeuralNetwork/NeuralNetworkRegression.cs
r14393 r14400 184 184 IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables; 185 185 IEnumerable<int> rows = problemData.TrainingIndices; 186 double[,] inputMatrix = dataset.ToArray(allowedInputVariables.Concat(new string[] { targetVariable }), rows);186 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables.Concat(new string[] { targetVariable }), rows); 187 187 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 188 188 throw new NotSupportedException("Neural network regression does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/RandomForest/RandomForestModel.cs
r14393 r14400 139 139 140 140 public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) { 141 double[,] inputData = dataset.ToArray(AllowedInputVariables, rows);141 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows); 142 142 AssertInputMatrix(inputData); 143 143 … … 157 157 158 158 public IEnumerable<double> GetEstimatedVariances(IDataset dataset, IEnumerable<int> rows) { 159 double[,] inputData = dataset.ToArray(AllowedInputVariables, rows);159 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows); 160 160 AssertInputMatrix(inputData); 161 161 … … 175 175 176 176 public override IEnumerable<double> GetEstimatedClassValues(IDataset dataset, IEnumerable<int> rows) { 177 double[,] inputData = dataset.ToArray(AllowedInputVariables, rows);177 double[,] inputData = AlglibUtil.PrepareInputMatrix(dataset, AllowedInputVariables, rows); 178 178 AssertInputMatrix(inputData); 179 179 … … 294 294 out double rmsError, out double outOfBagRmsError, out double avgRelError, out double outOfBagAvgRelError) { 295 295 var variables = problemData.AllowedInputVariables.Concat(new string[] { problemData.TargetVariable }); 296 double[,] inputMatrix = problemData.Dataset.ToArray(variables, trainingIndices);296 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(problemData.Dataset, variables, trainingIndices); 297 297 298 298 alglib.dfreport rep; … … 316 316 317 317 var variables = problemData.AllowedInputVariables.Concat(new string[] { problemData.TargetVariable }); 318 double[,] inputMatrix = problemData.Dataset.ToArray(variables, trainingIndices);318 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(problemData.Dataset, variables, trainingIndices); 319 319 320 320 var classValues = problemData.ClassValues.ToArray(); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/TimeSeries/AutoregressiveModeling.cs
r14391 r14400 114 114 alglib.lrunpack(lm, out coefficients, out nFeatures); 115 115 116 var tree = LinearModelToTreeConverter.CreateTree( 117 variableNames: Enumerable.Repeat(problemData.TargetVariable, nFeatures).ToArray(), 118 lags: Enumerable.Range(0, timeOffset).Select(i => (i + 1) * -1).ToArray(), 119 coefficients: coefficients.Take(nFeatures).ToArray(), 120 @const: coefficients[nFeatures] 121 ); 116 117 ISymbolicExpressionTree tree = new SymbolicExpressionTree(new ProgramRootSymbol().CreateTreeNode()); 118 ISymbolicExpressionTreeNode startNode = new StartSymbol().CreateTreeNode(); 119 tree.Root.AddSubtree(startNode); 120 ISymbolicExpressionTreeNode addition = new Addition().CreateTreeNode(); 121 startNode.AddSubtree(addition); 122 123 for (int i = 0; i < timeOffset; i++) { 124 LaggedVariableTreeNode node = (LaggedVariableTreeNode)new LaggedVariable().CreateTreeNode(); 125 node.VariableName = targetVariable; 126 node.Weight = coefficients[i]; 127 node.Lag = (i + 1) * -1; 128 addition.AddSubtree(node); 129 } 130 131 ConstantTreeNode cNode = (ConstantTreeNode)new Constant().CreateTreeNode(); 132 cNode.Value = coefficients[coefficients.Length - 1]; 133 addition.AddSubtree(cNode); 122 134 123 135 var interpreter = new SymbolicTimeSeriesPrognosisExpressionTreeInterpreter(problemData.TargetVariable); -
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/kMeans/KMeansClustering.cs
r14393 r14400 89 89 double[,] centers; 90 90 int[] xyc; 91 double[,] inputMatrix = dataset.ToArray(allowedInputVariables, rows);91 double[,] inputMatrix = AlglibUtil.PrepareInputMatrix(dataset, allowedInputVariables, rows); 92 92 if (inputMatrix.Cast<double>().Any(x => double.IsNaN(x) || double.IsInfinity(x))) 93 93 throw new NotSupportedException("k-Means clustering does not support NaN or infinity values in the input dataset."); -
trunk/sources/HeuristicLab.Common/3.3/EnumerableExtensions.cs
r14393 r14400 133 133 } 134 134 } 135 public static IEnumerable<T> TakeEvery<T>(this IEnumerable<T> xs, int nth) {136 int i = 0;137 foreach (var x in xs) {138 if (i % nth == 0) yield return x;139 i++;140 }141 }142 135 143 136 /// <summary> -
trunk/sources/HeuristicLab.DataPreprocessing/3.4/PreprocessingTransformator.cs
r14393 r14400 113 113 // don't apply when the check fails 114 114 if (success) 115 return transformation. ConfigureAndApply(data);115 return transformation.Apply(data); 116 116 else 117 117 return data; -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression.Views/3.4/SymbolicRegressionSolutionErrorCharacteristicsCurveView.cs
r14390 r14400 53 53 //check inputVariables used in the symbolic regression model 54 54 var usedVariables = 55 Content.Model.VariablesUsedForPrediction; 55 Content.Model.SymbolicExpressionTree.IterateNodesPostfix().OfType<VariableTreeNode>().Select( 56 node => node.VariableName).Distinct(); 56 57 foreach (var variable in usedVariables) { 57 58 problemData.InputVariables.SetItemCheckedState( -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4.csproj
r14378 r14400 102 102 <SpecificVersion>False</SpecificVersion> 103 103 <HintPath>..\..\bin\ALGLIB-3.7.0.dll</HintPath> 104 <Private>False</Private> 105 </Reference> 106 <Reference Include="AutoDiff-1.0, Version=1.0.0.14388, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL"> 107 <HintPath>..\..\bin\AutoDiff-1.0.dll</HintPath> 104 108 <Private>False</Private> 105 109 </Reference> -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/Plugin.cs.frame
r14378 r14400 29 29 [PluginFile("HeuristicLab.Problems.DataAnalysis.Symbolic.Regression-3.4.dll", PluginFileType.Assembly)] 30 30 [PluginDependency("HeuristicLab.ALGLIB", "3.7.0")] 31 [PluginDependency("HeuristicLab.AutoDiff", "1.0")] 31 32 [PluginDependency("HeuristicLab.Analysis", "3.3")] 32 33 [PluginDependency("HeuristicLab.Common", "3.3")] -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs
r14390 r14400 23 23 using System.Collections.Generic; 24 24 using System.Linq; 25 using AutoDiff; 25 26 using HeuristicLab.Common; 26 27 using HeuristicLab.Core; … … 152 153 } 153 154 155 #region derivations of functions 156 // create function factory for arctangent 157 private readonly Func<Term, UnaryFunc> arctan = UnaryFunc.Factory( 158 eval: Math.Atan, 159 diff: x => 1 / (1 + x * x)); 160 private static readonly Func<Term, UnaryFunc> sin = UnaryFunc.Factory( 161 eval: Math.Sin, 162 diff: Math.Cos); 163 private static readonly Func<Term, UnaryFunc> cos = UnaryFunc.Factory( 164 eval: Math.Cos, 165 diff: x => -Math.Sin(x)); 166 private static readonly Func<Term, UnaryFunc> tan = UnaryFunc.Factory( 167 eval: Math.Tan, 168 diff: x => 1 + Math.Tan(x) * Math.Tan(x)); 169 private static readonly Func<Term, UnaryFunc> erf = UnaryFunc.Factory( 170 eval: alglib.errorfunction, 171 diff: x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI)); 172 private static readonly Func<Term, UnaryFunc> norm = UnaryFunc.Factory( 173 eval: alglib.normaldistribution, 174 diff: x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI)); 175 #endregion 176 177 154 178 public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, bool updateVariableWeights = true, double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue, bool updateConstantsInTree = true) { 155 179 156 string[] variableNames;157 int[] lags;158 double[] constants;159 160 TreeToAutoDiffTermConverter.ParametricFunction func; 161 TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;162 if (!Tr eeToAutoDiffTermConverter.TryTransformToAutoDiff(tree, updateVariableWeights, out variableNames, out lags, out constants, out func, out func_grad))180 List<AutoDiff.Variable> variables = new 
List<AutoDiff.Variable>(); 181 List<AutoDiff.Variable> parameters = new List<AutoDiff.Variable>(); 182 List<string> variableNames = new List<string>(); 183 List<int> lags = new List<int>(); 184 185 AutoDiff.Term func; 186 if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out func)) 163 187 throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree."); 164 if (variableNames.Length == 0) return 0.0; 188 if (variableNames.Count == 0) return 0.0; 189 190 AutoDiff.IParametricCompiledTerm compiledFunc = func.Compile(variables.ToArray(), parameters.ToArray()); 191 192 List<SymbolicExpressionTreeTerminalNode> terminalNodes = null; 193 if (updateVariableWeights) 194 terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList(); 195 else 196 terminalNodes = new List<SymbolicExpressionTreeTerminalNode>(tree.Root.IterateNodesPrefix().OfType<ConstantTreeNode>()); 165 197 166 198 //extract inital constants 167 double[] c = new double[constants.Length + 2]; 168 c[0] = 0.0; 169 c[1] = 1.0; 170 Array.Copy(constants, 0, c, 2, constants.Length); 199 double[] c = new double[variables.Count]; 200 { 201 c[0] = 0.0; 202 c[1] = 1.0; 203 int i = 2; 204 foreach (var node in terminalNodes) { 205 ConstantTreeNode constantTreeNode = node as ConstantTreeNode; 206 VariableTreeNode variableTreeNode = node as VariableTreeNode; 207 if (constantTreeNode != null) 208 c[i++] = constantTreeNode.Value; 209 else if (updateVariableWeights && variableTreeNode != null) 210 c[i++] = variableTreeNode.Weight; 211 } 212 } 171 213 double[] originalConstants = (double[])c.Clone(); 172 214 double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling); … … 176 218 int info; 177 219 178 // TODO: 
refactor179 220 IDataset ds = problemData.Dataset; 180 double[,] x = new double[rows.Count(), variableNames. Length];221 double[,] x = new double[rows.Count(), variableNames.Count]; 181 222 int row = 0; 182 223 foreach (var r in rows) { 183 for (int col = 0; col < variableNames. Length; col++) {224 for (int col = 0; col < variableNames.Count; col++) { 184 225 int lag = lags[col]; 185 226 x[row, col] = ds.GetDoubleValue(variableNames[col], r + lag); … … 192 233 int k = c.Length; 193 234 194 alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc( func);195 alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad( func_grad);235 alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(compiledFunc); 236 alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(compiledFunc); 196 237 197 238 try { … … 231 272 } 232 273 233 private static alglib.ndimensional_pfunc CreatePFunc( TreeToAutoDiffTermConverter.ParametricFunction func) {234 return (double[] c, double[] x, ref double f x, object o) => {235 f x = func(c, x);274 private static alglib.ndimensional_pfunc CreatePFunc(AutoDiff.IParametricCompiledTerm compiledFunc) { 275 return (double[] c, double[] x, ref double func, object o) => { 276 func = compiledFunc.Evaluate(c, x); 236 277 }; 237 278 } 238 279 239 private static alglib.ndimensional_pgrad CreatePGrad( TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {240 return (double[] c, double[] x, ref double f x, double[] grad, object o) => {241 var tupel = func_grad(c, x);242 f x= tupel.Item2;280 private static alglib.ndimensional_pgrad CreatePGrad(AutoDiff.IParametricCompiledTerm compiledFunc) { 281 return (double[] c, double[] x, ref double func, double[] grad, object o) => { 282 var tupel = compiledFunc.Differentiate(c, x); 283 func = tupel.Item2; 243 284 Array.Copy(tupel.Item1, grad, grad.Length); 244 285 }; 245 286 } 246 287 288 private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, 
List<AutoDiff.Variable> parameters, List<string> variableNames, List<int> lags, bool updateVariableWeights, out AutoDiff.Term term) { 289 if (node.Symbol is Constant) { 290 var var = new AutoDiff.Variable(); 291 variables.Add(var); 292 term = var; 293 return true; 294 } 295 if (node.Symbol is Variable) { 296 var varNode = node as VariableTreeNode; 297 var par = new AutoDiff.Variable(); 298 parameters.Add(par); 299 variableNames.Add(varNode.VariableName); 300 lags.Add(0); 301 302 if (updateVariableWeights) { 303 var w = new AutoDiff.Variable(); 304 variables.Add(w); 305 term = AutoDiff.TermBuilder.Product(w, par); 306 } else { 307 term = varNode.Weight * par; 308 } 309 return true; 310 } 311 if (node.Symbol is LaggedVariable) { 312 var varNode = node as LaggedVariableTreeNode; 313 var par = new AutoDiff.Variable(); 314 parameters.Add(par); 315 variableNames.Add(varNode.VariableName); 316 lags.Add(varNode.Lag); 317 318 if (updateVariableWeights) { 319 var w = new AutoDiff.Variable(); 320 variables.Add(w); 321 term = AutoDiff.TermBuilder.Product(w, par); 322 } else { 323 term = varNode.Weight * par; 324 } 325 return true; 326 } 327 if (node.Symbol is Addition) { 328 List<AutoDiff.Term> terms = new List<Term>(); 329 foreach (var subTree in node.Subtrees) { 330 AutoDiff.Term t; 331 if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 332 term = null; 333 return false; 334 } 335 terms.Add(t); 336 } 337 term = AutoDiff.TermBuilder.Sum(terms); 338 return true; 339 } 340 if (node.Symbol is Subtraction) { 341 List<AutoDiff.Term> terms = new List<Term>(); 342 for (int i = 0; i < node.SubtreeCount; i++) { 343 AutoDiff.Term t; 344 if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 345 term = null; 346 return false; 347 } 348 if (i > 0) t = -t; 349 terms.Add(t); 350 } 351 if (terms.Count == 1) term = -terms[0]; 352 else term = 
AutoDiff.TermBuilder.Sum(terms); 353 return true; 354 } 355 if (node.Symbol is Multiplication) { 356 List<AutoDiff.Term> terms = new List<Term>(); 357 foreach (var subTree in node.Subtrees) { 358 AutoDiff.Term t; 359 if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 360 term = null; 361 return false; 362 } 363 terms.Add(t); 364 } 365 if (terms.Count == 1) term = terms[0]; 366 else term = terms.Aggregate((a, b) => new AutoDiff.Product(a, b)); 367 return true; 368 369 } 370 if (node.Symbol is Division) { 371 List<AutoDiff.Term> terms = new List<Term>(); 372 foreach (var subTree in node.Subtrees) { 373 AutoDiff.Term t; 374 if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 375 term = null; 376 return false; 377 } 378 terms.Add(t); 379 } 380 if (terms.Count == 1) term = 1.0 / terms[0]; 381 else term = terms.Aggregate((a, b) => new AutoDiff.Product(a, 1.0 / b)); 382 return true; 383 } 384 if (node.Symbol is Logarithm) { 385 AutoDiff.Term t; 386 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 387 term = null; 388 return false; 389 } else { 390 term = AutoDiff.TermBuilder.Log(t); 391 return true; 392 } 393 } 394 if (node.Symbol is Exponential) { 395 AutoDiff.Term t; 396 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 397 term = null; 398 return false; 399 } else { 400 term = AutoDiff.TermBuilder.Exp(t); 401 return true; 402 } 403 } 404 if (node.Symbol is Square) { 405 AutoDiff.Term t; 406 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 407 term = null; 408 return false; 409 } else { 410 term = AutoDiff.TermBuilder.Power(t, 2.0); 411 return true; 412 } 413 } 414 if (node.Symbol is SquareRoot) { 415 AutoDiff.Term t; 416 if 
(!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 417 term = null; 418 return false; 419 } else { 420 term = AutoDiff.TermBuilder.Power(t, 0.5); 421 return true; 422 } 423 } 424 if (node.Symbol is Sine) { 425 AutoDiff.Term t; 426 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 427 term = null; 428 return false; 429 } else { 430 term = sin(t); 431 return true; 432 } 433 } 434 if (node.Symbol is Cosine) { 435 AutoDiff.Term t; 436 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 437 term = null; 438 return false; 439 } else { 440 term = cos(t); 441 return true; 442 } 443 } 444 if (node.Symbol is Tangent) { 445 AutoDiff.Term t; 446 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 447 term = null; 448 return false; 449 } else { 450 term = tan(t); 451 return true; 452 } 453 } 454 if (node.Symbol is Erf) { 455 AutoDiff.Term t; 456 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 457 term = null; 458 return false; 459 } else { 460 term = erf(t); 461 return true; 462 } 463 } 464 if (node.Symbol is Norm) { 465 AutoDiff.Term t; 466 if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out t)) { 467 term = null; 468 return false; 469 } else { 470 term = norm(t); 471 return true; 472 } 473 } 474 if (node.Symbol is StartSymbol) { 475 var alpha = new AutoDiff.Variable(); 476 var beta = new AutoDiff.Variable(); 477 variables.Add(beta); 478 variables.Add(alpha); 479 AutoDiff.Term branchTerm; 480 if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, lags, updateVariableWeights, out branchTerm)) { 481 term = branchTerm * 
alpha + beta; 482 return true; 483 } else { 484 term = null; 485 return false; 486 } 487 } 488 term = null; 489 return false; 490 } 491 247 492 public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) { 248 return TreeToAutoDiffTermConverter.IsCompatible(tree); 493 var containsUnknownSymbol = ( 494 from n in tree.Root.GetSubtree(0).IterateNodesPrefix() 495 where 496 !(n.Symbol is Variable) && 497 !(n.Symbol is LaggedVariable) && 498 !(n.Symbol is Constant) && 499 !(n.Symbol is Addition) && 500 !(n.Symbol is Subtraction) && 501 !(n.Symbol is Multiplication) && 502 !(n.Symbol is Division) && 503 !(n.Symbol is Logarithm) && 504 !(n.Symbol is Exponential) && 505 !(n.Symbol is SquareRoot) && 506 !(n.Symbol is Square) && 507 !(n.Symbol is Sine) && 508 !(n.Symbol is Cosine) && 509 !(n.Symbol is Tangent) && 510 !(n.Symbol is Erf) && 511 !(n.Symbol is Norm) && 512 !(n.Symbol is StartSymbol) 513 select n). 514 Any(); 515 return !containsUnknownSymbol; 249 516 } 250 517 } -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Views/3.4/InteractiveSymbolicDataAnalysisSolutionSimplifierView.cs
r14390 r14400 259 259 260 260 private void btnSimplify_Click(object sender, EventArgs e) { 261 var simplifier = new TreeSimplifier();261 var simplifier = new SymbolicDataAnalysisExpressionTreeSimplifier(); 262 262 var simplifiedExpressionTree = simplifier.Simplify(Content.Model.SymbolicExpressionTree); 263 263 UpdateModel(simplifiedExpressionTree); -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Formatters/SymbolicDataAnalysisExpressionExcelFormatter.cs
r14390 r14400 51 51 while (dividend > 0) { 52 52 int modulo = (dividend - 1) % 26; 53 columnName = System.Convert.ToChar(65 + modulo).ToString() + columnName;53 columnName = Convert.ToChar(65 + modulo).ToString() + columnName; 54 54 dividend = (int)((dividend - modulo) / 26); 55 55 } -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.csproj
r14390 r14400 103 103 <Private>False</Private> 104 104 </Reference> 105 <Reference Include="AutoDiff-1.0, Version=1.0.0.14388, Culture=neutral, PublicKeyToken=ba48961d6f65dcec, processorArchitecture=MSIL">106 <HintPath>..\..\bin\AutoDiff-1.0.dll</HintPath>107 <Private>False</Private>108 </Reference>109 105 <Reference Include="System" /> 110 106 <Reference Include="System.Core"> … … 137 133 <SubType>Code</SubType> 138 134 </Compile> 139 <Compile Include="Converters\Convert.cs" />140 <Compile Include="Converters\LinearModelToTreeConverter.cs" />141 <Compile Include="Converters\TreeSimplifier.cs" />142 <Compile Include="Converters\TreeToAutoDiffTermConverter.cs" />143 135 <Compile Include="Formatters\InfixExpressionFormatter.cs" /> 144 136 <Compile Include="Formatters\SymbolicDataAnalysisExpressionMathematicaFormatter.cs" /> … … 151 143 <Compile Include="SymbolicDataAnalysisExpressionTreeSimplificationOperator.cs" /> 152 144 <Compile Include="SymbolicDataAnalysisModelComplexityCalculator.cs" /> 145 <Compile Include="SymbolicExpressionTreeBacktransformator.cs" /> 153 146 <Compile Include="SymbolicDataAnalysisExpressionPruningOperator.cs" /> 154 147 <Compile Include="Analyzers\SymbolicDataAnalysisVariableFrequencyAnalyzer.cs" /> … … 193 186 <Compile Include="Interfaces\ISymbolicDataAnalysisAnalyzer.cs" /> 194 187 <Compile Include="SymbolicDataAnalysisSingleObjectiveProblem.cs" /> 188 <Compile Include="SymbolicDataAnalysisExpressionTreeSimplifier.cs" /> 195 189 <Compile Include="SymbolicDataAnalysisProblem.cs" /> 196 190 <Compile Include="SymbolicDataAnalysisSolutionImpactValuesCalculator.cs" /> … … 246 240 <Compile Include="Symbols\VariableConditionTreeNode.cs" /> 247 241 <Compile Include="Symbols\VariableTreeNode.cs" /> 248 <Compile Include="Transformations\SymbolicExpressionTreeBacktransformator.cs" /> 249 <Compile Include="Transformations\TransformationToSymbolicTreeMapper.cs" /> 242 <Compile Include="TransformationToSymbolicTreeMapper.cs" /> 250 243 <Compile 
Include="TreeMatching\SymbolicExpressionTreeBottomUpSimilarityCalculator.cs" /> 251 244 <Compile Include="TreeMatching\SymbolicExpressionTreeCanonicalSorter.cs" /> -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Plugin.cs.frame
r14378 r14400 29 29 [PluginFile("HeuristicLab.Problems.DataAnalysis.Symbolic-3.4.dll", PluginFileType.Assembly)] 30 30 [PluginDependency("HeuristicLab.ALGLIB", "3.7.0")] 31 [PluginDependency("HeuristicLab.AutoDiff", "1.0")]32 31 [PluginDependency("HeuristicLab.Analysis", "3.3")] 33 32 [PluginDependency("HeuristicLab.Collections", "3.3")] -
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/SymbolicDataAnalysisExpressionTreeSimplificationOperator.cs
r14390 r14400 37 37 } 38 38 39 private readonly TreeSimplifier simplifier = newTreeSimplifier();39 private readonly SymbolicDataAnalysisExpressionTreeSimplifier simplifier = new SymbolicDataAnalysisExpressionTreeSimplifier(); 40 40 41 41 [StorableConstructor] -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/DatasetExtensions.cs
r14393 r14400 20 20 #endregion 21 21 22 using System;23 22 using System.Collections.Generic; 24 using System.Linq;25 23 26 24 namespace HeuristicLab.Problems.DataAnalysis { 27 25 public static class DatasetExtensions { 28 public static double[,] ToArray(this IDataset dataset, IEnumerable<string> variables, IEnumerable<int> rows) { 29 return ToArray(dataset, 30 variables, 31 transformations: variables.Select(_ => (ITransformation<double>)null), // no transform 32 rows: rows); 33 } 34 public static double[,] ToArray(this IDataset dataset, IEnumerable<string> variables, IEnumerable<ITransformation<double>> transformations, IEnumerable<int> rows) { 35 string[] variablesArr = variables.ToArray(); 36 int[] rowsArr = rows.ToArray(); 37 ITransformation<double>[] transformArr = transformations.ToArray(); 38 if (transformArr.Length != variablesArr.Length) 39 throw new ArgumentException("Number of variables and number of transformations must match."); 40 41 double[,] matrix = new double[rowsArr.Length, variablesArr.Length]; 42 43 for (int i = 0; i < variablesArr.Length; i++) { 44 var origValues = dataset.GetDoubleValues(variablesArr[i], rowsArr); 45 var values = transformArr[i] != null ? transformArr[i].Apply(origValues) : origValues; 46 int row = 0; 47 foreach (var value in values) { 48 matrix[row, i] = value; 49 row++; 50 } 26 public static IEnumerable<T> TakeEvery<T>(this IEnumerable<T> xs, int nth) { 27 int i = 0; 28 foreach (var x in xs) { 29 if (i % nth == 0) yield return x; 30 i++; 51 31 } 52 53 return matrix;54 32 } 55 33 } -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/DataAnalysisProblemData.cs
r14396 r14400 73 73 } 74 74 75 public double[,] AllowedInputsTrainingValues {76 get { return Dataset.ToArray(AllowedInputVariables, TrainingIndices); }77 }78 79 public double[,] AllowedInputsTestValues { get { return Dataset.ToArray(AllowedInputVariables, TestIndices); } }80 75 public IntRange TrainingPartition { 81 76 get { return TrainingPartitionParameter.Value; } -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Transformations/LinearTransformation.cs
r14393 r14400 52 52 public double Multiplier { 53 53 get { return MultiplierParameter.Value.Value; } 54 set {54 protected set { 55 55 MultiplierParameter.Value.Value = value; 56 56 } … … 59 59 public double Addend { 60 60 get { return AddendParameter.Value.Value; } 61 set {61 protected set { 62 62 AddendParameter.Value.Value = value; 63 63 } -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Transformations/ShiftStandardDistributionTransformation.cs
r14393 r14400 71 71 72 72 public override IEnumerable<double> Apply(IEnumerable<double> data) { 73 ConfigureParameters(data); 73 74 if (OriginalStandardDeviation.IsAlmost(0.0)) { 74 75 return data; … … 93 94 } 94 95 95 p ublic overridevoid ConfigureParameters(IEnumerable<double> data) {96 protected void ConfigureParameters(IEnumerable<double> data) { 96 97 OriginalStandardDeviation = data.StandardDeviation(); 97 98 OriginalMean = data.Average(); -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Transformations/ShiftToRangeTransformation.cs
r14393 r14400 44 44 } 45 45 46 public override IEnumerable<double> Apply(IEnumerable<double> data) { 47 ConfigureParameters(data); 48 return base.Apply(data); 49 } 50 46 51 public override bool Check(IEnumerable<double> data, out string errorMsg) { 47 52 ConfigureParameters(data); … … 49 54 } 50 55 51 p ublic overridevoid ConfigureParameters(IEnumerable<double> data) {56 protected void ConfigureParameters(IEnumerable<double> data) { 52 57 double originalRangeStart = data.Min(); 53 58 double originalRangeEnd = data.Max(); -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Transformations/Transformation.cs
r14393 r14400 66 66 protected Transformation(IEnumerable<string> allowedColumns) : base(allowedColumns) { } 67 67 68 public virtual void ConfigureParameters(IEnumerable<T> data) {69 // override in transformations with parameters70 }71 72 68 public abstract IEnumerable<T> Apply(IEnumerable<T> data); 73 public IEnumerable<T> ConfigureAndApply(IEnumerable<T> data) {74 ConfigureParameters(data);75 return Apply(data);76 }77 69 78 70 public abstract bool Check(IEnumerable<T> data, out string errorMsg); -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/IDataAnalysisProblemData.cs
r14396 r14400 33 33 IEnumerable<string> AllowedInputVariables { get; } 34 34 35 double[,] AllowedInputsTrainingValues { get; }36 double[,] AllowedInputsTestValues { get; }37 38 35 IntRange TrainingPartition { get; } 39 36 IntRange TestPartition { get; } -
trunk/sources/HeuristicLab.Problems.DataAnalysis/3.4/Interfaces/ITransformation.cs
r14393 r14400 30 30 31 31 public interface ITransformation<T> : ITransformation { 32 void ConfigureParameters(IEnumerable<T> data);33 IEnumerable<T> ConfigureAndApply(IEnumerable<T> data);34 32 IEnumerable<T> Apply(IEnumerable<T> data); 35 33 } -
trunk/sources/HeuristicLab.Tests/HeuristicLab.Problems.DataAnalysis.Symbolic-3.4/SymbolicDataAnalysisExpressionTreeSimplifierTest.cs
r14394 r14400 36 36 public void SimplifierAxiomsTest() { 37 37 SymbolicExpressionImporter importer = new SymbolicExpressionImporter(); 38 TreeSimplifier simplifier = newTreeSimplifier();38 SymbolicDataAnalysisExpressionTreeSimplifier simplifier = new SymbolicDataAnalysisExpressionTreeSimplifier(); 39 39 SymbolicExpressionTreeStringFormatter formatter = new SymbolicExpressionTreeStringFormatter(); 40 40 #region single argument arithmetics
Note: See TracChangeset
for help on using the changeset viewer.