Changeset 13929
 Timestamp:
 06/20/16 22:14:50 (5 years ago)
 Location:
 branches/HeuristicLab.Algorithms.DataAnalysis.Glmnet/3.4
 Files:

 2 edited
Legend:
 Unmodified
 Added
 Removed

branches/HeuristicLab.Algorithms.DataAnalysis.Glmnet/3.4/ElasticNetLinearRegression.cs
r13928 r13929 49 49 protected override void Run() { 50 50 double[] lambda; 51 double[] rsq; 51 double[] trainRsq; 52 double[] testRsq; 52 53 double[,] coeff; 53 54 double[] intercept; 54 RunElasticNetLinearRegression(Problem.ProblemData, Penality, out lambda, out rsq, out coeff, out intercept);55 RunElasticNetLinearRegression(Problem.ProblemData, Penality, out lambda, out trainRsq, out testRsq, out coeff, out intercept); 55 56 56 57 var coeffTable = new DataTable("Coefficient Paths", "The paths of coefficient values over different lambda values"); … … 69 70 70 71 var rsqTable = new DataTable("RSquared", "Path of R² values over different lambda values"); 71 rsqTable.Rows.Add(new DataRow("RSquared", "Path of R² values over different lambda values", rsq)); 72 rsqTable.Rows.Add(new DataRow("R² (train)", "Path of R² values over different lambda values", trainRsq)); 73 rsqTable.Rows.Add(new DataRow("R² (test)", "Path of R² values over different lambda values", testRsq)); 72 74 Results.Add(new Result(rsqTable.Name, rsqTable.Description, rsqTable)); 73 75 } 74 76 75 77 public static double[] CreateElasticNetLinearRegressionSolution(IRegressionProblemData problemData, double penalty, double lambda, 76 out double rsq,78 out double trainRsq, out double testRsq, 77 79 double coeffLowerBound = double.NegativeInfinity, double coeffUpperBound = double.PositiveInfinity) { 78 double[] rsqs; 80 double[] trainRsqs; 81 double[] testRsqs; 79 82 // run for exactly one lambda 80 var coeffs = CreateElasticNetLinearRegressionSolution(problemData, penalty, new double[] { lambda }, out rsqs, coeffLowerBound, coeffUpperBound); 81 rsq = rsqs[0]; 83 var coeffs = CreateElasticNetLinearRegressionSolution(problemData, penalty, new double[] { lambda }, out trainRsqs, out testRsqs, coeffLowerBound, coeffUpperBound); 84 trainRsq = trainRsqs[0]; 85 testRsq = testRsqs[0]; 82 86 return coeffs[0]; 83 87 } 84 88 public static double[][] CreateElasticNetLinearRegressionSolution(IRegressionProblemData 
problemData, double penalty, double[] lambda, 85 out double[] rsq,89 out double[] trainRsq, out double[] testRsq, 86 90 double coeffLowerBound = double.NegativeInfinity, double coeffUpperBound = double.PositiveInfinity, 87 91 int maxVars = 1) { … … 89 93 double[,] coeff; 90 94 double[] intercept; 91 RunElasticNetLinearRegression(problemData, penalty, lambda.Length, 1.0, lambda, out lambda, out rsq, out coeff, out intercept, coeffLowerBound, coeffUpperBound, maxVars);95 RunElasticNetLinearRegression(problemData, penalty, lambda.Length, 1.0, lambda, out lambda, out trainRsq, out testRsq, out coeff, out intercept, coeffLowerBound, coeffUpperBound, maxVars); 92 96 93 97 int nRows = intercept.Length; … … 105 109 106 110 public static void RunElasticNetLinearRegression(IRegressionProblemData problemData, double penalty, 107 out double[] lambda, out double[] rsq, out double[,] coeff, out double[] intercept,111 out double[] lambda, out double[] trainRsq, out double[] testRsq, out double[,] coeff, out double[] intercept, 108 112 double coeffLowerBound = double.NegativeInfinity, double coeffUpperBound = double.PositiveInfinity, 109 113 int maxVars = 1 … … 111 115 double[] userLambda = new double[0]; 112 116 // automatically determine lambda values (maximum 100 different lambda values) 113 RunElasticNetLinearRegression(problemData, penalty, 100, 0.0, userLambda, out lambda, out rsq, out coeff, out intercept, coeffLowerBound, coeffUpperBound, maxVars);117 RunElasticNetLinearRegression(problemData, penalty, 100, 0.0, userLambda, out lambda, out trainRsq, out testRsq, out coeff, out intercept, coeffLowerBound, coeffUpperBound, maxVars); 114 118 } 115 119 … … 123 127 /// <param name="ulam">User supplied lambda values</param> 124 128 /// <param name="lambda">Output lambda values</param> 125 /// <param name="rsq">Vector of R² values for each set of coefficients along the path</param> 129 /// <param name="trainRsq">Vector of R² values on the training set for each set of coefficients 
along the path</param> 130 /// <param name="testRsq">Vector of R² values on the test set for each set of coefficients along the path</param> 126 131 /// <param name="coeff">Vector of coefficient vectors for each solution along the path</param> 127 132 /// <param name="intercept">Vector of intercepts for each solution along the path</param> … … 130 135 /// <param name="maxVars">Maximum allowed number of variables in each solution along the path (1 => all variables are allowed)</param> 131 136 private static void RunElasticNetLinearRegression(IRegressionProblemData problemData, double penalty, 132 137 int nlam, double flmin, double[] ulam, out double[] lambda, out double[] trainRsq, out double[] testRsq, out double[,] coeff, out double[] intercept, 133 138 double coeffLowerBound = double.NegativeInfinity, double coeffUpperBound = double.PositiveInfinity, 134 139 int maxVars = 1 … … 136 141 if (penalty < 0.0 || penalty > 1.0) throw new ArgumentException("0 <= penalty <= 1", "penalty"); 137 142 138 143 double[,] x; 139 double[] y; 140 int numObs; 143 double[,] trainX; 144 double[,] testX; 145 146 double[] trainY; 147 double[] testY; 148 int numTrainObs, numTestObs; 141 149 int numVars; 142 PrepareData(problemData, out x, out y, out numObs, out numVars);150 PrepareData(problemData, out trainX, out trainY, out numTrainObs, out testX, out testY, out numTestObs, out numVars); 143 151 144 152 int ka = 1; // => covariance updating algorithm 145 153 double parm = penalty; 146 double[] w = Enumerable.Repeat(1.0, numObs).ToArray(); // all observations have the same weight154 double[] w = Enumerable.Repeat(1.0, numTrainObs).ToArray(); // all observations have the same weight 147 155 int[] jd = new int[1]; // do not force to use any of the variables 148 156 double[] vp = Enumerable.Repeat(1.0, numVars).ToArray(); // all predictor variables are unpenalized … … 167 
175 int jerr = 99; 168 176 169 elnet(ka, parm, numObs, numVars, x, y, w, jd, vp, cl, ne, nx, nlam, flmin, ulam, thr, isd, intr, maxit, out lmu, out intercept, out ca, out ia, out nin, out rsq, out lambda, out nlp, out jerr); 170 177 elnet(ka, parm, numTrainObs, numVars, trainX, trainY, w, jd, vp, cl, ne, nx, nlam, flmin, ulam, thr, isd, intr, maxit, out lmu, out intercept, out ca, out ia, out nin, out trainRsq, out lambda, out nlp, out jerr); 178 179 testRsq = new double[lmu]; 171 180 coeff = new double[lmu, numVars]; 172 181 for (int solIdx = 0; solIdx < lmu; solIdx++) { … … 175 184 double[] coefficients; 176 185 double[] selectedCa = new double[nx]; 177 for (int i = 0; i < nx; i++) selectedCa[i] = ca[solIdx, i]; 178 186 for (int i = 0; i < nx; i++) { 187 selectedCa[i] = ca[solIdx, i]; 188 } 189 190 // apply to test set to calculate test R² values for each lambda step 191 double[] fn; 192 modval(intercept[solIdx], selectedCa, ia, selectedNin, numTestObs, testX, out fn); 193 OnlineCalculatorError error; 194 var r = OnlinePearsonsRCalculator.Calculate(testY, fn, out error); 195 if (error != OnlineCalculatorError.None) r = 0; 196 testRsq[solIdx] = r * r; 197 198 // uncompress coefficients 179 199 uncomp(numVars, selectedCa, ia, selectedNin, out coefficients); 180 200 for (int i = 0; i < coefficients.Length; i++) { … … 184 204 } 185 205 186 private static void PrepareData(IRegressionProblemData problemData, out double[,] x, out double[] y, out int numObs, out int numVars) { 206 private static void PrepareData(IRegressionProblemData problemData, out double[,] trainX, out double[] trainY, out int numTrainObs, 207 out double[,] testX, out double[] testY, out int numTestObs, out int numVars) { 187 208 numVars = problemData.AllowedInputVariables.Count(); 188 numObs = problemData.TrainingIndices.Count(); 189 190 x = new double[numVars, numObs]; 191 y = new double[numObs]; 209 numTrainObs = problemData.TrainingIndices.Count(); 210 numTestObs = 
problemData.TestIndices.Count(); 211 212 trainX = new double[numVars, numTrainObs]; 213 trainY = new double[numTrainObs]; 214 testX = new double[numVars, numTestObs]; 215 testY = new double[numTestObs]; 192 216 var ds = problemData.Dataset; 193 217 var targetVar = problemData.TargetVariable; 218 // train 194 219 int rIdx = 0; 195 220 foreach (var row in problemData.TrainingIndices) { 196 221 int cIdx = 0; 197 222 foreach (var var in problemData.AllowedInputVariables) { 198 x[cIdx, rIdx] = ds.GetDoubleValue(var, row);223 trainX[cIdx, rIdx] = ds.GetDoubleValue(var, row); 199 224 cIdx++; 200 225 } 201 y[rIdx] = ds.GetDoubleValue(targetVar, row); 226 trainY[rIdx] = ds.GetDoubleValue(targetVar, row); 227 rIdx++; 228 } 229 // test 230 rIdx = 0; 231 foreach(var row in problemData.TestIndices) { 232 int cIdx = 0; 233 foreach(var var in problemData.AllowedInputVariables) { 234 testX[cIdx, rIdx] = ds.GetDoubleValue(var, row); 235 cIdx++; 236 } 237 testY[rIdx] = ds.GetDoubleValue(targetVar, row); 202 238 rIdx++; 203 239 } … … 449 485 private static extern void uncomp_x64(ref int numVars, double[] ca, int[] ia, ref int nin, double[] a); 450 486 487 private static void modval(double a0, double[] ca, int[] ia, int nin, int numObs, double[,] x, out double[] fn) { 488 fn = new double[numObs]; 489 if (Environment.Is64BitProcess) { 490 modval_x64(ref a0, ca, ia, ref nin, ref numObs, x, fn); 491 } else { 492 modval_x86(ref a0, ca, ia, ref nin, ref numObs, x, fn); 493 } 494 } 495 // evaluate linear model from compressed coefficients and 496 // uncompressed predictor matrix: 497 // 498 // call modval(a0, ca, ia, nin, n, x, f); 499 // c 500 // c input: 501 // 502 // a0 = intercept 503 // ca(nx) = compressed coefficient values for a solution 504 // ia(nx) = pointers to compressed coefficients 505 // nin = number of compressed coefficients for solution 506 // n = number of predictor vectors(observations) 507 // x(n, ni) = full(uncompressed) predictor matrix 508 // 509 // output: 510 // 
511 // f(n) = model predictions 512 [DllImport("glmnetx86.dll", EntryPoint = "modval_", CallingConvention = CallingConvention.Cdecl)] 513 private static extern void modval_x86(ref double a0, double[] ca, int[] ia, ref int nin, ref int numObs, [Out] double[,] x, double[] fn); 514 [DllImport("glmnetx64.dll", EntryPoint = "modval_", CallingConvention = CallingConvention.Cdecl)] 515 private static extern void modval_x64(ref double a0, double[] ca, int[] ia, ref int nin, ref int numObs, [Out] double[,] x, double[] fn); 516 451 517 #endregion 452 518 } 
branches/HeuristicLab.Algorithms.DataAnalysis.Glmnet/3.4/HeuristicLab.Algorithms.DataAnalysis.Glmnet.csproj
r13928 r13929 123 123 </ItemGroup> 124 124 <ItemGroup> 125 <Content Include="glmnetlicensegpl2.txt" /> 125 <Content Include="glmnetlicensegpl2.txt"> 126 <CopyToOutputDirectory>Always</CopyToOutputDirectory> 127 </Content> 126 128 <Content Include="glmnetx64.dll"> 127 129 <CopyToOutputDirectory>Always</CopyToOutputDirectory> … … 133 135 <ItemGroup> 134 136 <Compile Include="ElasticNetLinearRegression.cs" /> 135 <Compile Include="Plugin.cs"> 136 <CopyToOutputDirectory>Always</CopyToOutputDirectory> 137 </Compile> 137 <Compile Include="Plugin.cs" /> 138 138 <Compile Include="Properties\AssemblyInfo.cs" /> 139 139 </ItemGroup>
Note: See TracChangeset
for help on using the changeset viewer.