Changeset 15322 for branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental

- Timestamp: 08/10/17 17:16:25 (7 years ago)
- Location: branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental
- Files: 2 edited
Legend:
- unprefixed lines: unmodified context
- lines prefixed with "+": added in r15322
- lines prefixed with "-": removed
branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/SymbolicRegressionConstantOptimizationEvaluator.cs (r15314 → r15322)

  using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;
  using DiffSharp.Interop.Float64;
+ using System.Diagnostics;

  namespace HeuristicLab.Algorithms.DataAnalysis.Experimental {
…
  // extract inital constants
- double[] c = new double[initialConstants.Length + 2];
+ double[] c = new double[initialConstants.Length];
  double[] s = new double[c.Length];
  {
-   c[0] = 1.0;
-   c[1] = 0.0;
-   Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
+   Array.Copy(initialConstants, 0, c, 0, initialConstants.Length);
+
+   // c[0] = 1.0;
+   // c[1] = 0.0;
+   // Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);

    // s[0] = 1.0;
…
  IDataset ds = problemData.Dataset;
- double[,] x = new double[ds.Rows, parameters.Count];
+ double[,] x = new double[rows.Count(), parameters.Count];
  int col = 0;
  int row = 0;
…
  row = 0;
  // copy training rows
  foreach (var r in rows) {
    if (ds.VariableHasType<double>(info.variableName)) {
      x[row, col] = ds.GetDoubleValue(info.variableName, r + info.lag);
…
  // only count constraints
  var constraintRows = Enumerable.Range(0, problemData.Dataset.Rows).Where(rIdx => !string.IsNullOrEmpty(ds.GetStringValue("f(x) constraint-type", rIdx)));
- double[,] constraints = new double[constraintRows.Count(), parameters.Count + 1]; // +1 for constraint for f(x)
- string[,] comp = new string[constraintRows.Count(), parameters.Count + 1];
+
+ double[,] constraintX = new double[constraintRows.Count(), parameters.Count];     // inputs for constraint values
+ double[,] constraints = new double[constraintRows.Count(), parameters.Count + 1]; // constraint values, +1 column for constraint values for f(x)
+ string[,] comp = new string[constraintRows.Count(), parameters.Count + 1];        // comparison types <= = >=
  int eqConstraints = 0;
  int ieqConstraints = 0;
…
  var compColName = string.Format("df/d({0}) constraint-type", info.variableName);

- if (ds.VariableNames.Contains(colName)) {
-   foreach (var r in constraintRows) {
+ foreach (var r in constraintRows) {
+   constraintX[row, col] = ds.GetDoubleValue(info.variableName, r);
+   if (ds.VariableNames.Contains(colName)) {
      constraints[row, col] = ds.GetDoubleValue(colName, r);
      comp[row, col] = ds.GetStringValue(compColName, r);
…
      if (comp[row, col] == "EQ") eqConstraints++;
      else if (comp[row, col] == "LEQ" || comp[row, col] == "GEQ") ieqConstraints++;
-
-     row++;
    }
+
+   row++;
  }
  col++;
…
  double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();

- alglib.ndimensional_jac jac = CreateJac(x, y, ds, func);
- double rho = 1000;
+ alglib.ndimensional_jac jac = CreateJac(x, y, constraintX, constraints, comp, func);
+ double rho = 10000;
  int outeriters = 3;
  int updateFreq = 10;
…
  alglib.minnlcsetscale(state, s);
  alglib.minnlcsetprecexactlowrank(state, updateFreq);
- // TODO set constraints;
  alglib.minnlcsetnlc(state, eqConstraints, ieqConstraints);
  alglib.minnlcoptimize(state, jac, null, null);
…
  // -8 => integrity check failed (e.g. gradient NaN)
  if (rep.terminationtype != -7 && rep.terminationtype != -8)
-   UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
+   UpdateConstants(tree, c.ToArray(), updateVariableWeights);
+ else {
+   UpdateConstants(tree, Enumerable.Repeat(0.0, c.Length).ToArray(), updateVariableWeights);
+ }
  var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
-
- if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
+ Console.WriteLine("{0:N4} {1:N4} {2} {3}", originalQuality, quality, state.fi.Skip(1).Where(fii => fii > 0).Count(), rep.terminationtype);
+
+ if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.ToArray(), updateVariableWeights);
  if (
    // originalQuality - quality > 0.001 ||
    double.IsNaN(quality)) {
-   UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
+   UpdateConstants(tree, originalConstants.ToArray(), updateVariableWeights);
    return originalQuality;
  }
…

  private static alglib.ndimensional_jac CreateJac(
-   double[,] x,           // x is larger than y
+   double[,] x,           // x is same size as y
    double[] y,            // only targets
-   // double[,] constraints, // df/d(xi), same order as for x, same number of rows as x less columns
-   // string[,] comparison,  // {LEQ, GEQ, EQ} same size as constraints
-   IDataset ds,
+   double[,] constraintX, // inputs for constraints
+   double[,] constraints, // df/d(xi), same size as constraintX, same number of rows as x less columns
+   string[,] comparison,  // {LEQ, GEQ, EQ} same size as constraints
    Func<DV, D> func) {
+   Trace.Assert(x.GetLength(0) == y.Length);
+   Trace.Assert(x.GetLength(1) == constraintX.GetLength(1) - 1);
+   Trace.Assert(constraintX.GetLength(0) == constraints.GetLength(0));
+   Trace.Assert(constraintX.GetLength(1) == constraints.GetLength(1));
+   Trace.Assert(constraints.GetLength(0) == comparison.GetLength(0));
+   Trace.Assert(constraints.GetLength(1) == comparison.GetLength(1));
    return (double[] c, double[] fi, double[,] jac, object o) => {
      // objective function is sum of squared errors
…
      int fidx = 1;
-     var constraintRows = Enumerable.Range(0, ds.Rows).Where(rIdx => !string.IsNullOrEmpty(ds.GetStringValue("f(x) constraint-type", rIdx)));

      // eq constraints
-     foreach (var rowIdx in constraintRows) {
-       foreach (var varName in ds.VariableNames.Where(vn => vn.EndsWith("constraint-type"))) {
-         if (ds.GetStringValue(varName, rowIdx) == "EQ") {
+     for (int rowIdx = 0; rowIdx < constraintX.GetLength(0); rowIdx++) {
+       for (var colIdx = 0; colIdx < constraintX.GetLength(1); colIdx++) {
+         if (comparison[rowIdx, colIdx] == "EQ") {
            throw new NotSupportedException();
          }
…
      }
      // ineq constraints
-     foreach (var rowIdx in constraintRows) {
-       XXX
+     for (int rowIdx = 0; rowIdx < constraintX.GetLength(0); rowIdx++) {
        for (int colIdx = 0; colIdx < constraints.GetLength(1); colIdx++) {
          // there is a constraint value
…
            : comparison[rowIdx, colIdx] == "GEQ" ? -1.0 : 0.0;

+         // copy x_i to the beginning of the function parameters vector
+         for (int cIdx = 0; cIdx < nParams; cIdx++)
+           p[cIdx] = constraintX[rowIdx, cIdx];
+
          // f(x) constraint
          if (colIdx == constraints.GetLength(1) - 1) {
-           // copy x_i to the beginning of the function parameters vector
-           for (int cIdx = 0; cIdx < nParams; cIdx++)
-             p[cIdx] = x[rowIdx, cIdx];

            fi[fidx] = factor * ((double)(func(p)) - constraints[rowIdx, colIdx]);
…
            var g = (double[])AD.Grad(func, p);
            fi[fidx] = factor * g[colIdx];
+
            var hess = AD.Hessian(func, p);
            for (int jacIdx = 0; jacIdx < c.Length; jacIdx++) {
-             jac[fidx, jacIdx] = factor * (double)hess[nParams + colIdx, nParams + jacIdx]; // skip the elements for the variable values
+             jac[fidx, jacIdx] = factor * (double)hess[colIdx, nParams + jacIdx]; // skip the elements for the variable values
            }
            fidx++;
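
The evaluator above drives ALGLIB's nonlinearly constrained optimizer (minnlc with the AUL solver) through a callback in which fi[0] and jac row 0 hold the objective (the sum of squared errors) and every further fi/jac row holds one constraint, equality constraints first, then inequality constraints in the form g(c) <= 0; this is why CreateJac starts at fidx = 1 and multiplies GEQ constraints by -1. Below is a minimal, self-contained sketch of that calling pattern, assuming the ALGLIB 3.x C# API used in the changeset; the class name, the toy objective (c0 - 3)^2 and the constraint c0 <= 2 are placeholders of mine, not HeuristicLab code.

  using System;

  public static class MinNlcLayoutSketch {
    public static void Main() {
      // toy problem: minimize (c0 - 3)^2 subject to c0 - 2 <= 0
      double[] c = new double[] { 0.0 };   // initial point (stands in for the extracted constants)
      double[] s = new double[] { 1.0 };   // variable scaling, cf. minnlcsetscale(state, s) above

      alglib.minnlcstate state;
      alglib.minnlcreport rep;
      alglib.minnlccreate(c.Length, c, out state);
      alglib.minnlcsetalgoaul(state, 1000.0, 3);   // penalty rho and outer iterations, cf. rho/outeriters above
      alglib.minnlcsetscale(state, s);
      alglib.minnlcsetnlc(state, 0, 1);            // 0 equality constraints, 1 inequality constraint

      alglib.ndimensional_jac jac = (double[] cc, double[] fi, double[,] j, object o) => {
        fi[0] = (cc[0] - 3.0) * (cc[0] - 3.0);     // objective -> fi[0], its gradient -> jac row 0
        j[0, 0] = 2.0 * (cc[0] - 3.0);
        fi[1] = cc[0] - 2.0;                       // one row per constraint; LEQ stays as-is, a GEQ row would be negated
        j[1, 0] = 1.0;
      };

      alglib.minnlcoptimize(state, jac, null, null);
      alglib.minnlcresults(state, out c, out rep);
      Console.WriteLine("c0 = {0:N4}, termination = {1}", c[0], rep.terminationtype);
    }
  }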
branches/MathNetNumerics-Exploration-2789/HeuristicLab.Algorithms.DataAnalysis.Experimental/TreeToDiffSharpConverter.cs (r15313 → r15322)

  // }
  if (node.Symbol is StartSymbol) {
-   var alpha = Expression.Call(dv, DvIndexer, Expression.Constant(paramIdx++));
-   var beta = Expression.Call(dv, DvIndexer, Expression.Constant(paramIdx++));
-
-   return Expression.Call(d_Add_d, beta,
-     Expression.Call(d_Mul_d, alpha, MakeExpr(node.GetSubtree(0), parameters, dv)));
+   //var alpha = Expression.Call(dv, DvIndexer, Expression.Constant(paramIdx++));
+   //var beta = Expression.Call(dv, DvIndexer, Expression.Constant(paramIdx++));
+
+   // return Expression.Call(d_Add_d, beta,
+   //   Expression.Call(d_Mul_d, alpha, MakeExpr(node.GetSubtree(0), parameters, dv)));
+   return MakeExpr(node.GetSubtree(0), parameters, dv);
  }
  throw new ConversionException();
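
With the alpha/beta linear-scaling terms dropped, TreeToDiffSharpConverter now returns the converted tree as a plain Func<DV, D>, which CreateJac in the first file evaluates and differentiates via AD.Grad and AD.Hessian. The following is a small usage sketch under the assumption that DiffSharp.Interop.Float64 provides the implicit double[]/DV and explicit D/double conversions the code above relies on; the function f is a hand-written stand-in for a converted tree, not output of the converter.

  using System;
  using DiffSharp.Interop.Float64;

  public static class DiffSharpUsageSketch {
    public static void Main() {
      // stand-in for a converted expression: f(v) = v0^2 + 3*v1
      Func<DV, D> f = v => v[0] * v[0] + 3.0 * v[1];

      double[] p = new double[] { 2.0, 5.0 };      // variable values followed by constants, as in CreateJac's p

      double value = (double)f(p);                 // 19
      double[] grad = (double[])AD.Grad(f, p);     // { 4, 3 }
      var hess = AD.Hessian(f, p);                 // 2x2 Hessian, hess[0, 0] == 2

      Console.WriteLine("{0} {1} {2} {3}", value, grad[0], grad[1], (double)hess[0, 0]);
    }
  }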