Changeset 18086 for branches/2521_ProblemRefactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs
- Timestamp:
- 11/19/21 16:07:45 (2 years ago)
- File:
-
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
branches/2521_ProblemRefactoring/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs
r17226 r18086 209 209 bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) { 210 210 211 // numeric constants in the tree become variables for constant opt212 // variables in the tree become parameters (fixed values) for constant opt213 // for each parameter (variable in the original tree) we store the211 // Numeric constants in the tree become variables for parameter optimization. 212 // Variables in the tree become parameters (fixed values) for parameter optimization. 213 // For each parameter (variable in the original tree) we store the 214 214 // variable name, variable value (for factor vars) and lag as a DataForVariable object. 215 215 // A dictionary is used to find parameters … … 221 221 if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad)) 222 222 throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree."); 223 if (parameters.Count == 0) return 0.0; // gkronber:constant expressions always have a R² of 0.0223 if (parameters.Count == 0) return 0.0; // constant expressions always have a R² of 0.0 224 224 var parameterEntries = parameters.ToArray(); // order of entries must be the same for x 225 225 226 // extract initial constants226 // extract initial constants 227 227 double[] c; 228 228 if (applyLinearScaling) { … … 270 270 try { 271 271 alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state); 272 alglib.lsfitsetcond(state, 0.0, 0.0,maxIterations);272 alglib.lsfitsetcond(state, 0.0, maxIterations); 273 273 alglib.lsfitsetxrep(state, iterationCallback != null); 274 //alglib.lsfitsetgradientcheck(state, 0.001);275 274 alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter); 276 275 alglib.lsfitresults(state, out retVal, out c, out rep); … … 285 284 286 285 
//retVal == -7 => constant optimization failed due to wrong gradient 287 if (retVal != -7) { 286 // -8 => optimizer detected NAN / INF in the target 287 // function and/or gradient 288 if (retVal != -7 && retVal != -8) { 288 289 if (applyLinearScaling) { 289 290 var tmp = new double[c.Length - 2]; … … 309 310 VariableTreeNodeBase variableTreeNodeBase = node as VariableTreeNodeBase; 310 311 FactorVariableTreeNode factorVarTreeNode = node as FactorVariableTreeNode; 311 if (constantTreeNode != null) 312 if (constantTreeNode != null) { 313 if (constantTreeNode.Parent.Symbol is Power 314 && constantTreeNode.Parent.GetSubtree(1) == constantTreeNode) continue; // exponents in powers are not optimized (see TreeToAutoDiffTermConverter) 312 315 constantTreeNode.Value = constants[i++]; 313 else if (updateVariableWeights && variableTreeNodeBase != null)316 } else if (updateVariableWeights && variableTreeNodeBase != null) 314 317 variableTreeNodeBase.Weight = constants[i++]; 315 318 else if (factorVarTreeNode != null) {
Note: See TracChangeset
for help on using the changeset viewer.