Changeset 15515

Timestamp: 12/12/17 16:16:12
Location: stable
Files: 6 edited
stable
Property svn:mergeinfo changed: /trunk/sources merged: 15447-15448,15480-15481,15483

stable/HeuristicLab.Problems.DataAnalysis.Symbolic
Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic merged: 15447,15480-15481

stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression
Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression merged: 15447-15448,15480-15481,15483

stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4
Property svn:mergeinfo changed: /trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4 merged: 15447-15448,15480-15481,15483
stable/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs (r15406 → r15515)

    using HeuristicLab.Data;
    using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
+   using HeuristicLab.Optimization;
    using HeuristicLab.Parameters;
    using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
…
    private const string UpdateVariableWeightsParameterName = "Update Variable Weights";
+
+   private const string FunctionEvaluationsResultParameterName = "Constants Optimization Function Evaluations";
+   private const string GradientEvaluationsResultParameterName = "Constants Optimization Gradient Evaluations";
+   private const string CountEvaluationsParameterName = "Count Function and Gradient Evaluations";
+
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
…
    public IFixedValueParameter<BoolValue> UpdateVariableWeightsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateVariableWeightsParameterName]; }
    }
+
+   public IResultParameter<IntValue> FunctionEvaluationsResultParameter {
+     get { return (IResultParameter<IntValue>)Parameters[FunctionEvaluationsResultParameterName]; }
+   }
+   public IResultParameter<IntValue> GradientEvaluationsResultParameter {
+     get { return (IResultParameter<IntValue>)Parameters[GradientEvaluationsResultParameterName]; }
+   }
+   public IFixedValueParameter<BoolValue> CountEvaluationsParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[CountEvaluationsParameterName]; }
+   }
…
      get { return UpdateVariableWeightsParameter.Value.Value; }
      set { UpdateVariableWeightsParameter.Value.Value = value; }
    }
+
+   public bool CountEvaluations {
+     get { return CountEvaluationsParameter.Value.Value; }
+     set { CountEvaluationsParameter.Value.Value = value; }
+   }
…
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)) { Hidden = true });
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)) { Hidden = true });
+
+     Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
+     Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+     Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
…
      if (!Parameters.ContainsKey(UpdateVariableWeightsParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateVariableWeightsParameterName, "Determines if the variable weights in the tree should be optimized.", new BoolValue(true)));
+
+     if (!Parameters.ContainsKey(CountEvaluationsParameterName))
+       Parameters.Add(new FixedValueParameter<BoolValue>(CountEvaluationsParameterName, "Determines if function and gradient evaluation should be counted.", new BoolValue(false)));
+     if (Parameters.ContainsKey(FunctionEvaluationsResultParameterName) && Parameters.ContainsKey(GradientEvaluationsResultParameterName))
+       CountEvaluations = true;
+
+     if (!Parameters.ContainsKey(FunctionEvaluationsResultParameterName))
+       Parameters.Add(new ResultParameter<IntValue>(FunctionEvaluationsResultParameterName, "The number of function evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
+     if (!Parameters.ContainsKey(GradientEvaluationsResultParameterName))
+       Parameters.Add(new ResultParameter<IntValue>(GradientEvaluationsResultParameterName, "The number of gradient evaluations performed by the constants optimization evaluator", "Results", new IntValue()));
    }
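Taken together, these hunks (back-ported from the trunk revisions merged here, r15447-r15448, r15480-r15481, r15483) add optional evaluation counting: it is off by default and, when enabled, surfaces two integer totals in the run's "Results" collection via ResultParameter<IntValue>. A minimal usage sketch, assuming the evaluator is otherwise wired up by the symbolic regression problem as usual:

  // hedged sketch: opting in to the new counters (names taken from the diff above)
  var evaluator = new SymbolicRegressionConstantOptimizationEvaluator();
  evaluator.CountEvaluations = true;  // sets "Count Function and Gradient Evaluations"
  // after a run, the accumulated totals appear in the "Results" collection as
  //   "Constants Optimization Function Evaluations" and
  //   "Constants Optimization Gradient Evaluations"

Note the backwards-compatibility logic in the deserialization hook: if an old file already contains both result parameters, counting is switched on before the parameters are (re-)added.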
+   private static readonly object locker = new object();
    public override IOperation InstrumentedApply() {
      var solution = SymbolicExpressionTreeParameter.ActualValue;
…
      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
+       var counter = new EvaluationsCounter();
        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
-         constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree);
+         constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value, updateVariableWeights: UpdateVariableWeights, lowerEstimationLimit: EstimationLimitsParameter.ActualValue.Lower, upperEstimationLimit: EstimationLimitsParameter.ActualValue.Upper, updateConstantsInTree: UpdateConstantsInTree, counter: counter);

        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
…
          quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
        }
+
+       if (CountEvaluations) {
+         lock (locker) {
+           FunctionEvaluationsResultParameter.ActualValue.Value += counter.FunctionEvaluations;
+           GradientEvaluationsResultParameter.ActualValue.Value += counter.GradientEvaluations;
+         }
+       }
+
      } else {
        var evaluationRows = GenerateRowsToEvaluate();
…
      EstimationLimitsParameter.ExecutionContext = context;
      ApplyLinearScalingParameter.ExecutionContext = context;
+     FunctionEvaluationsResultParameter.ExecutionContext = context;
+     GradientEvaluationsResultParameter.ExecutionContext = context;

      // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
…
      EstimationLimitsParameter.ExecutionContext = null;
      ApplyLinearScalingParameter.ExecutionContext = null;
+     FunctionEvaluationsResultParameter.ExecutionContext = null;
+     GradientEvaluationsResultParameter.ExecutionContext = null;

      return r2;
    }
+
+   public class EvaluationsCounter {
+     public int FunctionEvaluations = 0;
+     public int GradientEvaluations = 0;
+   }
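The shared result values can be incremented by many evaluator instances running concurrently (e.g. under a parallel engine), hence the single static locker guarding the read-modify-write. Each InstrumentedApply call first counts into its own private EvaluationsCounter and only takes the lock once per evaluation, which keeps contention low. A stripped-down sketch of the same aggregation pattern, with hypothetical standalone names rather than HeuristicLab API:

  // hedged sketch: aggregating per-evaluation counts into a shared total
  static class EvaluationTotals {
    private static readonly object locker = new object();
    public static int FunctionEvaluations;

    public static void Accumulate(SymbolicRegressionConstantOptimizationEvaluator.EvaluationsCounter local) {
      lock (locker) {  // serializes concurrent '+=' from parallel evaluators
        FunctionEvaluations += local.FunctionEvaluations;
      }
    }
  }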
…
      int maxIterations, bool updateVariableWeights = true,
      double lowerEstimationLimit = double.MinValue, double upperEstimationLimit = double.MaxValue,
-     bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null) {
+     bool updateConstantsInTree = true, Action<double[], double, object> iterationCallback = null, EvaluationsCounter counter = null) {

      // numeric constants in the tree become variables for constant opt
…
      TreeToAutoDiffTermConverter.ParametricFunction func;
      TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad;
-     if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, out parameters, out initialConstants, out func, out func_grad))
+     if (!TreeToAutoDiffTermConverter.TryConvertToAutoDiff(tree, updateVariableWeights, applyLinearScaling, out parameters, out initialConstants, out func, out func_grad))
        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
      if (parameters.Count == 0) return 0.0; // gkronber: constant expressions always have a R² of 0.0
-
      var parameterEntries = parameters.ToArray(); // order of entries must be the same for x

      //extract inital constants
-     double[] c = new double[initialConstants.Length + 2];
-     {
+     double[] c;
+     if (applyLinearScaling) {
+       c = new double[initialConstants.Length + 2];
        c[0] = 0.0;
        c[1] = 1.0;
        Array.Copy(initialConstants, 0, c, 2, initialConstants.Length);
-     }
-     double[] originalConstants = (double[])c.Clone();
+     } else {
+       c = (double[])initialConstants.Clone();
+     }
+
      double originalQuality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
+
+     if (counter == null) counter = new EvaluationsCounter();
+     var rowEvaluationsCounter = new EvaluationsCounter();

      alglib.lsfitstate state;
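rowEvaluationsCounter collects the raw callback counts for this single fit. Because alglib's lsfit invokes the packed function once per training row, those raw counts are per-row and are divided by n in a hunk further down to report whole-dataset evaluations. Hedged arithmetic under that assumption:

  // assuming n = 100 training rows and 10 full passes over the data, the
  // per-row function callback fires 100 * 10 = 1000 times, and
  // rowEvaluationsCounter.FunctionEvaluations / n reports 10 function evaluations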
…
      alglib.lsfitsetxrep(state, iterationCallback != null);
      //alglib.lsfitsetgradientcheck(state, 0.001);
-     alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, null);
+     alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, xrep, rowEvaluationsCounter);
      alglib.lsfitresults(state, out retVal, out c, out rep);
-     }
-     catch (ArithmeticException) {
+     } catch (ArithmeticException) {
        return originalQuality;
-     }
-     catch (alglib.alglibexception) {
+     } catch (alglib.alglibexception) {
        return originalQuality;
      }
+
+     counter.FunctionEvaluations += rowEvaluationsCounter.FunctionEvaluations / n;
+     counter.GradientEvaluations += rowEvaluationsCounter.GradientEvaluations / n;

      //retVal == -7 => constant optimization failed due to wrong gradient
-     if (retVal != -7) UpdateConstants(tree, c.Skip(2).ToArray(), updateVariableWeights);
+     if (retVal != -7) {
+       if (applyLinearScaling) {
+         var tmp = new double[c.Length - 2];
+         Array.Copy(c, 2, tmp, 0, tmp.Length);
+         UpdateConstants(tree, tmp, updateVariableWeights);
+       } else UpdateConstants(tree, c, updateVariableWeights);
+     }
      var quality = SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

-     if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
+     if (!updateConstantsInTree) UpdateConstants(tree, initialConstants, updateVariableWeights);
+
      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
-       UpdateConstants(tree, originalConstants.Skip(2).ToArray(), updateVariableWeights);
+       UpdateConstants(tree, initialConstants, updateVariableWeights);
        return originalQuality;
      }
…
      return (double[] c, double[] x, ref double fx, object o) => {
        fx = func(c, x);
+       var counter = (EvaluationsCounter)o;
+       counter.FunctionEvaluations++;
      };
    }

    private static alglib.ndimensional_pgrad CreatePGrad(TreeToAutoDiffTermConverter.ParametricFunctionGradient func_grad) {
      return (double[] c, double[] x, ref double fx, double[] grad, object o) => {
-       var tupel = func_grad(c, x);
-       fx = tupel.Item2;
-       Array.Copy(tupel.Item1, grad, grad.Length);
+       var tuple = func_grad(c, x);
+       fx = tuple.Item2;
+       Array.Copy(tuple.Item1, grad, grad.Length);
+       var counter = (EvaluationsCounter)o;
+       counter.GradientEvaluations++;
      };
    }
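The counting works because alglib hands the object passed as the last argument of lsfitfit back to every callback as its object o parameter, so per-fit state travels without globals. A minimal sketch of that pattern, assuming the alglib C# delegate types match those used above (the model body is a stand-in):

  // hedged sketch: threading per-fit state through alglib callbacks
  var counter = new SymbolicRegressionConstantOptimizationEvaluator.EvaluationsCounter();
  alglib.ndimensional_pfunc f = (double[] c, double[] x, ref double fx, object o) => {
    fx = c[0] * x[0];  // stand-in for the real parametric model
    ((SymbolicRegressionConstantOptimizationEvaluator.EvaluationsCounter)o).FunctionEvaluations++;
  };
  // alglib.lsfitfit(state, f, grad, xrep, counter);  // wired as in the diff above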
stable/HeuristicLab.Problems.DataAnalysis.Symbolic/3.4/Converters/TreeToAutoDiffTermConverter.cs (r15145 → r15515)

    #endregion

-   public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable,
+   public static bool TryConvertToAutoDiff(ISymbolicExpressionTree tree, bool makeVariableWeightsVariable, bool addLinearScalingTerms,
      out List<DataForVariable> parameters, out double[] initialConstants,
      out ParametricFunction func,
…
      // use a transformator object which holds the state (variable list, parameter list, ...) for recursive transformation of the tree
-     var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable);
+     var transformator = new TreeToAutoDiffTermConverter(makeVariableWeightsVariable, addLinearScalingTerms);
      AutoDiff.Term term;
      try {
…
    private readonly List<AutoDiff.Variable> variables;
    private readonly bool makeVariableWeightsVariable;
+   private readonly bool addLinearScalingTerms;

-   private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable) {
+   private TreeToAutoDiffTermConverter(bool makeVariableWeightsVariable, bool addLinearScalingTerms) {
      this.makeVariableWeightsVariable = makeVariableWeightsVariable;
+     this.addLinearScalingTerms = addLinearScalingTerms;
      this.initialConstants = new List<double>();
      this.parameters = new Dictionary<DataForVariable, AutoDiff.Variable>();
…
      }
      if (node.Symbol is StartSymbol) {
-       var alpha = new AutoDiff.Variable();
-       var beta = new AutoDiff.Variable();
-       variables.Add(beta);
-       variables.Add(alpha);
-       return ConvertToAutoDiff(node.GetSubtree(0)) * alpha + beta;
+       if (addLinearScalingTerms) {
+         // scaling variables α, β are given at the beginning of the parameter vector
+         var alpha = new AutoDiff.Variable();
+         var beta = new AutoDiff.Variable();
+         variables.Add(beta);
+         variables.Add(alpha);
+         var t = ConvertToAutoDiff(node.GetSubtree(0));
+         return t * alpha + beta;
+       } else return ConvertToAutoDiff(node.GetSubtree(0));
      }
      throw new ConversionException();
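The new addLinearScalingTerms flag makes the previously unconditional linear scaling optional and keeps both files consistent: the converter registers β before α, so on the optimizer side the constants vector starts with c[0] = β = 0.0 and c[1] = α = 1.0, i.e. the fitted model is ŷ = α·f(x, c) + β seeded with the identity scaling, and UpdateConstants skips the first two entries. A minimal sketch of the wrapping step, assuming the AutoDiff library used by the converter and a converted model term t given from the recursion:

  // hedged sketch: wrapping a converted model term t with linear scaling terms
  var variables = new List<AutoDiff.Variable>();
  var beta = new AutoDiff.Variable();   // offset, seeded with 0.0 on the optimizer side
  var alpha = new AutoDiff.Variable();  // slope, seeded with 1.0
  variables.Add(beta);                  // parameter order: [β, α, ...constants]
  variables.Add(alpha);
  AutoDiff.Term scaled = t * alpha + beta;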