Changeset 8823 for trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective
- Timestamp: 10/19/12 15:00:31
- File: 1 edited
Legend:
- Unmodified lines are shown without a prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
trunk/sources/HeuristicLab.Problems.DataAnalysis.Symbolic.Regression/3.4/SingleObjective/Evaluators/SymbolicRegressionConstantOptimizationEvaluator.cs
r8730 → r8823

    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
+   private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";

    private const string EvaluatedTreesResultName = "EvaluatedTrees";
…
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }
+   public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
+     get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
+   }

    public IntValue ConstantOptimizationIterations {
…
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }
+   public bool UpdateConstantsInTree {
+     get { return UpdateConstantsInTreeParameter.Value.Value; }
+     set { UpdateConstantsInTreeParameter.Value.Value = value; }
+   }
…
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
+     Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));

      Parameters.Add(new LookupParameter<IntValue>(EvaluatedTreesResultName));
…
    public override IDeepCloneable Clone(Cloner cloner) {
      return new SymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }
+
+   [StorableHook(HookType.AfterDeserialization)]
+   private void AfterDeserialization() {
+     if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
+       Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
+   }
…
        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
          constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
-         EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower,
+         EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree,
          EvaluatedTreesParameter.ActualValue, EvaluatedTreeNodesParameter.ActualValue);
        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
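The new UpdateConstantsInTree parameter, and the matching updateConstantsInTree argument threaded through to OptimizeConstants in the hunks below, controls whether the optimized constants are written back into the evaluated tree. A minimal call-site sketch, not part of the changeset; the helper name EvaluateWithoutTreeUpdate and the concrete argument values are placeholders chosen for illustration:

    // Sketch only: evaluate the constant-optimized quality without modifying the
    // original tree. With updateConstantsInTree: false the method works on an
    // internal clone (see the OptimizeConstants hunks further down).
    private static double EvaluateWithoutTreeUpdate(
        ISymbolicDataAnalysisExpressionTreeInterpreter interpreter,
        ISymbolicExpressionTree tree,
        IRegressionProblemData problemData,
        IEnumerable<int> rows) {
      return SymbolicRegressionConstantOptimizationEvaluator.OptimizeConstants(
        interpreter, tree, problemData, rows,
        applyLinearScaling: true,
        maxIterations: 10,
        updateConstantsInTree: false);  // new in r8823; defaults to true
    }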
…
    }

+   #region derivations of functions
    // create function factory for arctangent
    private readonly Func<Term, UnaryFunc> arctan = UnaryFunc.Factory(
-     x => Math.Atan(x),      // evaluate
-     x => 1 / (1 + x * x));  // derivative of atan
+     eval: Math.Atan,
+     diff: x => 1 / (1 + x * x));
    private static readonly Func<Term, UnaryFunc> sin = UnaryFunc.Factory(
-     x => Math.Sin(x),
-     x => Math.Cos(x));
+     eval: Math.Sin,
+     diff: Math.Cos);
    private static readonly Func<Term, UnaryFunc> cos = UnaryFunc.Factory(
-     x => Math.Cos(x),
-     x => -Math.Sin(x));
+     eval: Math.Cos,
+     diff: x => -Math.Sin(x));
    private static readonly Func<Term, UnaryFunc> tan = UnaryFunc.Factory(
-     x => Math.Tan(x),
-     x => 1 + Math.Tan(x) * Math.Tan(x));
+     eval: Math.Tan,
+     diff: x => 1 + Math.Tan(x) * Math.Tan(x));
    private static readonly Func<Term, UnaryFunc> square = UnaryFunc.Factory(
-     x => x * x,
-     x => 2 * x);
+     eval: x => x * x,
+     diff: x => 2 * x);
    private static readonly Func<Term, UnaryFunc> erf = UnaryFunc.Factory(
-     x => alglib.errorfunction(x),
-     x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI));
+     eval: alglib.errorfunction,
+     diff: x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI));
    private static readonly Func<Term, UnaryFunc> norm = UnaryFunc.Factory(
-     x => alglib.normaldistribution(x),
-     x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI)
-   );
+     eval: alglib.normaldistribution,
+     diff: x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI));
+   #endregion
…
    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
-     IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {
+     IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true, IntValue evaluatedTrees = null, IntValue evaluatedTreeNodes = null) {

      List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
…
        return 0.0;
      }
+     var newTree = tree;
+     if (!updateConstantsInTree) newTree = (ISymbolicExpressionTree)tree.Clone();
      {
        // only when no error occurred
        // set constants in tree
        int i = 2;
-       foreach (var node in terminalNodes) {
+       foreach (var node in newTree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
          ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
          VariableTreeNode variableTreeNode = node as VariableTreeNode;
…
            variableTreeNode.Weight = c[i++];
        }
      }
-     return SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
+     return SymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, newTree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);
    }
…
            return false;
          }
+         terms.Add(t);
+       }
+       term = AutoDiff.TermBuilder.Sum(terms);
+       return true;
+     }
+     if (node.Symbol is Subtraction) {
+       List<AutoDiff.Term> terms = new List<Term>();
+       for (int i = 0; i < node.SubtreeCount; i++) {
+         AutoDiff.Term t;
+         if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
+           term = null;
+           return false;
+         }
+         if (i > 0) t = -t;
          terms.Add(t);
        }
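For reference, the derivative lambdas registered in the factories above encode the following identities; this summary is editorial and not part of the changeset (the last equality is only an algebraic simplification of the expression used for norm):

    \frac{d}{dx}\arctan x = \frac{1}{1+x^{2}}, \qquad
    \frac{d}{dx}\tan x = 1 + \tan^{2} x, \qquad
    \frac{d}{dx}\operatorname{erf}(x) = \frac{2}{\sqrt{\pi}}\, e^{-x^{2}}, \qquad
    -\frac{e^{-x^{2}}\sqrt{e^{x^{2}}}\, x}{\sqrt{2\pi}} = -\frac{x}{\sqrt{2\pi}}\, e^{-x^{2}/2}

The new Subtraction branch in TryTransformToAutoDiff reuses AutoDiff.TermBuilder.Sum by negating every operand after the first, i.e. it builds

    t_{1} - t_{2} - \cdots - t_{n} = t_{1} + \sum_{i=2}^{n} (-t_{i})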