Free cookie consent management tool by TermsFeed Policy Generator

source: branches/GrammaticalEvolution/HeuristicLab.Problems.GrammaticalEvolution/Symbolic/GESymbolicRegressionConstantOptimizationEvaluator.cs @ 10227

Last change on this file since 10227 was 10227, checked in by sawinkle, 10 years ago

#2109: Added four additional evaluators for the Symbolic Regression problem, namely

  • ConstantOptimizationEvaluator
  • MaxAbsoluteErrorEvaluator
  • MeanAbsoluteErrorEvaluator
  • MeanSquaredErrorEvaluator
File size: 22.6 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2013 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Collections.Generic;
24using System.Linq;
25using AutoDiff;
26using HeuristicLab.Common;
27using HeuristicLab.Core;
28using HeuristicLab.Data;
29using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
30using HeuristicLab.Parameters;
31using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
32using HeuristicLab.Problems.DataAnalysis;
33using HeuristicLab.Problems.DataAnalysis.Symbolic;
34
35namespace HeuristicLab.Problems.GrammaticalEvolution {
  [Item("Constant Optimization Evaluator", "Calculates Pearson R² of a symbolic regression solution and optimizes the constant used.")]
  [StorableClass]
  public class GESymbolicRegressionConstantOptimizationEvaluator : GESymbolicRegressionSingleObjectiveEvaluator {
    // Keys under which this evaluator's parameters are registered in the Parameters collection.
    private const string ConstantOptimizationIterationsParameterName = "ConstantOptimizationIterations";
    private const string ConstantOptimizationImprovementParameterName = "ConstantOptimizationImprovement";
    private const string ConstantOptimizationProbabilityParameterName = "ConstantOptimizationProbability";
    private const string ConstantOptimizationRowsPercentageParameterName = "ConstantOptimizationRowsPercentage";
    private const string UpdateConstantsInTreeParameterName = "UpdateConstantsInSymbolicExpressionTree";

    // Typed accessors for the parameter objects added in the constructor.
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    // NOTE(review): this parameter is registered but its value is never read anywhere in this file — confirm it is used by a subclass or remove it.
    public IFixedValueParameter<DoubleValue> ConstantOptimizationImprovementParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[ConstantOptimizationImprovementParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationProbabilityParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationProbabilityParameterName]; }
    }
    public IFixedValueParameter<PercentValue> ConstantOptimizationRowsPercentageParameter {
      get { return (IFixedValueParameter<PercentValue>)Parameters[ConstantOptimizationRowsPercentageParameterName]; }
    }
    public IFixedValueParameter<BoolValue> UpdateConstantsInTreeParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[UpdateConstantsInTreeParameterName]; }
    }

    // Convenience accessors for the current parameter values.
    public IntValue ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value; }
    }
    public DoubleValue ConstantOptimizationImprovement {
      get { return ConstantOptimizationImprovementParameter.Value; }
    }
    public PercentValue ConstantOptimizationProbability {
      get { return ConstantOptimizationProbabilityParameter.Value; }
    }
    public PercentValue ConstantOptimizationRowsPercentage {
      get { return ConstantOptimizationRowsPercentageParameter.Value; }
    }
    // True if the optimized constants should be written back into the evaluated tree (see Apply/OptimizeConstants).
    public bool UpdateConstantsInTree {
      get { return UpdateConstantsInTreeParameter.Value.Value; }
      set { UpdateConstantsInTreeParameter.Value.Value = value; }
    }

    // Pearson R² is a maximization objective.
    public override bool Maximization {
      get { return true; }
    }
81
    // Deserialization constructor used by the Storable persistence framework.
    [StorableConstructor]
    protected GESymbolicRegressionConstantOptimizationEvaluator(bool deserializing) : base(deserializing) { }
    // Copy constructor used by Clone(); this class adds no extra state, so the base copy suffices.
    protected GESymbolicRegressionConstantOptimizationEvaluator(GESymbolicRegressionConstantOptimizationEvaluator original, Cloner cloner)
      : base(original, cloner) {
    }
    // Default constructor: registers all constant-optimization parameters with their defaults.
    // NOTE(review): the trailing 'true' on the first four parameters presumably hides them from the GUI — confirm against the FixedValueParameter API.
    public GESymbolicRegressionConstantOptimizationEvaluator()
      : base() {
      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName, "Determines how many iterations should be calculated while optimizing the constant of a symbolic expression tree (0 indicates other or default stopping criterion).", new IntValue(10), true));
      Parameters.Add(new FixedValueParameter<DoubleValue>(ConstantOptimizationImprovementParameterName, "Determines the relative improvement which must be achieved in the constant optimization to continue with it (0 indicates other or default stopping criterion).", new DoubleValue(0), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationProbabilityParameterName, "Determines the probability that the constants are optimized", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<PercentValue>(ConstantOptimizationRowsPercentageParameterName, "Determines the percentage of the rows which should be used for constant optimization", new PercentValue(1), true));
      Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new GESymbolicRegressionConstantOptimizationEvaluator(this, cloner);
    }

    // Backwards compatibility: older persisted instances lack the UpdateConstantsInTree parameter, so add it after deserialization.
    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      if (!Parameters.ContainsKey(UpdateConstantsInTreeParameterName))
        Parameters.Add(new FixedValueParameter<BoolValue>(UpdateConstantsInTreeParameterName, "Determines if the constants in the tree should be overwritten by the optimized constants.", new BoolValue(true)));
    }
105
106    public override IOperation Apply() {
107      //var solution = SymbolicExpressionTreeParameter.ActualValue;
108      var solution = GenotypeToPhenotypeMapperParameter.ActualValue.Map(
109        SymbolicExpressionTreeGrammarParameter.ActualValue,
110        IntegerVectorParameter.ActualValue
111      );
112      SymbolicExpressionTreeParameter.ActualValue = solution;
113      double quality;
114      if (RandomParameter.ActualValue.NextDouble() < ConstantOptimizationProbability.Value) {
115        IEnumerable<int> constantOptimizationRows = GenerateRowsToEvaluate(ConstantOptimizationRowsPercentage.Value);
116        quality = OptimizeConstants(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, ProblemDataParameter.ActualValue,
117           constantOptimizationRows, ApplyLinearScalingParameter.ActualValue.Value, ConstantOptimizationIterations.Value,
118           EstimationLimitsParameter.ActualValue.Upper, EstimationLimitsParameter.ActualValue.Lower, UpdateConstantsInTree);
119
120        if (ConstantOptimizationRowsPercentage.Value != RelativeNumberOfEvaluatedSamplesParameter.ActualValue.Value) {
121          var evaluationRows = GenerateRowsToEvaluate();
122          quality = GESymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
123        }
124      } else {
125        var evaluationRows = GenerateRowsToEvaluate();
126        quality = GESymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, solution, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, ProblemDataParameter.ActualValue, evaluationRows, ApplyLinearScalingParameter.ActualValue.Value);
127      }
128      QualityParameter.ActualValue = new DoubleValue(quality);
129
130      if (Successor != null)
131        return ExecutionContext.CreateOperation(Successor);
132      else
133        return null;
134    }
135
136    public override double Evaluate(IExecutionContext context, ISymbolicExpressionTree tree, IRegressionProblemData problemData, IEnumerable<int> rows) {
137      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = context;
138      EstimationLimitsParameter.ExecutionContext = context;
139      ApplyLinearScalingParameter.ExecutionContext = context;
140
141      // Pearson R² evaluator is used on purpose instead of the const-opt evaluator,
142      // because Evaluate() is used to get the quality of evolved models on
143      // different partitions of the dataset (e.g., best validation model)
144      double r2 = GESymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(SymbolicDataAnalysisTreeInterpreterParameter.ActualValue, tree, EstimationLimitsParameter.ActualValue.Lower, EstimationLimitsParameter.ActualValue.Upper, problemData, rows, ApplyLinearScalingParameter.ActualValue.Value);
145
146      SymbolicDataAnalysisTreeInterpreterParameter.ExecutionContext = null;
147      EstimationLimitsParameter.ExecutionContext = null;
148      ApplyLinearScalingParameter.ExecutionContext = null;
149
150      return r2;
151    }
152
153    #region derivations of functions
154    // create function factory for arctangent
155    private readonly Func<Term, UnaryFunc> arctan = UnaryFunc.Factory(
156      eval: Math.Atan,
157      diff: x => 1 / (1 + x * x));
158    private static readonly Func<Term, UnaryFunc> sin = UnaryFunc.Factory(
159      eval: Math.Sin,
160      diff: Math.Cos);
161    private static readonly Func<Term, UnaryFunc> cos = UnaryFunc.Factory(
162       eval: Math.Cos,
163       diff: x => -Math.Sin(x));
164    private static readonly Func<Term, UnaryFunc> tan = UnaryFunc.Factory(
165      eval: Math.Tan,
166      diff: x => 1 + Math.Tan(x) * Math.Tan(x));
167    private static readonly Func<Term, UnaryFunc> square = UnaryFunc.Factory(
168       eval: x => x * x,
169       diff: x => 2 * x);
170    private static readonly Func<Term, UnaryFunc> erf = UnaryFunc.Factory(
171      eval: alglib.errorfunction,
172      diff: x => 2.0 * Math.Exp(-(x * x)) / Math.Sqrt(Math.PI));
173    private static readonly Func<Term, UnaryFunc> norm = UnaryFunc.Factory(
174      eval: alglib.normaldistribution,
175      diff: x => -(Math.Exp(-(x * x)) * Math.Sqrt(Math.Exp(x * x)) * x) / Math.Sqrt(2 * Math.PI));
176    #endregion
177
178
    /// <summary>
    /// Optimizes the constants and variable weights of <paramref name="tree"/> by least-squares
    /// fitting with alglib (lsfitfit), using gradients from an AutoDiff translation of the tree.
    /// </summary>
    /// <returns>
    /// The Pearson R² of the tree after optimization, or the pre-optimization quality if the
    /// fit fails, diverges, or worsens the quality by more than 0.001.
    /// </returns>
    public static double OptimizeConstants(ISymbolicDataAnalysisExpressionTreeInterpreter interpreter, ISymbolicExpressionTree tree, IRegressionProblemData problemData,
      IEnumerable<int> rows, bool applyLinearScaling, int maxIterations, double upperEstimationLimit = double.MaxValue, double lowerEstimationLimit = double.MinValue, bool updateConstantsInTree = true) {

      List<AutoDiff.Variable> variables = new List<AutoDiff.Variable>();
      List<AutoDiff.Variable> parameters = new List<AutoDiff.Variable>();
      List<string> variableNames = new List<string>();

      // Translate the tree into a differentiable term; unsupported symbols abort the optimization.
      AutoDiff.Term func;
      if (!TryTransformToAutoDiff(tree.Root.GetSubtree(0), variables, parameters, variableNames, out func))
        throw new NotSupportedException("Could not optimize constants of symbolic expression tree due to not supported symbols used in the tree.");
      if (variableNames.Count == 0) return 0.0;  // no input variables -> nothing to fit against

      AutoDiff.IParametricCompiledTerm compiledFunc = AutoDiff.TermUtils.Compile(func, variables.ToArray(), parameters.ToArray());

      List<SymbolicExpressionTreeTerminalNode> terminalNodes = tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>().ToList();
      double[] c = new double[variables.Count];

      {
        // c[0]/c[1] are the StartSymbol's linear-scaling variables (TryTransformToAutoDiff
        // adds beta before alpha), initialized to the identity scaling beta=0, alpha=1.
        c[0] = 0.0;
        c[1] = 1.0;
        //extract initial constants
        int i = 2;
        foreach (var node in terminalNodes) {
          ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
          VariableTreeNode variableTreeNode = node as VariableTreeNode;
          if (constantTreeNode != null)
            c[i++] = constantTreeNode.Value;
          else if (variableTreeNode != null)
            c[i++] = variableTreeNode.Weight;
        }
      }
      // Keep the starting point so the tree can be restored if the fit makes things worse.
      double[] originalConstants = (double[])c.Clone();
      double originalQuality = GESymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      alglib.lsfitstate state;
      alglib.lsfitreport rep;
      int info;

      // Build the design matrix x (one row per evaluated data row, one column per variable)
      // and the target vector y.
      Dataset ds = problemData.Dataset;
      double[,] x = new double[rows.Count(), variableNames.Count];
      int row = 0;
      foreach (var r in rows) {
        for (int col = 0; col < variableNames.Count; col++) {
          x[row, col] = ds.GetDoubleValue(variableNames[col], r);
        }
        row++;
      }
      double[] y = ds.GetDoubleValues(problemData.TargetVariable, rows).ToArray();
      int n = x.GetLength(0);
      int m = x.GetLength(1);
      int k = c.Length;

      // Adapters exposing the compiled AutoDiff term through alglib's callback signatures.
      alglib.ndimensional_pfunc function_cx_1_func = CreatePFunc(compiledFunc);
      alglib.ndimensional_pgrad function_cx_1_grad = CreatePGrad(compiledFunc);

      try {
        alglib.lsfitcreatefg(x, y, c, n, m, k, false, out state);
        alglib.lsfitsetcond(state, 0.0, 0.0, maxIterations);
        //alglib.lsfitsetgradientcheck(state, 0.001);
        alglib.lsfitfit(state, function_cx_1_func, function_cx_1_grad, null, null);
        alglib.lsfitresults(state, out info, out c, out rep);
      }
      catch (ArithmeticException) {
        // fitting failed numerically -> keep the unmodified tree and its original quality
        return originalQuality;
      }
      catch (alglib.alglibexception) {
        return originalQuality;
      }

      //info == -7  => constant optimization failed due to wrong gradient
      // Skip(2) drops the two linear-scaling values (beta, alpha), which are not tree nodes.
      if (info != -7) UpdateConstants(tree, c.Skip(2).ToArray());
      var quality = GESymbolicRegressionSingleObjectivePearsonRSquaredEvaluator.Calculate(interpreter, tree, lowerEstimationLimit, upperEstimationLimit, problemData, rows, applyLinearScaling);

      // Optionally restore the original constants; quality is still computed on the optimized ones.
      if (!updateConstantsInTree) UpdateConstants(tree, originalConstants.Skip(2).ToArray());
      // Reject the fit if it degraded quality noticeably or produced NaN.
      if (originalQuality - quality > 0.001 || double.IsNaN(quality)) {
        UpdateConstants(tree, originalConstants.Skip(2).ToArray());
        return originalQuality;
      }
      return quality;
    }
259
260    private static void UpdateConstants(ISymbolicExpressionTree tree, double[] constants) {
261      int i = 0;
262      foreach (var node in tree.Root.IterateNodesPrefix().OfType<SymbolicExpressionTreeTerminalNode>()) {
263        ConstantTreeNode constantTreeNode = node as ConstantTreeNode;
264        VariableTreeNode variableTreeNode = node as VariableTreeNode;
265        if (constantTreeNode != null)
266          constantTreeNode.Value = constants[i++];
267        else if (variableTreeNode != null)
268          variableTreeNode.Weight = constants[i++];
269      }
270    }
271
272    private static alglib.ndimensional_pfunc CreatePFunc(AutoDiff.IParametricCompiledTerm compiledFunc) {
273      return (double[] c, double[] x, ref double func, object o) => {
274        func = compiledFunc.Evaluate(c, x);
275      };
276    }
277
278    private static alglib.ndimensional_pgrad CreatePGrad(AutoDiff.IParametricCompiledTerm compiledFunc) {
279      return (double[] c, double[] x, ref double func, double[] grad, object o) => {
280        var tupel = compiledFunc.Differentiate(c, x);
281        func = tupel.Item2;
282        Array.Copy(tupel.Item1, grad, grad.Length);
283      };
284    }
285
    /// <summary>
    /// Recursively translates a symbolic expression (sub)tree into an AutoDiff term.
    /// Each Constant node and each Variable node's weight becomes a fresh AutoDiff variable
    /// (appended to <paramref name="variables"/>, i.e. the values to be optimized), while each
    /// Variable node's data value becomes an AutoDiff parameter (appended to
    /// <paramref name="parameters"/>, with the column name in <paramref name="variableNames"/>).
    /// The append order defines the index mapping used by OptimizeConstants.
    /// </summary>
    /// <returns>false (with term = null) if the tree contains an unsupported symbol.</returns>
    private static bool TryTransformToAutoDiff(ISymbolicExpressionTreeNode node, List<AutoDiff.Variable> variables, List<AutoDiff.Variable> parameters, List<string> variableNames, out AutoDiff.Term term) {
      if (node.Symbol is HeuristicLab.Problems.DataAnalysis.Symbolic.Constant) {
        // a constant becomes an optimizable variable
        var var = new AutoDiff.Variable();
        variables.Add(var);
        term = var;
        return true;
      }
      if (node.Symbol is HeuristicLab.Problems.DataAnalysis.Symbolic.Variable) {
        // a data variable becomes weight (optimizable variable) * data value (parameter)
        var varNode = node as VariableTreeNode;
        var par = new AutoDiff.Variable();
        parameters.Add(par);
        variableNames.Add(varNode.VariableName);
        var w = new AutoDiff.Variable();
        variables.Add(w);
        term = AutoDiff.TermBuilder.Product(w, par);
        return true;
      }
      if (node.Symbol is Addition) {
        List<AutoDiff.Term> terms = new List<Term>();
        foreach (var subTree in node.Subtrees) {
          AutoDiff.Term t;
          if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out t)) {
            term = null;
            return false;
          }
          terms.Add(t);
        }
        term = AutoDiff.TermBuilder.Sum(terms);
        return true;
      }
      if (node.Symbol is Subtraction) {
        List<AutoDiff.Term> terms = new List<Term>();
        for (int i = 0; i < node.SubtreeCount; i++) {
          AutoDiff.Term t;
          if (!TryTransformToAutoDiff(node.GetSubtree(i), variables, parameters, variableNames, out t)) {
            term = null;
            return false;
          }
          // every operand after the first is subtracted (negated and summed)
          if (i > 0) t = -t;
          terms.Add(t);
        }
        term = AutoDiff.TermBuilder.Sum(terms);
        return true;
      }
      if (node.Symbol is Multiplication) {
        // NOTE(review): assumes at least two subtrees (GetSubtree(1) would throw otherwise) — confirm the grammar guarantees this
        AutoDiff.Term a, b;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
          !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
          term = null;
          return false;
        } else {
          List<AutoDiff.Term> factors = new List<Term>();
          foreach (var subTree in node.Subtrees.Skip(2)) {
            AutoDiff.Term f;
            if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
              term = null;
              return false;
            }
            factors.Add(f);
          }
          term = AutoDiff.TermBuilder.Product(a, b, factors.ToArray());
          return true;
        }
      }
      if (node.Symbol is Division) {
        // only works for at least two subtrees
        AutoDiff.Term a, b;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out a) ||
          !TryTransformToAutoDiff(node.GetSubtree(1), variables, parameters, variableNames, out b)) {
          term = null;
          return false;
        } else {
          // a / b / c / ... is expressed as a * (1/b) * (1/c) * ...
          List<AutoDiff.Term> factors = new List<Term>();
          foreach (var subTree in node.Subtrees.Skip(2)) {
            AutoDiff.Term f;
            if (!TryTransformToAutoDiff(subTree, variables, parameters, variableNames, out f)) {
              term = null;
              return false;
            }
            factors.Add(1.0 / f);
          }
          term = AutoDiff.TermBuilder.Product(a, 1.0 / b, factors.ToArray());
          return true;
        }
      }
      if (node.Symbol is Logarithm) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = AutoDiff.TermBuilder.Log(t);
          return true;
        }
      }
      if (node.Symbol is Exponential) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = AutoDiff.TermBuilder.Exp(t);
          return true;
        }
      }
      // the trigonometric and special functions below use the UnaryFunc factories
      // declared in the "derivations of functions" region
      if (node.Symbol is Sine) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = sin(t);
          return true;
        }
      }
      if (node.Symbol is Cosine) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = cos(t);
          return true;
        }
      }
      if (node.Symbol is Tangent) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = tan(t);
          return true;
        }
      }
      if (node.Symbol is Square) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = square(t);
          return true;
        }
      }
      if (node.Symbol is Erf) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = erf(t);
          return true;
        }
      }
      if (node.Symbol is Norm) {
        AutoDiff.Term t;
        if (!TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out t)) {
          term = null;
          return false;
        } else {
          term = norm(t);
          return true;
        }
      }
      if (node.Symbol is StartSymbol) {
        // wrap the branch in linear scaling: branch * alpha + beta.
        // beta is added to 'variables' before alpha, so they map to c[0] and c[1]
        // in OptimizeConstants (initialized there to 0 and 1 respectively).
        var alpha = new AutoDiff.Variable();
        var beta = new AutoDiff.Variable();
        variables.Add(beta);
        variables.Add(alpha);
        AutoDiff.Term branchTerm;
        if (TryTransformToAutoDiff(node.GetSubtree(0), variables, parameters, variableNames, out branchTerm)) {
          term = branchTerm * alpha + beta;
          return true;
        } else {
          term = null;
          return false;
        }
      }
      // unsupported symbol
      term = null;
      return false;
    }
463
464    public static bool CanOptimizeConstants(ISymbolicExpressionTree tree) {
465      var containsUnknownSymbol = (
466        from n in tree.Root.GetSubtree(0).IterateNodesPrefix()
467        where
468         !(n.Symbol is HeuristicLab.Problems.DataAnalysis.Symbolic.Variable) &&
469         !(n.Symbol is HeuristicLab.Problems.DataAnalysis.Symbolic.Constant) &&
470         !(n.Symbol is Addition) &&
471         !(n.Symbol is Subtraction) &&
472         !(n.Symbol is Multiplication) &&
473         !(n.Symbol is Division) &&
474         !(n.Symbol is Logarithm) &&
475         !(n.Symbol is Exponential) &&
476         !(n.Symbol is Sine) &&
477         !(n.Symbol is Cosine) &&
478         !(n.Symbol is Tangent) &&
479         !(n.Symbol is Square) &&
480         !(n.Symbol is Erf) &&
481         !(n.Symbol is Norm) &&
482         !(n.Symbol is StartSymbol)
483        select n).
484      Any();
485      return !containsUnknownSymbol;
486    }
487  }
488}
Note: See TracBrowser for help on using the repository browser.