Free cookie consent management tool by TermsFeed Policy Generator

Ignore:
Timestamp:
07/08/16 14:40:02 (8 years ago)
Author:
gkronber
Message:

#2434: merged trunk changes r12934:14026 from trunk to branch

Location:
branches/crossvalidation-2434
Files:
8 edited
3 copied

Legend:

Unmodified
Added
Removed
  • branches/crossvalidation-2434

  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis

  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesAlgorithm.cs

    r12874 r14029  
    3535
    3636namespace HeuristicLab.Algorithms.DataAnalysis {
    37   [Item("Gradient Boosted Trees", "Gradient boosted trees algorithm. Friedman, J. \"Greedy Function Approximation: A Gradient Boosting Machine\", IMS 1999 Reitz Lecture.")]
     37  [Item("Gradient Boosted Trees (GBT)", "Gradient boosted trees algorithm. Specific implementation of gradient boosting for regression trees. Friedman, J. \"Greedy Function Approximation: A Gradient Boosting Machine\", IMS 1999 Reitz Lecture.")]
    3838  [StorableClass]
    3939  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 125)]
     
    255255      // produce solution
    256256      if (CreateSolution) {
    257         var surrogateModel = new GradientBoostedTreesModelSurrogate(problemData, (uint)Seed, lossFunction,
    258           Iterations, MaxSize, R, M, Nu, state.GetModel());
     257        var model = state.GetModel();
    259258
    260259        // for logistic regression we produce a classification solution
    261260        if (lossFunction is LogisticRegressionLoss) {
    262           var model = new DiscriminantFunctionClassificationModel(surrogateModel,
     261          var classificationModel = new DiscriminantFunctionClassificationModel(model,
    263262            new AccuracyMaximizationThresholdCalculator());
    264263          var classificationProblemData = new ClassificationProblemData(problemData.Dataset,
    265264            problemData.AllowedInputVariables, problemData.TargetVariable, problemData.Transformations);
    266           model.RecalculateModelParameters(classificationProblemData, classificationProblemData.TrainingIndices);
    267 
    268           var classificationSolution = new DiscriminantFunctionClassificationSolution(model, classificationProblemData);
     265          classificationModel.RecalculateModelParameters(classificationProblemData, classificationProblemData.TrainingIndices);
     266
     267          var classificationSolution = new DiscriminantFunctionClassificationSolution(classificationModel, classificationProblemData);
    269268          Results.Add(new Result("Solution", classificationSolution));
    270269        } else {
    271270          // otherwise we produce a regression solution
    272           Results.Add(new Result("Solution", new RegressionSolution(surrogateModel, problemData)));
     271          Results.Add(new Result("Solution", new RegressionSolution(model, problemData)));
    273272        }
    274273      }
  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesAlgorithmStatic.cs

    r12710 r14029  
    5252      internal RegressionTreeBuilder treeBuilder { get; private set; }
    5353
     54      private readonly uint randSeed;
    5455      private MersenneTwister random { get; set; }
    5556
     
    7172        this.m = m;
    7273
     74        this.randSeed = randSeed;
    7375        random = new MersenneTwister(randSeed);
    7476        this.problemData = problemData;
     
    9496        weights = new List<double>();
    9597        // add constant model
    96         models.Add(new ConstantRegressionModel(f0));
     98        models.Add(new ConstantModel(f0, problemData.TargetVariable));
    9799        weights.Add(1.0);
    98100      }
    99101
    100102      public IRegressionModel GetModel() {
    101         return new GradientBoostedTreesModel(models, weights);
     103#pragma warning disable 618
     104        var model = new GradientBoostedTreesModel(models, weights);
     105#pragma warning restore 618
     106        // we don't know the number of iterations here but the number of weights is equal
     107        // to the number of iterations + 1 (for the constant model)
     108        // wrap the actual model in a surrogate that enables persistence and lazy recalculation of the model if necessary
     109        return new GradientBoostedTreesModelSurrogate(problemData, randSeed, lossFunction, weights.Count - 1, maxSize, r, m, nu, model);
    102110      }
    103111      public IEnumerable<KeyValuePair<string, double>> GetVariableRelevance() {
     
    122130
    123131    // simple interface
    124     public static IRegressionSolution TrainGbm(IRegressionProblemData problemData, ILossFunction lossFunction, int maxSize, double nu, double r, double m, int maxIterations, uint randSeed = 31415) {
     132    public static GradientBoostedTreesSolution TrainGbm(IRegressionProblemData problemData, ILossFunction lossFunction, int maxSize, double nu, double r, double m, int maxIterations, uint randSeed = 31415) {
    125133      Contract.Assert(r > 0);
    126134      Contract.Assert(r <= 1.0);
     
    135143
    136144      var model = state.GetModel();
    137       return new RegressionSolution(model, (IRegressionProblemData)problemData.Clone());
     145      return new GradientBoostedTreesSolution(model, (IRegressionProblemData)problemData.Clone());
    138146    }
    139147
  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModel.cs

    r12869 r14029  
    3333  [Item("Gradient boosted tree model", "")]
    3434  // this is essentially a collection of weighted regression models
    35   public sealed class GradientBoostedTreesModel : NamedItem, IRegressionModel {
     35  public sealed class GradientBoostedTreesModel : RegressionModel, IGradientBoostedTreesModel {
    3636    // BackwardsCompatibility3.4 for allowing deserialization & serialization of old models
    3737    #region Backwards compatible code, remove with 3.5
     
    5858    #endregion
    5959
     60    public override IEnumerable<string> VariablesUsedForPrediction {
     61      get { return models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x); }
     62    }
     63
    6064    private readonly IList<IRegressionModel> models;
    6165    public IEnumerable<IRegressionModel> Models { get { return models; } }
     
    7680      this.isCompatibilityLoaded = original.isCompatibilityLoaded;
    7781    }
    78     public GradientBoostedTreesModel(IEnumerable<IRegressionModel> models, IEnumerable<double> weights)
    79       : base("Gradient boosted tree model", string.Empty) {
     82    [Obsolete("The constructor of GBTModel should not be used directly anymore (use GBTModelSurrogate instead)")]
     83    internal GradientBoostedTreesModel(IEnumerable<IRegressionModel> models, IEnumerable<double> weights)
     84      : base(string.Empty, "Gradient boosted tree model", string.Empty) {
    8085      this.models = new List<IRegressionModel>(models);
    8186      this.weights = new List<double>(weights);
     
    8893    }
    8994
    90     public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
     95    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    9196      // allocate target array go over all models and add up weighted estimation for each row
    9297      if (!rows.Any()) return Enumerable.Empty<double>(); // return immediately if rows is empty. This prevents multiple iteration over lazy rows enumerable.
     
    104109    }
    105110
    106     public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     111    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    107112      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    108113    }
     114
    109115  }
    110116}
  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/GradientBoostedTreesModelSurrogate.cs

    r12874 r14029  
    2121#endregion
    2222
    23 using System;
    2423using System.Collections.Generic;
    2524using System.Linq;
     
    2726using HeuristicLab.Core;
    2827using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
    29 using HeuristicLab.PluginInfrastructure;
    3028using HeuristicLab.Problems.DataAnalysis;
    3129
     
    3634  // recalculate the actual GBT model on demand
    3735  [Item("Gradient boosted tree model", "")]
    38   public sealed class GradientBoostedTreesModelSurrogate : NamedItem, IRegressionModel {
     36  public sealed class GradientBoostedTreesModelSurrogate : RegressionModel, IGradientBoostedTreesModel {
    3937    // don't store the actual model!
    40     private IRegressionModel actualModel; // the actual model is only recalculated when necessary
     38    private IGradientBoostedTreesModel actualModel; // the actual model is only recalculated when necessary
    4139
    4240    [Storable]
     
    5856
    5957
     58    public override IEnumerable<string> VariablesUsedForPrediction {
     59      get { return actualModel.Models.SelectMany(x => x.VariablesUsedForPrediction).Distinct().OrderBy(x => x); }
     60    }
     61
    6062    [StorableConstructor]
    6163    private GradientBoostedTreesModelSurrogate(bool deserializing) : base(deserializing) { }
     
    7678
    7779    // create only the surrogate model without an actual model
    78     public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed, ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
    79       : base("Gradient boosted tree model", string.Empty) {
     80    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
     81      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu)
     82      : base(trainingProblemData.TargetVariable, "Gradient boosted tree model", string.Empty) {
    8083      this.trainingProblemData = trainingProblemData;
    8184      this.seed = seed;
     
    8992
    9093    // wrap an actual model in a surrogate
    91     public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed, ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu, IRegressionModel model)
     94    public GradientBoostedTreesModelSurrogate(IRegressionProblemData trainingProblemData, uint seed,
     95      ILossFunction lossFunction, int iterations, int maxSize, double r, double m, double nu,
     96      IGradientBoostedTreesModel model)
    9297      : this(trainingProblemData, seed, lossFunction, iterations, maxSize, r, m, nu) {
    9398      this.actualModel = model;
     
    99104
    100105    // forward message to actual model (recalculate model first if necessary)
    101     public IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
     106    public override IEnumerable<double> GetEstimatedValues(IDataset dataset, IEnumerable<int> rows) {
    102107      if (actualModel == null) actualModel = RecalculateModel();
    103108      return actualModel.GetEstimatedValues(dataset, rows);
    104109    }
    105110
    106     public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     111    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    107112      return new RegressionSolution(this, (IRegressionProblemData)problemData.Clone());
    108113    }
    109114
     115    private IGradientBoostedTreesModel RecalculateModel() {
     116      return GradientBoostedTreesAlgorithmStatic.TrainGbm(trainingProblemData, lossFunction, maxSize, nu, r, m, iterations, seed).Model;
     117    }
    110118
    111     private IRegressionModel RecalculateModel() {
    112       return GradientBoostedTreesAlgorithmStatic.TrainGbm(trainingProblemData, lossFunction, maxSize, nu, r, m, iterations, seed).Model;
     119    public IEnumerable<IRegressionModel> Models {
     120      get {
     121        if (actualModel == null) actualModel = RecalculateModel();
     122        return actualModel.Models;
     123      }
     124    }
     125
     126    public IEnumerable<double> Weights {
     127      get {
     128        if (actualModel == null) actualModel = RecalculateModel();
     129        return actualModel.Weights;
     130      }
    113131    }
    114132  }
  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeBuilder.cs

    r12700 r14029  
    119119    }
    120120
    121     // simple API produces a single regression tree optimizing sum of squared errors
    122     // this can be used if only a simple regression tree should be produced
    123     // for a set of trees use the method CreateRegressionTreeForGradientBoosting below
    124     //
    125     // r and m work in the same way as for alglib random forest
    126     // r is fraction of rows to use for training
    127     // m is fraction of variables to use for training
    128     public IRegressionModel CreateRegressionTree(int maxSize, double r = 0.5, double m = 0.5) {
    129       // subtract mean of y first
    130       var yAvg = y.Average();
    131       for (int i = 0; i < y.Length; i++) y[i] -= yAvg;
    132 
    133       var seLoss = new SquaredErrorLoss();
    134 
    135       var model = CreateRegressionTreeForGradientBoosting(y, curPred, maxSize, problemData.TrainingIndices.ToArray(), seLoss, r, m);
    136 
    137       return new GradientBoostedTreesModel(new[] { new ConstantRegressionModel(yAvg), model }, new[] { 1.0, 1.0 });
    138     }
    139 
    140121    // specific interface that allows to specify the target labels and the training rows which is necessary when for gradient boosted trees
    141122    public IRegressionModel CreateRegressionTreeForGradientBoosting(double[] y, double[] curPred, int maxSize, int[] idx, ILossFunction lossFunction, double r = 0.5, double m = 0.5) {
     
    156137      int nRows = idx.Count();
    157138
    158       // shuffle variable idx
     139      // shuffle variable names
    159140      HeuristicLab.Random.ListExtensions.ShuffleInPlace(allowedVariables, random);
    160141
     
    195176      CreateRegressionTreeFromQueue(maxSize, lossFunction);
    196177
    197       return new RegressionTreeModel(tree.ToArray());
    198     }
    199 
    200 
    201     // processes potential splits from the queue as long as splits are left and the maximum size of the tree is not reached
     178      return new RegressionTreeModel(tree.ToArray(), problemData.TargetVariable);
     179    }
     180
     181
     182    // processes potential splits from the queue as long as splits are remaining and the maximum size of the tree is not reached
    202183    private void CreateRegressionTreeFromQueue(int maxNodes, ILossFunction lossFunction) {
    203184      while (queue.Any() && curTreeNodeIdx + 1 < maxNodes) { // two nodes are created in each loop
     
    223204
    224205        // overwrite existing leaf node with an internal node
    225         tree[f.ParentNodeIdx] = new RegressionTreeModel.TreeNode(f.SplittingVariable, f.SplittingThreshold, leftTreeIdx, rightTreeIdx);
     206        tree[f.ParentNodeIdx] = new RegressionTreeModel.TreeNode(f.SplittingVariable, f.SplittingThreshold, leftTreeIdx, rightTreeIdx, weightLeft: (splitIdx - startIdx + 1) / (double)(endIdx - startIdx + 1));
    226207      }
    227208    }
  • branches/crossvalidation-2434/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/RegressionTreeModel.cs

    r12869 r14029  
    2323using System;
    2424using System.Collections.Generic;
     25using System.Collections.ObjectModel;
    2526using System.Globalization;
    2627using System.Linq;
     
    3334  [StorableClass]
    3435  [Item("RegressionTreeModel", "Represents a decision tree for regression.")]
    35   public sealed class RegressionTreeModel : NamedItem, IRegressionModel {
     36  public sealed class RegressionTreeModel : RegressionModel {
     37    public override IEnumerable<string> VariablesUsedForPrediction {
     38      get { return tree.Select(t => t.VarName).Where(v => v != TreeNode.NO_VARIABLE); }
     39    }
    3640
    3741    // trees are represented as a flat array   
     
    3943      public readonly static string NO_VARIABLE = null;
    4044
    41       public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1)
     45      public TreeNode(string varName, double val, int leftIdx = -1, int rightIdx = -1, double weightLeft = -1.0)
    4246        : this() {
    4347        VarName = varName;
     
    4549        LeftIdx = leftIdx;
    4650        RightIdx = rightIdx;
    47       }
    48 
    49       public string VarName { get; private set; } // name of the variable for splitting or NO_VARIABLE if terminal node
    50       public double Val { get; private set; } // threshold
    51       public int LeftIdx { get; private set; }
    52       public int RightIdx { get; private set; }
    53 
    54       internal IList<double> Data { get; set; } // only necessary to improve efficiency of evaluation
     51        WeightLeft = weightLeft;
     52      }
     53
     54      public string VarName { get; internal set; } // name of the variable for splitting or NO_VARIABLE if terminal node
     55      public double Val { get; internal set; } // threshold
     56      public int LeftIdx { get; internal set; }
     57      public int RightIdx { get; internal set; }
     58      public double WeightLeft { get; internal set; } // for partial dependence plots (value in range [0..1] describes the fraction of training samples for the left sub-tree)
     59
    5560
    5661      // necessary because the default implementation of GetHashCode for structs in .NET would only return the hashcode of val here
     
    6570            LeftIdx.Equals(other.LeftIdx) &&
    6671            RightIdx.Equals(other.RightIdx) &&
     72            WeightLeft.Equals(other.WeightLeft) &&
    6773            EqualStrings(VarName, other.VarName);
    6874        } else {
     
    8086    private TreeNode[] tree;
    8187
    82     [Storable]
     88    #region old storable format
     89    // remove with HL 3.4
     90    [Storable(AllowOneWay = true)]
    8391    // to prevent storing the references to data caches in nodes
    84     // seemingly it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary) TODO
     92    // seemingly, it is bad (performance-wise) to persist tuples (tuples are used as keys in a dictionary)
    8593    private Tuple<string, double, int, int>[] SerializedTree {
    86       get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
    87       set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4)).ToArray(); }
    88     }
     94      // get { return tree.Select(t => Tuple.Create(t.VarName, t.Val, t.LeftIdx, t.RightIdx)).ToArray(); }
     95      set { this.tree = value.Select(t => new TreeNode(t.Item1, t.Item2, t.Item3, t.Item4, -1.0)).ToArray(); } // use a weight of -1.0 to indicate that partial dependence cannot be calculated for old models
     96    }
     97    #endregion
     98    #region new storable format
     99    [Storable]
     100    private string[] SerializedTreeVarNames {
     101      get { return tree.Select(t => t.VarName).ToArray(); }
     102      set {
     103        if (tree == null) tree = new TreeNode[value.Length];
     104        for (int i = 0; i < value.Length; i++) {
     105          tree[i].VarName = value[i];
     106        }
     107      }
     108    }
     109    [Storable]
     110    private double[] SerializedTreeValues {
     111      get { return tree.Select(t => t.Val).ToArray(); }
     112      set {
     113        if (tree == null) tree = new TreeNode[value.Length];
     114        for (int i = 0; i < value.Length; i++) {
     115          tree[i].Val = value[i];
     116        }
     117      }
     118    }
     119    [Storable]
     120    private int[] SerializedTreeLeftIdx {
     121      get { return tree.Select(t => t.LeftIdx).ToArray(); }
     122      set {
     123        if (tree == null) tree = new TreeNode[value.Length];
     124        for (int i = 0; i < value.Length; i++) {
     125          tree[i].LeftIdx = value[i];
     126        }
     127      }
     128    }
     129    [Storable]
     130    private int[] SerializedTreeRightIdx {
     131      get { return tree.Select(t => t.RightIdx).ToArray(); }
     132      set {
     133        if (tree == null) tree = new TreeNode[value.Length];
     134        for (int i = 0; i < value.Length; i++) {
     135          tree[i].RightIdx = value[i];
     136        }
     137      }
     138    }
     139    [Storable]
     140    private double[] SerializedTreeWeightLeft {
     141      get { return tree.Select(t => t.WeightLeft).ToArray(); }
     142      set {
     143        if (tree == null) tree = new TreeNode[value.Length];
     144        for (int i = 0; i < value.Length; i++) {
     145          tree[i].WeightLeft = value[i];
     146        }
     147      }
     148    }
     149    #endregion
    89150
    90151    [StorableConstructor]
     
    99160    }
    100161
    101     internal RegressionTreeModel(TreeNode[] tree)
    102       : base("RegressionTreeModel", "Represents a decision tree for regression.") {
     162    internal RegressionTreeModel(TreeNode[] tree, string targetVariable)
     163      : base(targetVariable, "RegressionTreeModel", "Represents a decision tree for regression.") {
    103164      this.tree = tree;
    104165    }
    105166
    106     private static double GetPredictionForRow(TreeNode[] t, int nodeIdx, int row) {
     167    private static double GetPredictionForRow(TreeNode[] t, ReadOnlyCollection<double>[] columnCache, int nodeIdx, int row) {
    107168      while (nodeIdx != -1) {
    108169        var node = t[nodeIdx];
    109170        if (node.VarName == TreeNode.NO_VARIABLE)
    110171          return node.Val;
    111 
    112         if (node.Data[row] <= node.Val)
     172        if (columnCache[nodeIdx] == null || double.IsNaN(columnCache[nodeIdx][row])) {
     173          if (node.WeightLeft.IsAlmost(-1.0)) throw new InvalidOperationException("Cannot calculate partial dependence for trees loaded from older versions of HeuristicLab.");
     174          // weighted average for partial dependence plot (recursive here because we need to calculate both sub-trees)
     175          return node.WeightLeft * GetPredictionForRow(t, columnCache, node.LeftIdx, row) +
     176                 (1.0 - node.WeightLeft) * GetPredictionForRow(t, columnCache, node.RightIdx, row);
     177        } else if (columnCache[nodeIdx][row] <= node.Val)
    113178          nodeIdx = node.LeftIdx;
    114179        else
     
    122187    }
    123188
    124     public IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
     189    public override IEnumerable<double> GetEstimatedValues(IDataset ds, IEnumerable<int> rows) {
    125190      // lookup columns for variableNames in one pass over the tree to speed up evaluation later on
     191      ReadOnlyCollection<double>[] columnCache = new ReadOnlyCollection<double>[tree.Length];
     192
    126193      for (int i = 0; i < tree.Length; i++) {
    127194        if (tree[i].VarName != TreeNode.NO_VARIABLE) {
    128           tree[i].Data = ds.GetReadOnlyDoubleValues(tree[i].VarName);
    129         }
    130       }
    131       return rows.Select(r => GetPredictionForRow(tree, 0, r));
    132     }
    133 
    134     public IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
     195          // tree models also support calculating estimations if not all variables used for training are available in the dataset
     196          if (ds.ColumnNames.Contains(tree[i].VarName))
     197            columnCache[i] = ds.GetReadOnlyDoubleValues(tree[i].VarName);
     198        }
     199      }
     200      return rows.Select(r => GetPredictionForRow(tree, columnCache, 0, r));
     201    }
     202
     203    public override IRegressionSolution CreateRegressionSolution(IRegressionProblemData problemData) {
    135204      return new RegressionSolution(this, new RegressionProblemData(problemData));
    136205    }
     
    147216      } else {
    148217        return
    149           TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F}", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val))
    150         + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} >  {3:F}", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val));
    151       }
    152     }
     218          TreeToString(n.LeftIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2} <= {3:F} ({4:N3})", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, n.WeightLeft))
     219        + TreeToString(n.RightIdx, string.Format(CultureInfo.InvariantCulture, "{0}{1}{2}  >  {3:F} ({4:N3}))", part, string.IsNullOrEmpty(part) ? "" : " and ", n.VarName, n.Val, 1.0 - n.WeightLeft));
     220      }
     221    }
     222
    153223  }
    154224}
Note: See TracChangeset for help on using the changeset viewer.