
Timestamp: 03/09/10 09:40:52
Author: gkronber
Message: Moved linear scaling functionality out of tree evaluator into a separate operator. #823 (Implement tree evaluator with linear scaling to improve convergence in symbolic regression.)
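For context, the linear scaling being factored out here fits an intercept (alpha) and a slope (beta) by simple least squares between the tree's raw outputs and the target values, so that the scaled prediction alpha + beta * y minimizes the squared error. The sketch below illustrates that calculation in isolation; it is not the HeuristicLab implementation, and it assumes plain double arrays instead of the Dataset/DoubleData types used in the code, with a made-up class name (the actual parameters are now expected to be produced by the separate LinearScaler operator mentioned in the variable descriptions).

  using System;
  using System.Linq;

  // Minimal sketch of the linear-scaling fit: least-squares slope (beta) and
  // intercept (alpha) relating raw tree outputs to target values.
  internal static class LinearScalingSketch {
    public static void Calculate(double[] estimated, double[] target,
                                 out double beta, out double alpha) {
      if (estimated.Length != target.Length) throw new ArgumentException("length mismatch");
      double xMean = estimated.Average();
      double tMean = target.Average();
      double sumXT = 0.0, sumXX = 0.0;
      for (int i = 0; i < estimated.Length; i++) {
        sumXT += (estimated[i] - xMean) * (target[i] - tMean);
        sumXX += (estimated[i] - xMean) * (estimated[i] - xMean);
      }
      beta = sumXX > 0.0 ? sumXT / sumXX : 1.0;  // degenerate case: identity scaling
      alpha = tMean - beta * xMean;              // scaled prediction: alpha + beta * y
    }
  }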

File: 1 edited

Legend:

  (no prefix)  Unmodified
  +            Added
  -            Removed
  • trunk/sources/HeuristicLab.GP.StructureIdentification/3.3/LinearScalingPredictorBuilder.cs

r2722 → r2977

       : base() {
       AddVariableInfo(new VariableInfo("FunctionTree", "The function tree", typeof(IGeneticProgrammingModel), VariableKind.In));
-      AddVariableInfo(new VariableInfo("PunishmentFactor", "The punishment factor limits the estimated values to a certain range", typeof(DoubleData), VariableKind.In));
-      AddVariableInfo(new VariableInfo("Dataset", "The dataset", typeof(Dataset), VariableKind.In));
-      AddVariableInfo(new VariableInfo("TrainingSamplesStart", "Start index of training set", typeof(DoubleData), VariableKind.In));
-      AddVariableInfo(new VariableInfo("TrainingSamplesEnd", "End index of training set", typeof(DoubleData), VariableKind.In));
-      AddVariableInfo(new VariableInfo("TargetVariable", "Name of the target variable", typeof(StringData), VariableKind.In));
+      AddVariableInfo(new VariableInfo("Beta", "Beta parameter for linear scaling as calculated by LinearScaler", typeof(DoubleData), VariableKind.In));
+      AddVariableInfo(new VariableInfo("Alpha", "Alpha parameter for linear scaling as calculated by LinearScaler", typeof(DoubleData), VariableKind.In));
+      AddVariableInfo(new VariableInfo("UpperEstimationLimit", "Upper limit for estimated value (optional)", typeof(DoubleData), VariableKind.In));
+      AddVariableInfo(new VariableInfo("LowerEstimationLimit", "Lower limit for estimated value (optional)", typeof(DoubleData), VariableKind.In));
       AddVariableInfo(new VariableInfo("Predictor", "The predictor combines the function tree and the evaluator and can be used to generate estimated values", typeof(IPredictor), VariableKind.New));
     }
…
     public override IOperation Apply(IScope scope) {
       IGeneticProgrammingModel model = GetVariableValue<IGeneticProgrammingModel>("FunctionTree", scope, true);
-      double punishmentFactor = GetVariableValue<DoubleData>("PunishmentFactor", scope, true).Data;
-      Dataset dataset = GetVariableValue<Dataset>("Dataset", scope, true);
-      int start = GetVariableValue<IntData>("TrainingSamplesStart", scope, true).Data;
-      int end = GetVariableValue<IntData>("TrainingSamplesEnd", scope, true).Data;
-      string targetVariable = GetVariableValue<StringData>("TargetVariable", scope, true).Data;
-      IPredictor predictor = CreatePredictor(model, punishmentFactor, dataset, targetVariable, start, end);
+      //double punishmentFactor = GetVariableValue<DoubleData>("PunishmentFactor", scope, true).Data;
+      //Dataset dataset = GetVariableValue<Dataset>("Dataset", scope, true);
+      //int start = GetVariableValue<IntData>("TrainingSamplesStart", scope, true).Data;
+      //int end = GetVariableValue<IntData>("TrainingSamplesEnd", scope, true).Data;
+      //string targetVariable = GetVariableValue<StringData>("TargetVariable", scope, true).Data;
+      double alpha = GetVariableValue<DoubleData>("Alpha", scope, true).Data;
+      double beta = GetVariableValue<DoubleData>("Beta", scope, true).Data;
+      DoubleData lowerLimit = GetVariableValue<DoubleData>("LowerEstimationLimit", scope, true, false);
+      DoubleData upperLimit = GetVariableValue<DoubleData>("UpperEstimationLimit", scope, true, false);
+      IPredictor predictor;
+      if (lowerLimit == null || upperLimit == null)
+        predictor = CreatePredictor(model, beta, alpha, double.NegativeInfinity, double.PositiveInfinity);
+      else
+        predictor = CreatePredictor(model, beta, alpha, lowerLimit.Data, upperLimit.Data);
       scope.AddVariable(new HeuristicLab.Core.Variable(scope.TranslateName("Predictor"), predictor));
       return null;
     }

-    public static IPredictor CreatePredictor(IGeneticProgrammingModel model, double punishmentFactor,
-      Dataset dataset, string targetVariable, int start, int end) {
-      return CreatePredictor(model, punishmentFactor, dataset, dataset.GetVariableIndex(targetVariable), start, end);
-    }
-
-
-    public static IPredictor CreatePredictor(IGeneticProgrammingModel model, double punishmentFactor,
-      Dataset dataset, int targetVariable, int start, int end) {
+    public static IPredictor CreatePredictor(IGeneticProgrammingModel model, double beta, double alpha, double lowerLimit, double upperLimit) {

       var evaluator = new HL3TreeEvaluator();
-      // evaluate for all rows
-      evaluator.PrepareForEvaluation(dataset, model.FunctionTree);
-      var result = from row in Enumerable.Range(start, end - start)
-                   let y = evaluator.Evaluate(row)
-                   let y_ = dataset.GetValue(row, targetVariable)
-                   select new { Row = row, Estimation = y, Target = y_ };
-
-      // calculate alpha and beta on the subset of rows with valid values
-      var filteredResult = result.Where(x => IsValidValue(x.Target) && IsValidValue(x.Estimation));
-      var target = filteredResult.Select(x => x.Target);
-      var estimation = filteredResult.Select(x => x.Estimation);
-      double a, b;
-      if (filteredResult.Count() > 2) {
-        double tMean = target.Sum() / target.Count();
-        double xMean = estimation.Sum() / estimation.Count();
-        double sumXT = 0;
-        double sumXX = 0;
-        foreach (var r in result) {
-          double x = r.Estimation;
-          double t = r.Target;
-          sumXT += (x - xMean) * (t - tMean);
-          sumXX += (x - xMean) * (x - xMean);
-        }
-        b = sumXT / sumXX;
-        a = tMean - b * xMean;
-      } else {
-        b = 1.0;
-        a = 0.0;
-      }
-      double mean = dataset.GetMean(targetVariable, start, end);
-      double range = dataset.GetRange(targetVariable, start, end);
-      double minEstimatedValue = mean - punishmentFactor * range;
-      double maxEstimatedValue = mean + punishmentFactor * range;
-      evaluator.LowerEvaluationLimit = minEstimatedValue;
-      evaluator.UpperEvaluationLimit = maxEstimatedValue;
-      var resultModel = new GeneticProgrammingModel(MakeSum(MakeProduct(model.FunctionTree, b), a));
-      return new Predictor(evaluator, resultModel, minEstimatedValue, maxEstimatedValue);
+      evaluator.LowerEvaluationLimit = lowerLimit;
+      evaluator.UpperEvaluationLimit = upperLimit;
+      var resultModel = new GeneticProgrammingModel(MakeSum(MakeProduct(model.FunctionTree, beta), alpha));
+      return new Predictor(evaluator, resultModel, lowerLimit, upperLimit);
     }
-
-    private static bool IsValidValue(double d) {
-      return !double.IsInfinity(d) && !double.IsNaN(d);
-    }
-

     private static IFunctionTree MakeSum(IFunctionTree tree, double x) {
…
       return constX;
     }
-
-    private static void CalculateScalingParameters(IEnumerable<double> xs, IEnumerable<double> ys, out double k, out double d) {
-      if (xs.Count() != ys.Count()) throw new ArgumentException();
-      double xMean = xs.Sum() / xs.Count();
-      double yMean = ys.Sum() / ys.Count();
-
-      var yEnumerator = ys.GetEnumerator();
-      var xEnumerator = xs.GetEnumerator();
-
-      double sumXY = 0.0;
-      double sumXX = 0.0;
-      while (xEnumerator.MoveNext() && yEnumerator.MoveNext()) {
-        sumXY += (xEnumerator.Current - xMean) * (yEnumerator.Current - yMean);
-        sumXX += (xEnumerator.Current - xMean) * (xEnumerator.Current - xMean);
-      }
-
-      k = sumXY / sumXX;
-      d = yMean - k * xMean;
-    }
   }
 }
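After this change the builder no longer evaluates the tree or derives estimation limits from the dataset; it only wraps the precomputed scaling parameters and optional limits into a predictor. The following is a hypothetical call of the new CreatePredictor overload (the class name is assumed from the file name, and the helper method is made up for illustration); beta and alpha are expected to come from the upstream LinearScaler operator.

  // Hypothetical usage sketch; infinities mean "no estimation limit".
  static IPredictor BuildScaledPredictor(IGeneticProgrammingModel model, double beta, double alpha) {
    return LinearScalingPredictorBuilder.CreatePredictor(
        model,                      // function tree wrapped in a GP model
        beta,                       // slope from linear scaling
        alpha,                      // intercept from linear scaling
        double.NegativeInfinity,    // lowerLimit: no lower estimation limit
        double.PositiveInfinity);   // upperLimit: no upper estimation limit
  }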