
source: branches/MCTS-SymbReg-2796/HeuristicLab.Algorithms.DataAnalysis/3.4/MctsSymbolicRegression/MctsSymbolicRegressionAlgorithm.cs @ 15606

Last change on this file since 15606 was 15606, checked in by gkronber, 7 years ago

#2796: comments and typos

File size: 16.8 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Problems.DataAnalysis.Symbolic.Regression;

namespace HeuristicLab.Algorithms.DataAnalysis.MctsSymbolicRegression {
  // TODO: support pause (persisting/cloning the state)
  [Item("Symbolic Regression Tree Search", "Tree search for symbolic regression.")]
  [StorableClass]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 250)]
  public class MctsSymbolicRegressionAlgorithm : FixedDataAnalysisAlgorithm<IRegressionProblem> {

    #region ParameterNames
    private const string IterationsParameterName = "Iterations";
    private const string MaxVariablesParameterName = "Maximum variables";
    private const string ScaleVariablesParameterName = "Scale variables";
    private const string AllowedFactorsParameterName = "Allowed factors";
    private const string ConstantOptimizationIterationsParameterName = "Iterations (constant optimization)";
    private const string PolicyParameterName = "Policy";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string UpdateIntervalParameterName = "UpdateInterval";
    private const string CreateSolutionParameterName = "CreateSolution";
    private const string PunishmentFactorParameterName = "PunishmentFactor";
    private const string CollectParetoOptimalSolutionsParameterName = "CollectParetoOptimalSolutions";
    private const string LambdaParameterName = "Lambda";

    private const string VariableProductFactorName = "x * y * ...";
    private const string ExpFactorName = "exp(c * x * y ...)";
    private const string LogFactorName = "log(c + c1 x + c2 x + ...)";
    private const string InvFactorName = "1 / (1 + c1 x + c2 x + ...)";
    private const string FactorSumsName = "t1(x) + t2(x) + ... ";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[IterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaxVariableReferencesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaxVariablesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> ScaleVariablesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> PunishmentFactorParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[PunishmentFactorParameterName]; }
    }
    public IValueParameter<ICheckedItemList<StringValue>> AllowedFactorsParameter {
      get { return (IValueParameter<ICheckedItemList<StringValue>>)Parameters[AllowedFactorsParameterName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
    public IFixedValueParameter<IntValue> UpdateIntervalParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[UpdateIntervalParameterName]; }
    }
    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
    }
    public IFixedValueParameter<BoolValue> CollectParetoOptimalSolutionsParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CollectParetoOptimalSolutionsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> LambdaParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[LambdaParameterName]; }
    }
    #endregion

    #region Properties
    public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int MaxVariableReferences {
      get { return MaxVariableReferencesParameter.Value.Value; }
      set { MaxVariableReferencesParameter.Value.Value = value; }
    }
    public double PunishmentFactor {
      get { return PunishmentFactorParameter.Value.Value; }
      set { PunishmentFactorParameter.Value.Value = value; }
    }
    public ICheckedItemList<StringValue> AllowedFactors {
      get { return AllowedFactorsParameter.Value; }
    }
    public int ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value.Value; }
      set { ConstantOptimizationIterationsParameter.Value.Value = value; }
    }
    public bool ScaleVariables {
      get { return ScaleVariablesParameter.Value.Value; }
      set { ScaleVariablesParameter.Value.Value = value; }
    }
    public bool CreateSolution {
      get { return CreateSolutionParameter.Value.Value; }
      set { CreateSolutionParameter.Value.Value = value; }
    }
    public bool CollectParetoOptimalSolutions {
      get { return CollectParetoOptimalSolutionsParameter.Value.Value; }
      set { CollectParetoOptimalSolutionsParameter.Value.Value = value; }
    }
    public double Lambda {
      get { return LambdaParameter.Value.Value; }
      set { LambdaParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    protected MctsSymbolicRegressionAlgorithm(bool deserializing) : base(deserializing) { }

    protected MctsSymbolicRegressionAlgorithm(MctsSymbolicRegressionAlgorithm original, Cloner cloner)
      : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new MctsSymbolicRegressionAlgorithm(this, cloner);
    }

    public MctsSymbolicRegressionAlgorithm() {
      Problem = new RegressionProblem(); // default problem

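      // set up the list of allowed factor types; the sum-of-terms factor (t1(x) + t2(x) + ...) is unchecked by default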
      var defaultFactorsList = new CheckedItemList<StringValue>(
        new string[] { VariableProductFactorName, ExpFactorName, LogFactorName, InvFactorName, FactorSumsName }
        .Select(s => new StringValue(s).AsReadOnly())
        ).AsReadOnly();
      defaultFactorsList.SetItemCheckedState(defaultFactorsList.First(s => s.Value == FactorSumsName), false);

      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName,
        "Number of iterations", new IntValue(100000)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName,
        "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName,
        "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaxVariablesParameterName,
        "Maximum number of variable references in the symbolic regression models (multiple usages of the same variable are counted)", new IntValue(5)));
      Parameters.Add(new ValueParameter<ICheckedItemList<StringValue>>(AllowedFactorsParameterName,
        "Choose which expressions are allowed as factors in the model.", defaultFactorsList));

      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName,
        "Number of iterations for constant optimization. A small number of iterations should be sufficient for most models. " +
        "Set to 0 to let the algorithm stop automatically when it converges. Set to -1 to disable constant optimization.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<BoolValue>(ScaleVariablesParameterName,
        "Set to true to scale all input variables to the range [0..1]", new BoolValue(true)));
      Parameters[ScaleVariablesParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<DoubleValue>(PunishmentFactorParameterName, "Estimations of models can be bounded. The estimation limits are calculated in the following way (lb = mean(y) - punishmentFactor*range(y), ub = mean(y) + punishmentFactor*range(y))", new DoubleValue(10)));
      Parameters[PunishmentFactorParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<IntValue>(UpdateIntervalParameterName,
        "Number of iterations until the results are updated", new IntValue(100)));
      Parameters[UpdateIntervalParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
        "Optionally produce a solution at the end of the run", new BoolValue(true)));
      Parameters[CreateSolutionParameterName].Hidden = true;

      Parameters.Add(new FixedValueParameter<BoolValue>(CollectParetoOptimalSolutionsParameterName,
        "Optionally collect a set of Pareto-optimal solutions minimizing error and complexity.", new BoolValue(false)));
      Parameters[CollectParetoOptimalSolutionsParameterName].Hidden = true;

      Parameters.Add(new FixedValueParameter<DoubleValue>(LambdaParameterName,
        "Lambda is the factor for the regularization term in the objective function (Obj = (y - f(x,p))² + lambda * |p|²)", new DoubleValue(0.0)));
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
    }

    // TODO: support pause and restart
    protected override void Run(CancellationToken cancellationToken) {
      // Set up the algorithm
      if (SetSeedRandomly) Seed = new System.Random().Next();
      var collectPareto = CollectParetoOptimalSolutions;

      // Set up the results display
      var iterations = new IntValue(0);
      Results.Add(new Result("Iterations", iterations));

      var bestSolutionIteration = new IntValue(0);
      Results.Add(new Result("Best solution iteration", bestSolutionIteration));

      var table = new DataTable("Qualities");
      table.Rows.Add(new DataRow("Best quality"));
      table.Rows.Add(new DataRow("Current best quality"));
      table.Rows.Add(new DataRow("Average quality"));
      Results.Add(new Result("Qualities", table));

      var bestQuality = new DoubleValue();
      Results.Add(new Result("Best quality", bestQuality));

      var curQuality = new DoubleValue();
      Results.Add(new Result("Current best quality", curQuality));

      var avgQuality = new DoubleValue();
      Results.Add(new Result("Average quality", avgQuality));

      var totalRollouts = new IntValue();
      Results.Add(new Result("Total rollouts", totalRollouts));
      var effRollouts = new IntValue();
      Results.Add(new Result("Effective rollouts", effRollouts));
      var funcEvals = new IntValue();
      Results.Add(new Result("Function evaluations", funcEvals));
      var gradEvals = new IntValue();
      Results.Add(new Result("Gradient evaluations", gradEvals));

      Result paretoBestModelsResult = new Result("ParetoBestModels", typeof(ItemList<ISymbolicRegressionSolution>));
      if (collectPareto) {
        Results.Add(paretoBestModelsResult);
      }

      // same as in SymbolicRegressionSingleObjectiveProblem
      var y = Problem.ProblemData.Dataset.GetDoubleValues(Problem.ProblemData.TargetVariable,
        Problem.ProblemData.TrainingIndices);
      var avgY = y.Average();
      var minY = y.Min();
      var maxY = y.Max();
      var range = maxY - minY;
      var lowerLimit = avgY - PunishmentFactor * range;
      var upperLimit = avgY + PunishmentFactor * range;
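      // lowerLimit and upperLimit bound the model estimations (see the PunishmentFactor parameter description)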

      // init
      var problemData = (IRegressionProblemData)Problem.ProblemData.Clone();
      if (!AllowedFactors.CheckedItems.Any()) throw new ArgumentException("At least one type of factor must be allowed");
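      // map each checked factor type to the corresponding flag of the search state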
      var state = MctsSymbolicRegressionStatic.CreateState(problemData, (uint)Seed, MaxVariableReferences, ScaleVariables,
        ConstantOptimizationIterations, Lambda,
        collectPareto,
        lowerLimit, upperLimit,
        allowProdOfVars: AllowedFactors.CheckedItems.Any(s => s.Value.Value == VariableProductFactorName),
        allowExp: AllowedFactors.CheckedItems.Any(s => s.Value.Value == ExpFactorName),
        allowLog: AllowedFactors.CheckedItems.Any(s => s.Value.Value == LogFactorName),
        allowInv: AllowedFactors.CheckedItems.Any(s => s.Value.Value == InvFactorName),
        allowMultipleTerms: AllowedFactors.CheckedItems.Any(s => s.Value.Value == FactorSumsName)
        );

      var updateInterval = UpdateIntervalParameter.Value.Value;
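      // running statistics for the current update interval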
      double sumQ = 0.0;
      double bestQ = 0.0;
      double curBestQ = 0.0;
      int n = 0;

      // canceled before we actually started
      cancellationToken.ThrowIfCancellationRequested();

      // Loop until iteration limit reached or canceled.
      for (int i = 0; i < Iterations && !state.Done && !cancellationToken.IsCancellationRequested; i++) {
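        // perform one search step; q is the quality of the model sampled in this iteration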
        var q = MctsSymbolicRegressionStatic.MakeStep(state);
        sumQ += q; // sum of qs in the last updateInterval iterations
        curBestQ = Math.Max(q, curBestQ); // the best q in the last updateInterval iterations
        bestQ = Math.Max(q, bestQ); // the best q overall
        n++;
        // iteration results
        if (n == updateInterval) {
          if (bestQ > bestQuality.Value) {
            bestSolutionIteration.Value = i;
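            // stop early if the solution is already (practically) perfect on the training partition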
            if (state.BestSolutionTrainingQuality > 0.99999) break;
          }
          bestQuality.Value = bestQ;
          curQuality.Value = curBestQ;
          avgQuality.Value = sumQ / n;
          sumQ = 0.0;
          curBestQ = 0.0;

          funcEvals.Value = state.FuncEvaluations;
          gradEvals.Value = state.GradEvaluations;
          effRollouts.Value = state.EffectiveRollouts;
          totalRollouts.Value = state.TotalRollouts;

          if (collectPareto) {
            paretoBestModelsResult.Value = new ItemList<ISymbolicRegressionSolution>(state.ParetoBestModels);
          }

          table.Rows["Best quality"].Values.Add(bestQuality.Value);
          table.Rows["Current best quality"].Values.Add(curQuality.Value);
          table.Rows["Average quality"].Values.Add(avgQuality.Value);
          iterations.Value += n;
          n = 0;
        }
      }

      // final results (assumes that at least one iteration was calculated)
      if (n > 0) {
        if (bestQ > bestQuality.Value) {
          bestSolutionIteration.Value = iterations.Value + n;
        }
        bestQuality.Value = bestQ;
        curQuality.Value = curBestQ;
        avgQuality.Value = sumQ / n;

        funcEvals.Value = state.FuncEvaluations;
        gradEvals.Value = state.GradEvaluations;
        effRollouts.Value = state.EffectiveRollouts;
        totalRollouts.Value = state.TotalRollouts;

        table.Rows["Best quality"].Values.Add(bestQuality.Value);
        table.Rows["Current best quality"].Values.Add(curQuality.Value);
        table.Rows["Average quality"].Values.Add(avgQuality.Value);
        iterations.Value += n;

      }


      Results.Add(new Result("Best solution quality (train)", new DoubleValue(state.BestSolutionTrainingQuality)));
      Results.Add(new Result("Best solution quality (test)", new DoubleValue(state.BestSolutionTestQuality)));


      // produce solution
      if (CreateSolution) {
        var model = state.BestModel;

        // produce a regression solution for the best model
        Results.Add(new Result("Solution", model.CreateRegressionSolution(problemData)));
      }
    }
  }
}