source: branches/RBFRegression/HeuristicLab.Algorithms.DataAnalysis/3.4/MctsSymbolicRegression/MctsSymbolicRegressionAlgorithm.cs @ 14385

Last change on this file since 14385 was 14185, checked in by swagner, 3 years ago

#2526: Updated year of copyrights in license headers

File size: 15.7 KB
Line 
1#region License Information
2/* HeuristicLab
3 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
4 *
5 * This file is part of HeuristicLab.
6 *
7 * HeuristicLab is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
11 *
12 * HeuristicLab is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
19 */
20#endregion
21
22using System;
23using System.Linq;
24using System.Runtime.CompilerServices;
25using System.Threading;
26using HeuristicLab.Algorithms.DataAnalysis.MctsSymbolicRegression.Policies;
27using HeuristicLab.Analysis;
28using HeuristicLab.Common;
29using HeuristicLab.Core;
30using HeuristicLab.Data;
31using HeuristicLab.Optimization;
32using HeuristicLab.Parameters;
33using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
34using HeuristicLab.Problems.DataAnalysis;
35
namespace HeuristicLab.Algorithms.DataAnalysis.MctsSymbolicRegression {
  [Item("MCTS Symbolic Regression", "Monte carlo tree search for symbolic regression. Useful mainly as a base learner in gradient boosting.")]
  [StorableClass]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 250)]
  public class MctsSymbolicRegressionAlgorithm : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(IRegressionProblem); }
    }
    // Strongly-typed view of the problem held by the base class.
    public new IRegressionProblem Problem {
      get { return (IRegressionProblem)base.Problem; }
      set { base.Problem = value; }
    }

    #region ParameterNames
    private const string IterationsParameterName = "Iterations";
    private const string MaxVariablesParameterName = "Maximum variables";
    private const string ScaleVariablesParameterName = "Scale variables";
    private const string AllowedFactorsParameterName = "Allowed factors";
    private const string ConstantOptimizationIterationsParameterName = "Iterations (constant optimization)";
    private const string PolicyParameterName = "Policy";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string UpdateIntervalParameterName = "UpdateInterval";
    private const string CreateSolutionParameterName = "CreateSolution";
    private const string PunishmentFactorParameterName = "PunishmentFactor";

    // Display names of the factor types; these strings are also used as lookup keys
    // when reading the checked-item list in Run().
    // NOTE(review): the log and inv strings are missing a closing ')'. Because they
    // double as persisted item values, correcting the text could break matching
    // against runs stored with the old spelling — confirm before changing.
    private const string VariableProductFactorName = "product(xi)";
    private const string ExpFactorName = "exp(c * product(xi))";
    private const string LogFactorName = "log(c + sum(c*product(xi))";
    private const string InvFactorName = "1 / (1 + sum(c*product(xi))";
    private const string FactorSumsName = "sum of multiple terms";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[IterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaxVariableReferencesParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaxVariablesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> ScaleVariablesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IValueParameter<IPolicy> PolicyParameter {
      get { return (IValueParameter<IPolicy>)Parameters[PolicyParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> PunishmentFactorParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[PunishmentFactorParameterName]; }
    }
    public IValueParameter<ICheckedItemList<StringValue>> AllowedFactorsParameter {
      get { return (IValueParameter<ICheckedItemList<StringValue>>)Parameters[AllowedFactorsParameterName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
    public IFixedValueParameter<IntValue> UpdateIntervalParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[UpdateIntervalParameterName]; }
    }
    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
    }
    #endregion

    #region Properties
    // Convenience accessors that unwrap the parameter value objects above.
    public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int MaxVariableReferences {
      get { return MaxVariableReferencesParameter.Value.Value; }
      set { MaxVariableReferencesParameter.Value.Value = value; }
    }
    public IPolicy Policy {
      get { return PolicyParameter.Value; }
      set { PolicyParameter.Value = value; }
    }
    public double PunishmentFactor {
      get { return PunishmentFactorParameter.Value.Value; }
      set { PunishmentFactorParameter.Value.Value = value; }
    }
    public ICheckedItemList<StringValue> AllowedFactors {
      get { return AllowedFactorsParameter.Value; }
    }
    public int ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value.Value; }
      set { ConstantOptimizationIterationsParameter.Value.Value = value; }
    }
    public bool ScaleVariables {
      get { return ScaleVariablesParameter.Value.Value; }
      set { ScaleVariablesParameter.Value.Value = value; }
    }
    public bool CreateSolution {
      get { return CreateSolutionParameter.Value.Value; }
      set { CreateSolutionParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    protected MctsSymbolicRegressionAlgorithm(bool deserializing) : base(deserializing) { }

    protected MctsSymbolicRegressionAlgorithm(MctsSymbolicRegressionAlgorithm original, Cloner cloner)
      : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new MctsSymbolicRegressionAlgorithm(this, cloner);
    }

    public MctsSymbolicRegressionAlgorithm() {
      Problem = new RegressionProblem(); // default problem

      // All factor types are offered; "sum of multiple terms" is unchecked by default.
      var defaultFactorsList = new CheckedItemList<StringValue>(
        new string[] { VariableProductFactorName, ExpFactorName, LogFactorName, InvFactorName, FactorSumsName }
        .Select(s => new StringValue(s).AsReadOnly())
        ).AsReadOnly();
      defaultFactorsList.SetItemCheckedState(defaultFactorsList.First(s => s.Value == FactorSumsName), false);

      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName,
        "Number of iterations", new IntValue(100000)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName,
        "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName,
        "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaxVariablesParameterName,
        "Maximal number of variables references in the symbolic regression models (multiple usages of the same variable are counted)", new IntValue(5)));
      // Parameters.Add(new FixedValueParameter<DoubleValue>(CParameterName,
      //   "Balancing parameter in UCT formula (0 < c < 1000). Small values: greedy search. Large values: enumeration. Default: 1.0", new DoubleValue(1.0)));
      Parameters.Add(new ValueParameter<IPolicy>(PolicyParameterName,
        "The policy to use for selecting nodes in MCTS (e.g. Ucb)", new Ucb()));
      PolicyParameter.Hidden = true;
      Parameters.Add(new ValueParameter<ICheckedItemList<StringValue>>(AllowedFactorsParameterName,
        "Choose which expressions are allowed as factors in the model.", defaultFactorsList));

      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName,
        "Number of iterations for constant optimization. A small number of iterations should be sufficient for most models. " +
        "Set to 0 to disable constants optimization.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<BoolValue>(ScaleVariablesParameterName,
        "Set to true to scale all input variables to the range [0..1]", new BoolValue(false)));
      Parameters[ScaleVariablesParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<DoubleValue>(PunishmentFactorParameterName, "Estimations of models can be bounded. The estimation limits are calculated in the following way (lb = mean(y) - punishmentFactor*range(y), ub = mean(y) + punishmentFactor*range(y))", new DoubleValue(10)));
      Parameters[PunishmentFactorParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<IntValue>(UpdateIntervalParameterName,
        "Number of iterations until the results are updated", new IntValue(100)));
      Parameters[UpdateIntervalParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
        "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
      Parameters[CreateSolutionParameterName].Hidden = true;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
    }

    // Runs the MCTS search: initializes the result collection, builds the search
    // state from the (cloned) problem data, then performs up to Iterations MCTS
    // steps, updating the displayed results every UpdateInterval steps.
    protected override void Run(CancellationToken cancellationToken) {
      // Set up the algorithm
      if (SetSeedRandomly) Seed = new System.Random().Next();

      // Set up the results display
      var iterations = new IntValue(0);
      Results.Add(new Result("Iterations", iterations));

      var bestSolutionIteration = new IntValue(0);
      Results.Add(new Result("Best solution iteration", bestSolutionIteration));

      var table = new DataTable("Qualities");
      table.Rows.Add(new DataRow("Best quality"));
      table.Rows.Add(new DataRow("Current best quality"));
      table.Rows.Add(new DataRow("Average quality"));
      Results.Add(new Result("Qualities", table));

      var bestQuality = new DoubleValue();
      Results.Add(new Result("Best quality", bestQuality));

      var curQuality = new DoubleValue();
      Results.Add(new Result("Current best quality", curQuality));

      var avgQuality = new DoubleValue();
      Results.Add(new Result("Average quality", avgQuality));

      var totalRollouts = new IntValue();
      Results.Add(new Result("Total rollouts", totalRollouts));
      var effRollouts = new IntValue();
      Results.Add(new Result("Effective rollouts", effRollouts));
      var funcEvals = new IntValue();
      Results.Add(new Result("Function evaluations", funcEvals));
      var gradEvals = new IntValue();
      Results.Add(new Result("Gradient evaluations", gradEvals));


      // same as in SymbolicRegressionSingleObjectiveProblem:
      // derive estimation limits from the training target distribution
      var y = Problem.ProblemData.Dataset.GetDoubleValues(Problem.ProblemData.TargetVariable,
        Problem.ProblemData.TrainingIndices);
      var avgY = y.Average();
      var minY = y.Min();
      var maxY = y.Max();
      var range = maxY - minY;
      var lowerLimit = avgY - PunishmentFactor * range;
      var upperLimit = avgY + PunishmentFactor * range;

      // init (problem data is cloned so the run is independent of later edits)
      var problemData = (IRegressionProblemData)Problem.ProblemData.Clone();
      if (!AllowedFactors.CheckedItems.Any()) throw new ArgumentException("At least one type of factor must be allowed");
      var state = MctsSymbolicRegressionStatic.CreateState(problemData, (uint)Seed, MaxVariableReferences, ScaleVariables, ConstantOptimizationIterations,
        Policy,
        lowerLimit, upperLimit,
        allowProdOfVars: AllowedFactors.CheckedItems.Any(s => s.Value.Value == VariableProductFactorName),
        allowExp: AllowedFactors.CheckedItems.Any(s => s.Value.Value == ExpFactorName),
        allowLog: AllowedFactors.CheckedItems.Any(s => s.Value.Value == LogFactorName),
        allowInv: AllowedFactors.CheckedItems.Any(s => s.Value.Value == InvFactorName),
        allowMultipleTerms: AllowedFactors.CheckedItems.Any(s => s.Value.Value == FactorSumsName)
        );

      var updateInterval = UpdateIntervalParameter.Value.Value;
      double sumQ = 0.0;
      double bestQ = 0.0;
      double curBestQ = 0.0;
      int n = 0;
      // Loop until iteration limit reached or canceled.
      for (int i = 0; i < Iterations && !state.Done; i++) {
        cancellationToken.ThrowIfCancellationRequested();

        var q = MctsSymbolicRegressionStatic.MakeStep(state);
        sumQ += q; // sum of qs in the last updateinterval iterations
        curBestQ = Math.Max(q, curBestQ); // the best q in the last updateinterval iterations
        bestQ = Math.Max(q, bestQ); // the best q overall
        n++;
        // iteration results
        if (n == updateInterval) {
          // best-solution iteration is only recorded at updateInterval granularity
          if (bestQ > bestQuality.Value) {
            bestSolutionIteration.Value = i;
          }
          bestQuality.Value = bestQ;
          curQuality.Value = curBestQ;
          avgQuality.Value = sumQ / n;
          sumQ = 0.0;
          curBestQ = 0.0;

          funcEvals.Value = state.FuncEvaluations;
          gradEvals.Value = state.GradEvaluations;
          effRollouts.Value = state.EffectiveRollouts;
          totalRollouts.Value = state.TotalRollouts;

          table.Rows["Best quality"].Values.Add(bestQuality.Value);
          table.Rows["Current best quality"].Values.Add(curQuality.Value);
          table.Rows["Average quality"].Values.Add(avgQuality.Value);
          iterations.Value += n;
          n = 0;
        }
      }

      // final results (flush the partial interval left over after the loop)
      if (n > 0) {
        if (bestQ > bestQuality.Value) {
          bestSolutionIteration.Value = iterations.Value + n;
        }
        bestQuality.Value = bestQ;
        curQuality.Value = curBestQ;
        avgQuality.Value = sumQ / n;

        funcEvals.Value = state.FuncEvaluations;
        gradEvals.Value = state.GradEvaluations;
        effRollouts.Value = state.EffectiveRollouts;
        totalRollouts.Value = state.TotalRollouts;

        table.Rows["Best quality"].Values.Add(bestQuality.Value);
        table.Rows["Current best quality"].Values.Add(curQuality.Value);
        table.Rows["Average quality"].Values.Add(avgQuality.Value);
        iterations.Value = iterations.Value + n;

      }


      Results.Add(new Result("Best solution quality (train)", new DoubleValue(state.BestSolutionTrainingQuality)));
      Results.Add(new Result("Best solution quality (test)", new DoubleValue(state.BestSolutionTestQuality)));


      // produce solution
      if (CreateSolution) {
        var model = state.BestModel;

        // otherwise we produce a regression solution
        Results.Add(new Result("Solution", model.CreateRegressionSolution(problemData)));
      }
    }
  }
}
Note: See TracBrowser for help on using the repository browser.