
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/MctsSymbolicRegression/MctsSymbolicRegressionAlgorithm.cs @ 13645

Last change on this file since 13645 was 13645, checked in by gkronber, 8 years ago

#2581: added an MCTS for symbolic regression models

File size: 14.2 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using HeuristicLab.Analysis;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis.MctsSymbolicRegression {
  [Item("MCTS Symbolic Regression", "Monte Carlo tree search for symbolic regression. Useful mainly as a base learner in gradient boosting.")]
  [StorableClass]
  [Creatable(CreatableAttribute.Categories.DataAnalysisRegression, Priority = 250)]
  public class MctsSymbolicRegressionAlgorithm : BasicAlgorithm {
    public override Type ProblemType {
      get { return typeof(IRegressionProblem); }
    }
    public new IRegressionProblem Problem {
      get { return (IRegressionProblem)base.Problem; }
      set { base.Problem = value; }
    }

    #region ParameterNames
    private const string IterationsParameterName = "Iterations";
    private const string MaxVariablesParameterName = "Maximum variables";
    private const string ScaleVariablesParameterName = "Scale variables";
    private const string AllowedFactorsParameterName = "Allowed factors";
    private const string ConstantOptimizationIterationsParameterName = "Iterations (constant optimization)";
    private const string CParameterName = "C";
    private const string SeedParameterName = "Seed";
    private const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    private const string UpdateIntervalParameterName = "UpdateInterval";
    private const string CreateSolutionParameterName = "CreateSolution";
    private const string PunishmentFactorParameterName = "PunishmentFactor";

    private const string VariableProductFactorName = "product(xi)";
    private const string ExpFactorName = "exp(c * product(xi))";
    private const string LogFactorName = "log(c + sum(c*product(xi)))";
    private const string InvFactorName = "1 / (1 + sum(c*product(xi)))";
    private const string FactorSumsName = "sum of multiple terms";
    #endregion

    #region ParameterProperties
    public IFixedValueParameter<IntValue> IterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[IterationsParameterName]; }
    }
    public IFixedValueParameter<IntValue> MaxSizeParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[MaxVariablesParameterName]; }
    }
    public IFixedValueParameter<BoolValue> ScaleVariablesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleVariablesParameterName]; }
    }
    public IFixedValueParameter<IntValue> ConstantOptimizationIterationsParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[ConstantOptimizationIterationsParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> CParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[CParameterName]; }
    }
    public IFixedValueParameter<DoubleValue> PunishmentFactorParameter {
      get { return (IFixedValueParameter<DoubleValue>)Parameters[PunishmentFactorParameterName]; }
    }
    public IValueParameter<ICheckedItemList<StringValue>> AllowedFactorsParameter {
      get { return (IValueParameter<ICheckedItemList<StringValue>>)Parameters[AllowedFactorsParameterName]; }
    }
    public IFixedValueParameter<IntValue> SeedParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }
    public FixedValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (FixedValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
    public IFixedValueParameter<IntValue> UpdateIntervalParameter {
      get { return (IFixedValueParameter<IntValue>)Parameters[UpdateIntervalParameterName]; }
    }
    public IFixedValueParameter<BoolValue> CreateSolutionParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[CreateSolutionParameterName]; }
    }
    #endregion

    #region Properties
    public int Iterations {
      get { return IterationsParameter.Value.Value; }
      set { IterationsParameter.Value.Value = value; }
    }
    public int Seed {
      get { return SeedParameter.Value.Value; }
      set { SeedParameter.Value.Value = value; }
    }
    public bool SetSeedRandomly {
      get { return SetSeedRandomlyParameter.Value.Value; }
      set { SetSeedRandomlyParameter.Value.Value = value; }
    }
    public int MaxSize {
      get { return MaxSizeParameter.Value.Value; }
      set { MaxSizeParameter.Value.Value = value; }
    }
    public double C {
      get { return CParameter.Value.Value; }
      set { CParameter.Value.Value = value; }
    }

    public double PunishmentFactor {
      get { return PunishmentFactorParameter.Value.Value; }
      set { PunishmentFactorParameter.Value.Value = value; }
    }
    public ICheckedItemList<StringValue> AllowedFactors {
      get { return AllowedFactorsParameter.Value; }
    }
    public int ConstantOptimizationIterations {
      get { return ConstantOptimizationIterationsParameter.Value.Value; }
      set { ConstantOptimizationIterationsParameter.Value.Value = value; }
    }
    public bool ScaleVariables {
      get { return ScaleVariablesParameter.Value.Value; }
      set { ScaleVariablesParameter.Value.Value = value; }
    }
    public bool CreateSolution {
      get { return CreateSolutionParameter.Value.Value; }
      set { CreateSolutionParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    protected MctsSymbolicRegressionAlgorithm(bool deserializing) : base(deserializing) { }

    protected MctsSymbolicRegressionAlgorithm(MctsSymbolicRegressionAlgorithm original, Cloner cloner)
      : base(original, cloner) {
    }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new MctsSymbolicRegressionAlgorithm(this, cloner);
    }

    public MctsSymbolicRegressionAlgorithm() {
      Problem = new RegressionProblem(); // default problem

      var defaultFactorsList = new CheckedItemList<StringValue>(
        new string[] { VariableProductFactorName, ExpFactorName, LogFactorName, InvFactorName, FactorSumsName }
        .Select(s => new StringValue(s).AsReadOnly())
        ).AsReadOnly();
      defaultFactorsList.SetItemCheckedState(defaultFactorsList.First(s => s.Value == FactorSumsName), false);
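      // The factors checked in this list determine the allow* flags passed to
      // MctsSymbolicRegressionStatic.CreateState in Run(); "sum of multiple terms"
      // is unchecked by default, so allowMultipleTerms is false unless the user enables it.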

      Parameters.Add(new FixedValueParameter<IntValue>(IterationsParameterName,
        "Number of iterations", new IntValue(100000)));
      Parameters.Add(new FixedValueParameter<IntValue>(SeedParameterName,
        "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new FixedValueParameter<BoolValue>(SetSeedRandomlyParameterName,
        "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));
      Parameters.Add(new FixedValueParameter<IntValue>(MaxVariablesParameterName,
        "Maximal number of variable references in the symbolic regression models (multiple usages of the same variable are counted)", new IntValue(5)));
      Parameters.Add(new FixedValueParameter<DoubleValue>(CParameterName,
        "Balancing parameter in UCT formula (0 < c < 1000). Small values: greedy search. Large values: enumeration. Default: 1.0", new DoubleValue(1.0)));
      Parameters.Add(new ValueParameter<ICheckedItemList<StringValue>>(AllowedFactorsParameterName,
        "Choose which expressions are allowed as factors in the model.", defaultFactorsList));

      Parameters.Add(new FixedValueParameter<IntValue>(ConstantOptimizationIterationsParameterName,
        "Number of iterations for constant optimization. A small number of iterations should be sufficient for most models. " +
        "Set to 0 to disable constant optimization.", new IntValue(10)));
      Parameters.Add(new FixedValueParameter<BoolValue>(ScaleVariablesParameterName,
        "Set to true to scale all input variables to the range [0..1]", new BoolValue(false)));
      Parameters[ScaleVariablesParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<DoubleValue>(PunishmentFactorParameterName, "Model estimates can be bounded. The estimation limits are calculated as lb = mean(y) - punishmentFactor*range(y) and ub = mean(y) + punishmentFactor*range(y).", new DoubleValue(10)));
      Parameters[PunishmentFactorParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<IntValue>(UpdateIntervalParameterName,
        "Number of iterations until the results are updated", new IntValue(100)));
      Parameters[UpdateIntervalParameterName].Hidden = true;
      Parameters.Add(new FixedValueParameter<BoolValue>(CreateSolutionParameterName,
        "Flag that indicates if a solution should be produced at the end of the run", new BoolValue(true)));
      Parameters[CreateSolutionParameterName].Hidden = true;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
    }

    protected override void Run(CancellationToken cancellationToken) {
      // Set up the algorithm
      if (SetSeedRandomly) Seed = new System.Random().Next();

      // Set up the results display
      var iterations = new IntValue(0);
      Results.Add(new Result("Iterations", iterations));

      var table = new DataTable("Qualities");
      table.Rows.Add(new DataRow("Best quality"));
      table.Rows.Add(new DataRow("Current best quality"));
      table.Rows.Add(new DataRow("Average quality"));
      Results.Add(new Result("Qualities", table));

      var bestQuality = new DoubleValue();
      Results.Add(new Result("Best quality", bestQuality));

      var curQuality = new DoubleValue();
      Results.Add(new Result("Current best quality", curQuality));

      var avgQuality = new DoubleValue();
      Results.Add(new Result("Average quality", avgQuality));

      // same as in SymbolicRegressionSingleObjectiveProblem
      var y = Problem.ProblemData.Dataset.GetDoubleValues(Problem.ProblemData.TargetVariable,
        Problem.ProblemData.TrainingIndices);
      var avgY = y.Average();
      var minY = y.Min();
      var maxY = y.Max();
      var range = maxY - minY;
      var lowerLimit = avgY - PunishmentFactor * range;
      var upperLimit = avgY + PunishmentFactor * range;
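      // Worked example with hypothetical values: if y ranges from 0 to 10 with mean(y) = 5 and
      // PunishmentFactor = 10 (the default), then range(y) = 10, lowerLimit = 5 - 10*10 = -95 and
      // upperLimit = 5 + 10*10 = 105; model estimates are bounded to this interval.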

      // init
      var problemData = (IRegressionProblemData)Problem.ProblemData.Clone();
      if (!AllowedFactors.CheckedItems.Any()) throw new ArgumentException("At least one type of factor must be allowed");
      var state = MctsSymbolicRegressionStatic.CreateState(problemData, (uint)Seed, MaxSize, C, ScaleVariables, ConstantOptimizationIterations,
        lowerLimit, upperLimit,
        allowProdOfVars: AllowedFactors.CheckedItems.Any(s => s.Value.Value == VariableProductFactorName),
        allowExp: AllowedFactors.CheckedItems.Any(s => s.Value.Value == ExpFactorName),
        allowLog: AllowedFactors.CheckedItems.Any(s => s.Value.Value == LogFactorName),
        allowInv: AllowedFactors.CheckedItems.Any(s => s.Value.Value == InvFactorName),
        allowMultipleTerms: AllowedFactors.CheckedItems.Any(s => s.Value.Value == FactorSumsName)
        );

      var updateInterval = UpdateIntervalParameter.Value.Value;
      double sumQ = 0.0;
      double bestQ = 0.0;
      double curBestQ = 0.0;
      double q = 0.0;
      int n = 0;
      // Loop until iteration limit reached or canceled.
      for (int i = 0; i < Iterations && !state.Done; i++) {
        cancellationToken.ThrowIfCancellationRequested();

        q = MctsSymbolicRegressionStatic.MakeStep(state);
        sumQ += q; // sum of the q values over the last updateInterval iterations
        curBestQ = Math.Max(q, curBestQ); // best q within the last updateInterval iterations
        bestQ = Math.Max(q, bestQ); // best q over all iterations
        n++;
        // iteration results
        if (n == updateInterval) {
          bestQuality.Value = bestQ;
          curQuality.Value = curBestQ;
          avgQuality.Value = sumQ / n;
          sumQ = 0.0;
          curBestQ = 0.0;

          table.Rows["Best quality"].Values.Add(bestQuality.Value);
          table.Rows["Current best quality"].Values.Add(curQuality.Value);
          table.Rows["Average quality"].Values.Add(avgQuality.Value);
          iterations.Value += n;
          n = 0;
        }
      }

      // final results
      if (n > 0) {
        bestQuality.Value = bestQ;
        curQuality.Value = curBestQ;
        avgQuality.Value = sumQ / n;

        table.Rows["Best quality"].Values.Add(bestQuality.Value);
        table.Rows["Current best quality"].Values.Add(curQuality.Value);
        table.Rows["Average quality"].Values.Add(avgQuality.Value);
        iterations.Value = iterations.Value + n;
      }


      Results.Add(new Result("Best solution quality (train)", new DoubleValue(state.BestSolutionTrainingQuality)));
      Results.Add(new Result("Best solution quality (test)", new DoubleValue(state.BestSolutionTestQuality)));

      // produce solution
      if (CreateSolution) {
        var model = state.BestModel;

        // produce a regression solution from the best model
        Results.Add(new Result("Solution", model.CreateRegressionSolution(problemData)));
      }
    }
  }
}
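
Usage sketch (not part of the repository file above): a minimal console example of how this algorithm might be wired up and run. It assumes the Dataset and RegressionProblemData constructors from HeuristicLab.Problems.DataAnalysis, a public ProblemData setter on RegressionProblem, and the IExecutable Start()/Stopped pattern; the toy data and the variable names x1, x2, y are hypothetical.

using System;
using System.Collections;
using System.Linq;
using System.Threading;
using HeuristicLab.Algorithms.DataAnalysis.MctsSymbolicRegression;
using HeuristicLab.Problems.DataAnalysis;

public static class MctsSymbolicRegressionExample {
  public static void Main() {
    // Hypothetical toy data: 100 rows, two inputs (x1, x2) and a target y = 2*x1*x2 + 3*x1.
    var rnd = new Random(1234);
    var x1 = Enumerable.Range(0, 100).Select(_ => rnd.NextDouble()).ToArray();
    var x2 = Enumerable.Range(0, 100).Select(_ => rnd.NextDouble()).ToArray();
    var y = x1.Zip(x2, (a, b) => 2.0 * a * b + 3.0 * a).ToArray();

    // Assumed constructors: Dataset(variableNames, variableValues) and
    // RegressionProblemData(dataset, allowedInputVariables, targetVariable).
    var dataset = new Dataset(new[] { "x1", "x2", "y" }, new IList[] { x1, x2, y });
    var problemData = new RegressionProblemData(dataset, new[] { "x1", "x2" }, "y");

    var alg = new MctsSymbolicRegressionAlgorithm();
    ((RegressionProblem)alg.Problem).ProblemData = problemData; // assumes a public ProblemData setter
    alg.Iterations = 20000;
    alg.MaxSize = 5;            // "Maximum variables"
    alg.SetSeedRandomly = false;
    alg.Seed = 31415;

    // Run via the IExecutable pattern: Start() is asynchronous, so wait for the Stopped event.
    using (var stopped = new ManualResetEventSlim(false)) {
      alg.Stopped += (sender, e) => stopped.Set();
      alg.Start();
      stopped.Wait();
    }

    // Print the collected results ("Best quality", "Best solution quality (train)", "Solution", ...).
    foreach (var result in alg.Results)
      Console.WriteLine("{0}: {1}", result.Name, result.Value);
  }
}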