
source: trunk/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessBase.cs @ 16167

Last change on this file since 16167 was 15583, checked in by swagner, 6 years ago

#2640: Updated year of copyrights in license headers


#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System.Linq;
using HeuristicLab.Algorithms.GradientDescent;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Base class for Gaussian process data analysis algorithms (regression and classification).
  /// </summary>
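  /// <remarks>
  /// The constructor registers the algorithm parameters and builds an operator graph that
  /// tunes the mean and covariance hyperparameters with LM-BFGS by minimizing the negative log likelihood.
  /// </remarks>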
  [StorableClass]
  public abstract class GaussianProcessBase : EngineAlgorithm {
    protected const string MeanFunctionParameterName = "MeanFunction";
    protected const string CovarianceFunctionParameterName = "CovarianceFunction";
    protected const string MinimizationIterationsParameterName = "Iterations";
    protected const string ApproximateGradientsParameterName = "ApproximateGradients";
    protected const string SeedParameterName = "Seed";
    protected const string SetSeedRandomlyParameterName = "SetSeedRandomly";
    protected const string ModelCreatorParameterName = "GaussianProcessModelCreator";
    protected const string NegativeLogLikelihoodParameterName = "NegativeLogLikelihood";
    protected const string HyperparameterParameterName = "Hyperparameter";
    protected const string HyperparameterGradientsParameterName = "HyperparameterGradients";
    protected const string SolutionCreatorParameterName = "GaussianProcessSolutionCreator";
    protected const string ScaleInputValuesParameterName = "ScaleInputValues";

    public new IDataAnalysisProblem Problem {
      get { return (IDataAnalysisProblem)base.Problem; }
      set { base.Problem = value; }
    }

    #region parameter properties
    public IValueParameter<IMeanFunction> MeanFunctionParameter {
      get { return (IValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
    }
    public IValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
      get { return (IValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
    }
    public IValueParameter<IntValue> MinimizationIterationsParameter {
      get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }
    }
    public IValueParameter<IntValue> SeedParameter {
      get { return (IValueParameter<IntValue>)Parameters[SeedParameterName]; }
    }
    public IValueParameter<BoolValue> SetSeedRandomlyParameter {
      get { return (IValueParameter<BoolValue>)Parameters[SetSeedRandomlyParameterName]; }
    }
    public IFixedValueParameter<BoolValue> ScaleInputValuesParameter {
      get { return (IFixedValueParameter<BoolValue>)Parameters[ScaleInputValuesParameterName]; }
    }
    #endregion
    #region properties
    public IMeanFunction MeanFunction {
      set { MeanFunctionParameter.Value = value; }
      get { return MeanFunctionParameter.Value; }
    }
    public ICovarianceFunction CovarianceFunction {
      set { CovarianceFunctionParameter.Value = value; }
      get { return CovarianceFunctionParameter.Value; }
    }
    public int MinimizationIterations {
      set { MinimizationIterationsParameter.Value.Value = value; }
      get { return MinimizationIterationsParameter.Value.Value; }
    }
    public int Seed { get { return SeedParameter.Value.Value; } set { SeedParameter.Value.Value = value; } }
    public bool SetSeedRandomly { get { return SetSeedRandomlyParameter.Value.Value; } set { SetSeedRandomlyParameter.Value.Value = value; } }

    public bool ScaleInputValues {
      get { return ScaleInputValuesParameter.Value.Value; }
      set { ScaleInputValuesParameter.Value.Value = value; }
    }
    #endregion

    [StorableConstructor]
    protected GaussianProcessBase(bool deserializing) : base(deserializing) { }
    protected GaussianProcessBase(GaussianProcessBase original, Cloner cloner)
      : base(original, cloner) {
    }
    protected GaussianProcessBase(IDataAnalysisProblem problem)
      : base() {
      Problem = problem;
      Parameters.Add(new ValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.", new MeanConst()));
      Parameters.Add(new ValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.", new CovarianceSquaredExponentialIso()));
      Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
      Parameters.Add(new ValueParameter<IntValue>(SeedParameterName, "The random seed used to initialize the new pseudo random number generator.", new IntValue(0)));
      Parameters.Add(new ValueParameter<BoolValue>(SetSeedRandomlyParameterName, "True if the random seed should be set to a random value, otherwise false.", new BoolValue(true)));

      Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
      Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed

      Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName,
        "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
      Parameters[ScaleInputValuesParameterName].Hidden = true;

      // the LM-BFGS update operator requires a maximization flag; it is fixed to false
      // because the negative log likelihood is minimized
      Parameters.Add(new FixedValueParameter<BoolValue>("Maximization (BFGS)", new BoolValue(false)));
      Parameters["Maximization (BFGS)"].Hidden = true;

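      // Operator graph: a random number generator seeds the hyperparameter initialization,
      // then an LM-BFGS loop repeatedly evaluates the model (negative log likelihood and
      // gradients) and updates the hyperparameters until the step operator signals termination.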
      var randomCreator = new HeuristicLab.Random.RandomCreator();
      var gpInitializer = new GaussianProcessHyperparameterInitializer();
      var bfgsInitializer = new LbfgsInitializer();
      var makeStep = new LbfgsMakeStep();
      var branch = new ConditionalBranch();
      var modelCreator = new Placeholder();
      var updateResults = new LbfgsUpdateResults();
      var analyzer = new LbfgsAnalyzer();
      var finalModelCreator = new Placeholder();
      var finalAnalyzer = new LbfgsAnalyzer();
      var solutionCreator = new Placeholder();

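      // wire the operators: random creator -> hyperparameter initializer -> LM-BFGS initializer -> step loop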
      OperatorGraph.InitialOperator = randomCreator;
      randomCreator.SeedParameter.ActualName = SeedParameterName;
      randomCreator.SeedParameter.Value = null;
      randomCreator.SetSeedRandomlyParameter.ActualName = SetSeedRandomlyParameterName;
      randomCreator.SetSeedRandomlyParameter.Value = null;
      randomCreator.Successor = gpInitializer;

      gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
      gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
      gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
      gpInitializer.HyperparameterParameter.ActualName = HyperparameterParameterName;
      gpInitializer.RandomParameter.ActualName = randomCreator.RandomParameter.Name;
      gpInitializer.Successor = bfgsInitializer;

      bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
      bfgsInitializer.PointParameter.ActualName = HyperparameterParameterName;
      bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
      bfgsInitializer.Successor = makeStep;

      makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      makeStep.PointParameter.ActualName = HyperparameterParameterName;
      makeStep.Successor = branch;

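      // the termination criterion of the LM-BFGS step decides whether to run another
      // iteration (false branch) or to build the final model (true branch)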
      branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
      branch.FalseBranch = modelCreator;
      branch.TrueBranch = finalModelCreator;

      modelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
      modelCreator.Successor = updateResults;

      updateResults.MaximizationParameter.ActualName = "Maximization (BFGS)";
      updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      updateResults.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
      updateResults.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
      updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
      updateResults.Successor = analyzer;

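      // the analyzer records the hyperparameters, gradients, and negative log likelihood
      // of every iteration in data tables before looping back to the next LM-BFGS step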
      analyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
      analyzer.PointParameter.ActualName = HyperparameterParameterName;
      analyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
      analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
      analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
      analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
      analyzer.Successor = makeStep;

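      // after termination the final model and solution are created via Placeholder operators;
      // their actual operators are resolved at runtime through the GaussianProcessModelCreator and
      // GaussianProcessSolutionCreator parameters (presumably provided by the derived regression/classification algorithms)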
      finalModelCreator.OperatorParameter.ActualName = ModelCreatorParameterName;
      finalModelCreator.Successor = finalAnalyzer;

      finalAnalyzer.QualityParameter.ActualName = NegativeLogLikelihoodParameterName;
      finalAnalyzer.PointParameter.ActualName = HyperparameterParameterName;
      finalAnalyzer.QualityGradientsParameter.ActualName = HyperparameterGradientsParameterName;
      finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
      finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
      finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
      finalAnalyzer.Successor = solutionCreator;

      solutionCreator.OperatorParameter.ActualName = SolutionCreatorParameterName;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() {
      // BackwardsCompatibility3.4
      #region Backwards compatible code, remove with 3.5
      if (Parameters.ContainsKey("Maximization")) {
        Parameters.Remove("Maximization");
      }

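      // older versions only stored a plain "Maximization" parameter; add the BFGS-specific
      // parameter and re-wire the LM-BFGS update operator to use it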
      if (!Parameters.ContainsKey("Maximization (BFGS)")) {
        Parameters.Add(new FixedValueParameter<BoolValue>("Maximization (BFGS)", new BoolValue(false)));
        Parameters["Maximization (BFGS)"].Hidden = true;
        OperatorGraph.Operators.OfType<LbfgsUpdateResults>().First().MaximizationParameter.ActualName = "Maximization (BFGS)";
      }

      if (!Parameters.ContainsKey(ScaleInputValuesParameterName)) {
        Parameters.Add(new FixedValueParameter<BoolValue>(ScaleInputValuesParameterName,
          "Determines if the input variable values are scaled to the range [0..1] for training.", new BoolValue(true)));
        Parameters[ScaleInputValuesParameterName].Hidden = true;
      }
      #endregion
    }
  }
}