
source: trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs @ 8417

Last change on this file since 8417 was 8401, checked in by gkronber, 12 years ago

#1423 moved LM-BFGS implementation from data-analysis into the gradient descent algorithm plugin.

File size: 9.5 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2012 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Algorithms.GradientDescent;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Data;
using HeuristicLab.Operators;
using HeuristicLab.Optimization;
using HeuristicLab.Parameters;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;
using HeuristicLab.PluginInfrastructure;
using HeuristicLab.Problems.DataAnalysis;

namespace HeuristicLab.Algorithms.DataAnalysis {
  /// <summary>
  /// Gaussian process regression data analysis algorithm.
  /// </summary>
  [Item("Gaussian Process Regression", "Gaussian process regression data analysis algorithm.")]
  [Creatable("Data Analysis")]
  [StorableClass]
  public sealed class GaussianProcessRegression : EngineAlgorithm, IStorableContent {
    public string Filename { get; set; }

    public override Type ProblemType { get { return typeof(IRegressionProblem); } }
    public new IRegressionProblem Problem {
      get { return (IRegressionProblem)base.Problem; }
      set { base.Problem = value; }
    }

    private const string MeanFunctionParameterName = "MeanFunction";
    private const string CovarianceFunctionParameterName = "CovarianceFunction";
    private const string MinimizationIterationsParameterName = "Iterations";
    private const string ApproximateGradientsParameterName = "ApproximateGradients";

    #region parameter properties
    public IConstrainedValueParameter<IMeanFunction> MeanFunctionParameter {
      get { return (IConstrainedValueParameter<IMeanFunction>)Parameters[MeanFunctionParameterName]; }
    }
    public IConstrainedValueParameter<ICovarianceFunction> CovarianceFunctionParameter {
      get { return (IConstrainedValueParameter<ICovarianceFunction>)Parameters[CovarianceFunctionParameterName]; }
    }
    public IValueParameter<IntValue> MinimizationIterationsParameter {
      get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }
    }
    #endregion
    #region properties
    public IMeanFunction MeanFunction {
      set { MeanFunctionParameter.Value = value; }
      get { return MeanFunctionParameter.Value; }
    }
    public ICovarianceFunction CovarianceFunction {
      set { CovarianceFunctionParameter.Value = value; }
      get { return CovarianceFunctionParameter.Value; }
    }
    public int MinimizationIterations {
      set { MinimizationIterationsParameter.Value.Value = value; }
      get { return MinimizationIterationsParameter.Value.Value; }
    }
    #endregion
    [StorableConstructor]
    private GaussianProcessRegression(bool deserializing) : base(deserializing) { }
    private GaussianProcessRegression(GaussianProcessRegression original, Cloner cloner)
      : base(original, cloner) {
    }
    public GaussianProcessRegression()
      : base() {
      this.name = ItemName;
      this.description = ItemDescription;

      Problem = new RegressionProblem();

      List<IMeanFunction> meanFunctions = ApplicationManager.Manager.GetInstances<IMeanFunction>().ToList();
      List<ICovarianceFunction> covFunctions = ApplicationManager.Manager.GetInstances<ICovarianceFunction>().ToList();

      Parameters.Add(new ConstrainedValueParameter<IMeanFunction>(MeanFunctionParameterName, "The mean function to use.",
        new ItemSet<IMeanFunction>(meanFunctions), meanFunctions.First()));
      Parameters.Add(new ConstrainedValueParameter<ICovarianceFunction>(CovarianceFunctionParameterName, "The covariance function to use.",
        new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.First()));
      Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with LM-BFGS.", new IntValue(20)));
      Parameters.Add(new ValueParameter<BoolValue>(ApproximateGradientsParameterName, "Indicates that gradients should not be approximated (necessary for LM-BFGS).", new BoolValue(false)));
      Parameters[ApproximateGradientsParameterName].Hidden = true; // should not be changed

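      // Build the operator graph: initialize the GP hyperparameters, then run an LM-BFGS loop
      // (make step -> create model -> update optimizer state -> analyze) until the termination
      // criterion holds; finally create the model from the optimized hyperparameters and wrap
      // it into a regression solution.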
      var gpInitializer = new GaussianProcessHyperparameterInitializer();
      var bfgsInitializer = new LbfgsInitializer();
      var makeStep = new LbfgsMakeStep();
      var branch = new ConditionalBranch();
      var modelCreator = new GaussianProcessRegressionModelCreator();
      var updateResults = new LbfgsUpdateResults();
      var analyzer = new LbfgsAnalyzer();
      var finalModelCreator = new GaussianProcessRegressionModelCreator();
      var finalAnalyzer = new LbfgsAnalyzer();
      var solutionCreator = new GaussianProcessRegressionSolutionCreator();

      OperatorGraph.InitialOperator = gpInitializer;

      gpInitializer.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
      gpInitializer.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
      gpInitializer.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
      gpInitializer.HyperparameterParameter.ActualName = modelCreator.HyperparameterParameter.Name;
      gpInitializer.Successor = bfgsInitializer;

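      // LM-BFGS starts from the initialized hyperparameter vector and uses the exact gradients
      // computed by the model creator (ApproximateGradients is fixed to false above)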
      bfgsInitializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
      bfgsInitializer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
      bfgsInitializer.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
      bfgsInitializer.Successor = makeStep;

      makeStep.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      makeStep.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
      makeStep.Successor = branch;

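      // while LM-BFGS has not terminated, the false branch recomputes the model;
      // once the termination criterion is met, the true branch creates the final model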
      branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
      branch.FalseBranch = modelCreator;
      branch.TrueBranch = finalModelCreator;

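      // the model creator evaluates the negative log likelihood and its gradients
      // for the current hyperparameter vector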
      modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
      modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
      modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
      modelCreator.Successor = updateResults;

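      // feed the likelihood value and its gradients back into the LM-BFGS state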
      updateResults.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      updateResults.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
      updateResults.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
      updateResults.ApproximateGradientsParameter.ActualName = ApproximateGradientsParameterName;
      updateResults.Successor = analyzer;

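      // record hyperparameters, gradients, and negative log likelihood in data tables,
      // then continue with the next LM-BFGS step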
      analyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
      analyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
      analyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
      analyzer.StateParameter.ActualName = bfgsInitializer.StateParameter.Name;
      analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
      analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
      analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
      analyzer.Successor = makeStep;

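      // after termination, build the final model from the optimized hyperparameter vector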
      finalModelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
      finalModelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
      finalModelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
      finalModelCreator.HyperparameterParameter.ActualName = bfgsInitializer.PointParameter.ActualName;
      finalModelCreator.Successor = finalAnalyzer;

      finalAnalyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
      finalAnalyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
      finalAnalyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
      finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
      finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
      finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
      finalAnalyzer.Successor = solutionCreator;

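      // wrap the final model and the problem data into a regression solution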
      solutionCreator.ModelParameter.ActualName = finalModelCreator.ModelParameter.Name;
      solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    }

    [StorableHook(HookType.AfterDeserialization)]
    private void AfterDeserialization() { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new GaussianProcessRegression(this, cloner);
    }
  }
}