Free cookie consent management tool by TermsFeed Policy Generator

Changeset 8375 for trunk/sources


Ignore:
Timestamp:
08/01/12 13:46:17 (12 years ago)
Author:
gkronber
Message:

#1902: implemented Gaussian process regression operators and analyzers

Location:
trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4
Files:
2 added
9 edited

Legend:

Unmodified
Added
Removed
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/BFGSInitializer.cs

    r8371 r8375  
    3535  [Item(Name = "BFGSInitializer", Description = "Initializes the necessary data structures for the BFGS algorithm.")]
    3636  public sealed class BFGSInitializer : SingleSuccessorOperator {
    37     private const string NumberOfHyperparameterParameterName = "NumberOfHyperparameter";
    38     private const string HyperparameterParameterName = "Hyperparameter";
     37    private const string DimensionParameterName = "Dimension";
     38    private const string PointParameterName = "Point";
    3939    private const string BFGSStateParameterName = "BFGSState";
    4040    private const string IterationsParameterName = "Iterations";
     
    4242    #region Parameter Properties
    4343    // in
    44     public ILookupParameter<IntValue> NumberOfHyperparameterParameter {
    45       get { return (ILookupParameter<IntValue>)Parameters[NumberOfHyperparameterParameterName]; }
     44    public ILookupParameter<IntValue> DimensionParameter {
     45      get { return (ILookupParameter<IntValue>)Parameters[DimensionParameterName]; }
    4646    }
    4747    public ILookupParameter<IntValue> IterationsParameter {
     
    4949    }
    5050    // out
    51     public ILookupParameter<DoubleArray> HyperparameterParameter {
    52       get { return (ILookupParameter<DoubleArray>)Parameters[HyperparameterParameterName]; }
     51    public ILookupParameter<DoubleArray> PointParameter {
     52      get { return (ILookupParameter<DoubleArray>)Parameters[PointParameterName]; }
    5353    }
    5454    public ILookupParameter<BFGSState> BFGSStateParameter {
     
    6060
    6161    #region Properties
    62     public IntValue NumberOfHyperparameter { get { return NumberOfHyperparameterParameter.ActualValue; } }
    63     public IntValue Iterations { get { return IterationsParameter.ActualValue; } }
     62    private IntValue Dimension { get { return DimensionParameter.ActualValue; } }
     63    private IntValue Iterations { get { return IterationsParameter.ActualValue; } }
    6464    #endregion
    6565
     
    7070      : base() {
    7171      // in
    72       Parameters.Add(new LookupParameter<IntValue>(NumberOfHyperparameterParameterName, "The number of parameters to optimize."));
     72      Parameters.Add(new LookupParameter<IntValue>(DimensionParameterName, "The length of the vector to optimize."));
    7373      Parameters.Add(new LookupParameter<IntValue>(IterationsParameterName, "The maximal number of iterations for the BFGS algorithm."));
    7474      // out
    75       Parameters.Add(new LookupParameter<DoubleArray>(HyperparameterParameterName, "The hyperparameters for the Gaussian process model."));
     75      Parameters.Add(new LookupParameter<DoubleArray>(PointParameterName, "The initial point for the BFGS algorithm."));
    7676      Parameters.Add(new LookupParameter<BFGSState>(BFGSStateParameterName, "The state of the BFGS algorithm."));
    7777    }
     
    8282
    8383    public override IOperation Apply() {
    84       int n = NumberOfHyperparameter.Value;
    85       double[] initialHyp = Enumerable.Repeat(0.0, n).ToArray();
     84      int n = Dimension.Value;
     85      double[] initialPoint = Enumerable.Repeat(0.0, n).ToArray();
    8686      alglib.minlbfgs.minlbfgsstate state = new alglib.minlbfgs.minlbfgsstate();
    87       alglib.minlbfgs.minlbfgscreate(n, Math.Min(n, 5), initialHyp, state);
     87      alglib.minlbfgs.minlbfgscreate(n, Math.Min(n, 7), initialPoint, state);
    8888      alglib.minlbfgs.minlbfgssetcond(state, 0, 0, 0, Iterations.Value);
     89      alglib.minlbfgs.minlbfgssetxrep(state, true);
    8990
    90       HyperparameterParameter.ActualValue = new DoubleArray(initialHyp);
     91      PointParameter.ActualValue = new DoubleArray(initialPoint);
    9192      BFGSStateParameter.ActualValue = new BFGSState(state);
    9293      return base.Apply();
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/BFGSMakeStep.cs

    r8371 r8375  
    3535  public sealed class BFGSMakeStep : SingleSuccessorOperator {
    3636    private const string TerminationCriterionParameterName = "TerminationCriterion";
    37     private const string HyperparameterParameterName = "Hyperparameter";
     37    private const string PointParameterName = "Point";
    3838    private const string BFGSStateParameterName = "BFGSState";
    3939
     
    4545      get { return (ILookupParameter<BoolValue>)Parameters[TerminationCriterionParameterName]; }
    4646    }
    47     public ILookupParameter<DoubleArray> HyperparameterParameter {
    48       get { return (ILookupParameter<DoubleArray>)Parameters[HyperparameterParameterName]; }
     47    public ILookupParameter<DoubleArray> PointParameter {
     48      get { return (ILookupParameter<DoubleArray>)Parameters[PointParameterName]; }
    4949    }
    5050    #endregion
     
    5252
    5353    #region Properties
    54     public BFGSState BFGSState { get { return BFGSStateParameter.ActualValue; } }
     54    private BFGSState BFGSState { get { return BFGSStateParameter.ActualValue; } }
    5555    #endregion
    5656
     
    6464      // out
    6565      Parameters.Add(new LookupParameter<BoolValue>(TerminationCriterionParameterName, "The termination criterion indicating that the BFGS optimization algorithm should stop."));
    66       Parameters.Add(new LookupParameter<DoubleArray>(HyperparameterParameterName, "The parameters of the function to optimize."));
     66      Parameters.Add(new LookupParameter<DoubleArray>(PointParameterName, "The next point that should be evaluated in the BFGS algorithm."));
    6767    }
    6868
     
    7373    public override IOperation Apply() {
    7474      var state = BFGSState;
    75       bool stop = alglib.minlbfgs.minlbfgsiteration(state.State);
    76       TerminationCriterionParameter.ActualValue = new BoolValue(stop);
    77       if (!stop) {
    78         HyperparameterParameter.ActualValue = new DoubleArray(state.State.x);
     75      bool @continue = alglib.minlbfgs.minlbfgsiteration(state.State);
     76      TerminationCriterionParameter.ActualValue = new BoolValue(!@continue);
     77      if (@continue) {
     78        PointParameter.ActualValue = new DoubleArray(state.State.x);
    7979      } else {
    8080        double[] x = new double[state.State.x.Length];
    8181        alglib.minlbfgs.minlbfgsreport rep = new alglib.minlbfgs.minlbfgsreport();
    8282        alglib.minlbfgs.minlbfgsresults(state.State, ref x, rep);
    83         HyperparameterParameter.ActualValue = new DoubleArray(x);
     83        PointParameter.ActualValue = new DoubleArray(x);
    8484      }
    8585      return base.Apply();
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/BFGSUpdateResults.cs

    r8371 r8375  
    3434  [Item(Name = "BFGSUpdateResults", Description = "Sets the results (function value and gradients) for the next optimization step in the BFGS algorithm.")]
    3535  public sealed class BFGSUpdateResults : SingleSuccessorOperator {
    36     private const string HyperparameterGradientsParameterName = "HyperparameterGradients";
    37     private const string FunctionValueParameterName = "NegativeLogLikelihood";
     36    private const string QualityGradientsParameterName = "QualityGradients";
     37    private const string QualityParameterName = "Quality";
    3838    private const string BFGSStateParameterName = "BFGSState";
    3939
    4040    #region Parameter Properties
    41     public ILookupParameter<DoubleArray> HyperparameterGradientsParameter {
    42       get { return (ILookupParameter<DoubleArray>)Parameters[HyperparameterGradientsParameterName]; }
     41    public ILookupParameter<DoubleArray> QualityGradientsParameter {
     42      get { return (ILookupParameter<DoubleArray>)Parameters[QualityGradientsParameterName]; }
    4343    }
    44     public ILookupParameter<DoubleValue> FunctionValueParameter {
    45       get { return (ILookupParameter<DoubleValue>)Parameters[FunctionValueParameterName]; }
     44    public ILookupParameter<DoubleValue> QualityParameter {
     45      get { return (ILookupParameter<DoubleValue>)Parameters[QualityParameterName]; }
    4646    }
    4747    public ILookupParameter<BFGSState> BFGSStateParameter {
     
    5151
    5252    #region Properties
    53     public DoubleArray HyperparameterGradients { get { return HyperparameterGradientsParameter.ActualValue; } }
    54     public DoubleValue FunctionValue { get { return FunctionValueParameter.ActualValue; } }
    55     public BFGSState BFGSState { get { return BFGSStateParameter.ActualValue; } }
     53    private DoubleArray QualityGradients { get { return QualityGradientsParameter.ActualValue; } }
     54    private DoubleValue Quality { get { return QualityParameter.ActualValue; } }
     55    private BFGSState BFGSState { get { return BFGSStateParameter.ActualValue; } }
    5656    #endregion
    5757
     
    6262      : base() {
    6363      // in
    64       Parameters.Add(new LookupParameter<DoubleArray>(HyperparameterGradientsParameterName, "The function gradients for the parameters of the function to optimize."));
    65       Parameters.Add(new LookupParameter<DoubleValue>(FunctionValueParameterName, "The value of the function to optimize."));
     64      Parameters.Add(new LookupParameter<DoubleArray>(QualityGradientsParameterName, "The gradients at the evaluated point of the function to optimize."));
     65      Parameters.Add(new LookupParameter<DoubleValue>(QualityParameterName, "The value at the evaluated point of the function to optimize."));
    6666      // in & out
    6767      Parameters.Add(new LookupParameter<BFGSState>(BFGSStateParameterName, "The state of the BFGS algorithm."));
     
    7474    public override IOperation Apply() {
    7575      var state = BFGSState;
    76       var f = FunctionValue.Value;
    77       var g = HyperparameterGradients.ToArray();
     76      var f = Quality.Value;
     77      var g = QualityGradients.ToArray();
    7878      state.State.f = f;
    7979      state.State.g = g;
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModelCreator.cs

    r8371 r8375  
    6565
    6666    #region Properties
    67     public DoubleArray Hyperparameter { get { return HyperparameterParameter.ActualValue; } }
    68     public IMeanFunction MeanFunction { get { return MeanFunctionParameter.ActualValue; } }
    69     public ICovarianceFunction CovarianceFunction { get { return CovarianceFunctionParameter.ActualValue; } }
     67    protected DoubleArray Hyperparameter { get { return HyperparameterParameter.ActualValue; } }
     68    protected IMeanFunction MeanFunction { get { return MeanFunctionParameter.ActualValue; } }
     69    protected ICovarianceFunction CovarianceFunction { get { return CovarianceFunctionParameter.ActualValue; } }
    7070    #endregion
    7171
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegression.cs

    r8371 r8375  
    5555    private const string CovarianceFunctionParameterName = "CovarianceFunction";
    5656    private const string MinimizationIterationsParameterName = "Iterations";
    57     //private const string NegativeLogLikelihoodTableParameterName = "NegativeLogLikelihoodTable";
    58     //private const string HyperParametersTableParameterName = "HyperParametersTable";
    5957
    6058    #region parameter properties
     
    6866      get { return (IValueParameter<IntValue>)Parameters[MinimizationIterationsParameterName]; }
    6967    }
    70     //public ILookupParameter<DataTable> NegativeLogLikelihoodTableParameter {
    71     //  get { return (ILookupParameter<DataTable>)Parameters[NegativeLogLikelihoodTableParameterName]; }
    72     //}
    73     //public ILookupParameter<DataTable> HyperParametersTableParameter {
    74     //  get { return (ILookupParameter<DataTable>)Parameters[HyperParametersTableParameterName]; }
    75     //}
    7668    #endregion
    7769    #region properties
     
    10698        new ItemSet<ICovarianceFunction>(covFunctions), covFunctions.First()));
    10799      Parameters.Add(new ValueParameter<IntValue>(MinimizationIterationsParameterName, "The number of iterations for likelihood optimization with BFGS.", new IntValue(20)));
    108       //Parameters.Add(new LookupParameter<DataTable>(NegativeLogLikelihoodTableParameterName, "The negative log likelihood values over the whole run."));
    109       //Parameters.Add(new LookupParameter<DataTable>(HyperParametersTableParameterName, "The values of the hyper-parameters over the whole run."));
    110100
    111101      var setParameterLength = new GaussianProcessSetHyperparameterLength();
     
    115105      var modelCreator = new GaussianProcessRegressionModelCreator();
    116106      var updateResults = new BFGSUpdateResults();
     107      var analyzer = new BFGSAnalyzer();
     108      var finalModelCreator = new GaussianProcessRegressionModelCreator();
     109      var finalAnalyzer = new BFGSAnalyzer();
     110      var solutionCreator = new GaussianProcessRegressionSolutionCreator();
    117111
    118112      OperatorGraph.InitialOperator = setParameterLength;
     
    124118
    125119      initializer.IterationsParameter.ActualName = MinimizationIterationsParameterName;
    126       initializer.NumberOfHyperparameterParameter.ActualName = setParameterLength.NumberOfHyperparameterParameter.Name;
     120      initializer.DimensionParameter.ActualName = setParameterLength.NumberOfHyperparameterParameter.Name;
     121      initializer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    127122      initializer.Successor = makeStep;
    128123
    129124      makeStep.BFGSStateParameter.ActualName = initializer.BFGSStateParameter.Name;
    130       makeStep.HyperparameterParameter.ActualName = initializer.NumberOfHyperparameterParameter.Name;
     125      makeStep.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
    131126      makeStep.Successor = branch;
    132127
    133128      branch.ConditionParameter.ActualName = makeStep.TerminationCriterionParameter.Name;
    134129      branch.FalseBranch = modelCreator;
     130      branch.TrueBranch = finalModelCreator;
    135131
    136132      modelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    137133      modelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
    138134      modelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
    139       modelCreator.HyperparameterParameter.ActualName = initializer.HyperparameterParameter.Name;
    140135      modelCreator.Successor = updateResults;
    141136
    142137      updateResults.BFGSStateParameter.ActualName = initializer.BFGSStateParameter.Name;
    143       updateResults.FunctionValueParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
    144       updateResults.HyperparameterGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
    145       updateResults.Successor = makeStep;
     138      updateResults.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
     139      updateResults.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
     140      updateResults.Successor = analyzer;
    146141
     142      analyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
     143      analyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
     144      analyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
     145      analyzer.BFGSStateParameter.ActualName = initializer.BFGSStateParameter.Name;
     146      analyzer.PointsTableParameter.ActualName = "Hyperparameter table";
     147      analyzer.QualityGradientsTableParameter.ActualName = "Gradients table";
     148      analyzer.QualitiesTableParameter.ActualName = "Negative log likelihood table";
     149      analyzer.Successor = makeStep;
     150
     151      finalModelCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
     152      finalModelCreator.MeanFunctionParameter.ActualName = MeanFunctionParameterName;
     153      finalModelCreator.CovarianceFunctionParameter.ActualName = CovarianceFunctionParameterName;
     154      finalModelCreator.HyperparameterParameter.ActualName = initializer.PointParameter.ActualName;
     155      finalModelCreator.Successor = finalAnalyzer;
     156
     157      finalAnalyzer.QualityParameter.ActualName = modelCreator.NegativeLogLikelihoodParameter.Name;
     158      finalAnalyzer.PointParameter.ActualName = modelCreator.HyperparameterParameter.Name;
     159      finalAnalyzer.QualityGradientsParameter.ActualName = modelCreator.HyperparameterGradientsParameter.Name;
     160      finalAnalyzer.PointsTableParameter.ActualName = analyzer.PointsTableParameter.ActualName;
     161      finalAnalyzer.QualityGradientsTableParameter.ActualName = analyzer.QualityGradientsTableParameter.ActualName;
     162      finalAnalyzer.QualitiesTableParameter.ActualName = analyzer.QualitiesTableParameter.ActualName;
     163      finalAnalyzer.Successor = solutionCreator;
     164
     165      solutionCreator.ModelParameter.ActualName = finalModelCreator.ModelParameter.Name;
     166      solutionCreator.ProblemDataParameter.ActualName = Problem.ProblemDataParameter.Name;
    147167    }
     168
    148169    [StorableHook(HookType.AfterDeserialization)]
    149170    private void AfterDeserialization() { }
     
    152173      return new GaussianProcessRegression(this, cloner);
    153174    }
    154     /*
    155     #region Gaussian process regression
    156     protected override void Run() {
    157       IRegressionProblemData problemData = Problem.ProblemData;
    158 
    159       int nAllowedVariables = problemData.AllowedInputVariables.Count();
    160       var mt = new MersenneTwister();
    161 
    162       var hyp0 =
    163         Enumerable.Range(0,
    164                          1 + MeanFunction.GetNumberOfParameters(nAllowedVariables) +
    165                          CovarianceFunction.GetNumberOfParameters(nAllowedVariables))
    166           .Select(i => mt.NextDouble())
    167           .ToArray();
    168 
    169       double[] hyp;
    170 
    171       // find hyperparameters
    172 
    173       double epsg = 0;
    174       double epsf = 0.00001;
    175       double epsx = 0;
    176 
    177       alglib.minlbfgsstate state;
    178       alglib.minlbfgsreport rep;
    179 
    180       alglib.minlbfgscreate(1, hyp0, out state);
    181       alglib.minlbfgssetcond(state, epsg, epsf, epsx, MinimizationIterations);
    182       alglib.minlbfgssetxrep(state, true);
    183       alglib.minlbfgsoptimize(state, OptimizeGaussianProcessParameters, Report, new object[] { MeanFunction, CovarianceFunction, problemData });
    184       alglib.minlbfgsresults(state, out hyp, out rep);
    185 
    186 
    187       double trainR2, testR2, negativeLogLikelihood;
    188       var solution = CreateGaussianProcessSolution(problemData, hyp, MeanFunction, CovarianceFunction,
    189         out negativeLogLikelihood, out trainR2, out testR2);
    190 
    191       Results.Add(new Result("Gaussian process regression solution", "The Gaussian process regression solution.", solution));
    192       Results.Add(new Result("Training R²", "The Pearson's R² of the Gaussian process solution on the training partition.", new DoubleValue(trainR2)));
    193       Results.Add(new Result("Test R²", "The Pearson's R² of the Gaussian process solution on the test partition.", new DoubleValue(testR2)));
    194       Results.Add(new Result("Negative log likelihood", "The negative log likelihood of the Gaussian process.", new DoubleValue(negativeLogLikelihood)));
    195     }
    196 
    197     public static GaussianProcessRegressionSolution CreateGaussianProcessSolution(IRegressionProblemData problemData,
    198       IEnumerable<double> hyp, IMeanFunction mean, ICovarianceFunction cov,
    199       out double negativeLogLikelihood, out double trainingR2, out double testR2) {
    200 
    201       Dataset dataset = problemData.Dataset;
    202       var allowedInputVariables = problemData.AllowedInputVariables;
    203       string targetVariable = problemData.TargetVariable;
    204       IEnumerable<int> rows = problemData.TrainingIndices;
    205 
    206       var model = new GaussianProcessModel(dataset, targetVariable, allowedInputVariables, rows, hyp, mean, cov);
    207       var solution = new GaussianProcessRegressionSolution(model, (IRegressionProblemData)problemData.Clone());
    208       negativeLogLikelihood = model.NegativeLogLikelihood;
    209       trainingR2 = solution.TrainingRSquared;
    210       testR2 = solution.TestRSquared;
    211       return solution;
    212     }
    213 
    214     private static void OptimizeGaussianProcessParameters(double[] hyp, ref double func, double[] grad, object obj) {
    215       var objArr = (object[])obj;
    216       var meanFunction = (IMeanFunction)objArr[0];
    217       var covarianceFunction = (ICovarianceFunction)objArr[1];
    218       var problemData = (RegressionProblemData)objArr[2];
    219       IEnumerable<string> allowedInputVariables = problemData.AllowedInputVariables;
    220 
    221       Dataset ds = problemData.Dataset;
    222       string targetVariable = problemData.TargetVariable;
    223       IEnumerable<int> rows = problemData.TrainingIndices;
    224 
    225 
    226       IEnumerable<double> dHyp;
    227       var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, rows, hyp, meanFunction,
    228                                            covarianceFunction);
    229       dHyp = model.GetHyperparameterGradients();
    230 
    231       int i = 0;
    232       foreach (var e in dHyp) {
    233         grad[i++] = e;
    234       }
    235       func = model.NegativeLogLikelihood;
    236     }
    237 
    238     public void Report(double[] arg, double func, object obj) {
    239       if (!Results.ContainsKey(NegativeLogLikelihoodTableParameterName)) {
    240         Results.Add(new Result(NegativeLogLikelihoodTableParameterName, new DataTable()));
    241       }
    242       if (!Results.ContainsKey(HyperParametersTableParameterName)) {
    243         Results.Add(new Result(HyperParametersTableParameterName, new DataTable()));
    244       }
    245 
    246       var nllTable = (DataTable)Results[NegativeLogLikelihoodTableParameterName].Value;
    247       if (!nllTable.Rows.ContainsKey("Negative log likelihood"))
    248         nllTable.Rows.Add(new DataRow("Negative log likelihood"));
    249       var nllRow = nllTable.Rows["Negative log likelihood"];
    250 
    251       nllRow.Values.Add(func);
    252 
    253       var hypTable = (DataTable)Results[HyperParametersTableParameterName].Value;
    254       if (hypTable.Rows.Count == 0) {
    255         for (int i = 0; i < arg.Length; i++)
    256           hypTable.Rows.Add(new DataRow(i.ToString()));
    257       }
    258       for (int i = 0; i < arg.Length; i++) {
    259         hypTable.Rows[i.ToString()].Values.Add(arg[i]);
    260       }
    261     }
    262 
    263     #endregion
    264      */
    265175  }
    266176}
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessRegressionModelCreator.cs

    r8371 r8375  
    4545
    4646    #region Properties
    47     public IRegressionProblemData ProblemData {
     47    private IRegressionProblemData ProblemData {
    4848      get { return ProblemDataParameter.ActualValue; }
    4949    }
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessSetHyperparameterLength.cs

    r8371 r8375  
    5858
    5959    #region Properties
    60     public IMeanFunction MeanFunction { get { return MeanFunctionParameter.ActualValue; } }
    61     public ICovarianceFunction CovarianceFunction { get { return CovarianceFunctionParameter.ActualValue; } }
    62     public IDataAnalysisProblemData ProblemData { get { return ProblemDataParameter.ActualValue; } }
     60    private IMeanFunction MeanFunction { get { return MeanFunctionParameter.ActualValue; } }
     61    private ICovarianceFunction CovarianceFunction { get { return CovarianceFunctionParameter.ActualValue; } }
     62    private IDataAnalysisProblemData ProblemData { get { return ProblemDataParameter.ActualValue; } }
    6363    #endregion
    6464
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/HeuristicLab.Algorithms.DataAnalysis-3.4.csproj

    r8371 r8375  
    126126    <Compile Include="GaussianProcess\BFGSUpdateResults.cs" />
    127127    <Compile Include="GaussianProcess\BFGSMakeStep.cs" />
     128    <Compile Include="GaussianProcess\BFGSAnalyzer.cs" />
     129    <Compile Include="GaussianProcess\GaussianProcessRegressionSolutionCreator.cs" />
    128130    <Compile Include="GaussianProcess\GaussianProcessSetHyperparameterLength.cs" />
    129131    <Compile Include="GaussianProcess\GaussianProcessRegressionModelCreator.cs" />
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/Plugin.cs.frame

    r8372 r8375  
    2929  [PluginFile("HeuristicLab.Algorithms.DataAnalysis-3.4.dll", PluginFileType.Assembly)]
    3030  [PluginDependency("HeuristicLab.ALGLIB", "3.5.0")]
     31  [PluginDependency("HeuristicLab.Analysis", "3.3")]
    3132  [PluginDependency("HeuristicLab.LibSVM", "1.6.3")]
    3233  [PluginDependency("HeuristicLab.Collections", "3.3")]
Note: See TracChangeset for help on using the changeset viewer.