
Changeset 13160


Timestamp: 11/15/15 13:54:43
Author: gkronber
Message: #2504: svn:ignore
Location: trunk/sources
Files: 6 edited

Legend:

  (no prefix)  unmodified
  +            added
  -            removed
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis.Views/3.4/HeuristicLab.Algorithms.DataAnalysis.Views-3.4.csproj

    r13100 → r13160

          <DependentUpon>NcaDimensionReductionView.cs</DependentUpon>
        </Compile>
    -    <Compile Include="OneRClassificationModelView.cs" />
    +    <Compile Include="OneRClassificationModelView.cs">
    +      <SubType>UserControl</SubType>
    +    </Compile>
        <Compile Include="OneRClassificationModelView.Designer.cs">
          <DependentUpon>OneRClassificationModelView.cs</DependentUpon>
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessCovarianceOptimizationProblem.cs

    r12954 → r13160

    using HeuristicLab.Data;
    using HeuristicLab.Encodings.SymbolicExpressionTreeEncoding;
    +using HeuristicLab.Optimization;
    using HeuristicLab.Parameters;
    using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

    …

        }

    -    public void ObjectiveFunction(double[] x, ref double func, double[] grad, object obj) {
    +    public override void Analyze(ISymbolicExpressionTree[] trees, double[] qualities, ResultCollection results, IRandom random) {
    +      if (!results.ContainsKey("Best Solution Quality")) {
    +        results.Add(new Result("Best Solution Quality", typeof(DoubleValue)));
    +      }
    +      if (!results.ContainsKey("Best Tree")) {
    +        results.Add(new Result("Best Tree", typeof(ISymbolicExpressionTree)));
    +      }
    +      if (!results.ContainsKey("Best Solution")) {
    +        results.Add(new Result("Best Solution", typeof(GaussianProcessRegressionSolution)));
    +      }
    +
    +      var bestQuality = qualities.Max();
    +
    +      if (results["Best Solution Quality"].Value == null || bestQuality > ((DoubleValue)results["Best Solution Quality"].Value).Value) {
    +        var bestIdx = Array.IndexOf(qualities, bestQuality);
    +        var bestClone = (ISymbolicExpressionTree)trees[bestIdx].Clone();
    +        results["Best Tree"].Value = bestClone;
    +        results["Best Solution Quality"].Value = new DoubleValue(bestQuality);
    +        results["Best Solution"].Value = CreateSolution(bestClone, random);
    +      }
    +    }
    +
    +    private IItem CreateSolution(ISymbolicExpressionTree tree, IRandom random) {
    +      // tune the hyper-parameters again.
    +      // this is suboptimal because 1) it costs extra effort and 2) we cannot be sure to find the same local optimum
    +      var meanFunction = new MeanConst();
    +      var problemData = ProblemData;
    +      var ds = problemData.Dataset;
    +      var targetVariable = problemData.TargetVariable;
    +      var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
    +      var nVars = allowedInputVariables.Length;
    +      var trainingRows = problemData.TrainingIndices.ToArray();
    +      var bestObjValue = new double[1] { double.MinValue };
    +
    +      // use the same covariance function for each restart
    +      var covarianceFunction = TreeToCovarianceFunction(tree);
    +      // data that is necessary for the objective function
    +      var data = Tuple.Create(ds, targetVariable, allowedInputVariables, trainingRows, (IMeanFunction)meanFunction, covarianceFunction, bestObjValue);
    +
    +      // allocate hyperparameters
    +      var hyperParameters = new double[meanFunction.GetNumberOfParameters(nVars) + covarianceFunction.GetNumberOfParameters(nVars) + 1]; // mean + cov + noise
    +
    +      // initialize hyperparameters
    +      hyperParameters[0] = ds.GetDoubleValues(targetVariable).Average(); // mean const
    +
    +      for (int i = 0; i < covarianceFunction.GetNumberOfParameters(nVars); i++) {
    +        hyperParameters[1 + i] = random.NextDouble() * 2.0 - 1.0;
    +      }
    +      hyperParameters[hyperParameters.Length - 1] = 1.0; // s² = exp(2), TODO: other inits better?
    +
    +      // use alglib.mincg (conjugate gradient) for hyper-parameter optimization ...
    +      double epsg = 0;
    +      double epsf = 0.00001;
    +      double epsx = 0;
    +      double stpmax = 1;
    +      int maxits = ConstantOptIterations;
    +      alglib.mincgstate state;
    +      alglib.mincgreport rep;
    +
    +      alglib.mincgcreate(hyperParameters, out state);
    +      alglib.mincgsetcond(state, epsg, epsf, epsx, maxits);
    +      alglib.mincgsetstpmax(state, stpmax);
    +      alglib.mincgoptimize(state, ObjectiveFunction, null, data);
    +
    +      alglib.mincgresults(state, out hyperParameters, out rep);
    +
    +      if (rep.terminationtype >= 0) {
    +        var model = new GaussianProcessModel(ds, targetVariable, allowedInputVariables, trainingRows, hyperParameters, meanFunction, covarianceFunction);
    +        return model.CreateRegressionSolution(ProblemData);
    +      } else return null;
    +    }
    +
    +    private void ObjectiveFunction(double[] x, ref double func, double[] grad, object obj) {
          // we want to optimize the model likelihood by changing the hyperparameters and also return the gradient for each hyperparameter
          var data = (Tuple<IDataset, string, string[], int[], IMeanFunction, ICovarianceFunction, double[]>)obj;

    …

            var gradients = model.HyperparameterGradients;
            Array.Copy(gradients, grad, gradients.Length);
    -      } catch (Exception) {
    +      } catch (ArgumentException) {
            // building the GaussianProcessModel might fail, in this case we return the worst possible objective value
            func = 1.0E+300;
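
    For readers unfamiliar with alglib's optimizer API: the CreateSolution hunk above uses alglib's standard mincg (nonlinear conjugate gradient) calling sequence: create, set stopping conditions, cap the step length, optimize with a gradient callback, and read back the results. Below is a minimal, self-contained sketch of that same sequence on a toy quadratic objective; it is not part of the changeset and assumes only that the alglib C# assembly is referenced.

    using System;

    public static class MincgSketch {
      // Gradient callback with the same delegate shape as ObjectiveFunction above:
      // fills in the objective value (func) and its gradient (grad) at point x.
      private static void Objective(double[] x, ref double func, double[] grad, object obj) {
        // toy objective f(x) = sum_i x_i^2, gradient 2 * x_i
        func = 0.0;
        for (int i = 0; i < x.Length; i++) {
          func += x[i] * x[i];
          grad[i] = 2.0 * x[i];
        }
      }

      public static void Main() {
        var x = new double[] { 1.0, -2.0, 3.0 }; // arbitrary initial point
        alglib.mincgstate state;
        alglib.mincgreport rep;

        alglib.mincgcreate(x, out state);
        // epsg = epsx = 0 disables those criteria; stop on a small objective
        // improvement (epsf = 1e-5) or after 50 iterations, as in the changeset
        alglib.mincgsetcond(state, 0, 1e-5, 0, 50);
        alglib.mincgsetstpmax(state, 1.0); // bound the step length (stpmax = 1)
        alglib.mincgoptimize(state, Objective, null, null);
        alglib.mincgresults(state, out x, out rep);

        Console.WriteLine("termination type: " + rep.terminationtype);
        Console.WriteLine("x* = (" + string.Join(", ", x) + ")");
      }
    }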
  • trunk/sources/HeuristicLab.Algorithms.DataAnalysis/3.4/GaussianProcess/GaussianProcessModel.cs

    r13121 → r13160

                                                 .ToArray();
          sqrSigmaNoise = Math.Exp(2.0 * hyp.Last());
    -      CalculateModel(ds, rows, scaleInputs);
    +      try {
    +        CalculateModel(ds, rows, scaleInputs);
    +      } catch (alglib.alglibexception ae) {
    +        // wrap exception so that calling code doesn't have to know about alglib implementation
    +        throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
    +      }
        }

    …

        private IEnumerable<double> GetEstimatedValuesHelper(IDataset dataset, IEnumerable<int> rows) {
    -      if (x == null) {
    -        x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
    -      }
    -      int n = x.GetLength(0);
    -
    -      double[,] newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
    -      int newN = newX.GetLength(0);
    -
    -      var Ks = new double[newN, n];
    -      var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, newX.GetLength(1)));
    -      var ms = Enumerable.Range(0, newX.GetLength(0))
    -      .Select(r => mean.Mean(newX, r))
    -      .ToArray();
    -      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));
    -      for (int i = 0; i < newN; i++) {
    -        for (int j = 0; j < n; j++) {
    -          Ks[i, j] = cov.CrossCovariance(x, newX, j, i);
    -        }
    -      }
    -
    -      return Enumerable.Range(0, newN)
    -        .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
    +      try {
    +        if (x == null) {
    +          x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
    +        }
    +        int n = x.GetLength(0);
    +
    +        double[,] newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
    +        int newN = newX.GetLength(0);
    +
    +        var Ks = new double[newN, n];
    +        var mean = meanFunction.GetParameterizedMeanFunction(meanParameter, Enumerable.Range(0, newX.GetLength(1)));
    +        var ms = Enumerable.Range(0, newX.GetLength(0))
    +        .Select(r => mean.Mean(newX, r))
    +        .ToArray();
    +        var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, newX.GetLength(1)));
    +        for (int i = 0; i < newN; i++) {
    +          for (int j = 0; j < n; j++) {
    +            Ks[i, j] = cov.CrossCovariance(x, newX, j, i);
    +          }
    +        }
    +
    +        return Enumerable.Range(0, newN)
    +          .Select(i => ms[i] + Util.ScalarProd(Util.GetRow(Ks, i), alpha));
    +      } catch (alglib.alglibexception ae) {
    +        // wrap exception so that calling code doesn't have to know about alglib implementation
    +        throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
    +      }
        }

        public IEnumerable<double> GetEstimatedVariance(IDataset dataset, IEnumerable<int> rows) {
    -      if (x == null) {
    -        x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
    -      }
    -      int n = x.GetLength(0);
    -
    -      var newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
    -      int newN = newX.GetLength(0);
    -
    -      var kss = new double[newN];
    -      double[,] sWKs = new double[n, newN];
    -      var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
    -
    -      if (l == null) {
    -        l = CalculateL(x, cov, sqrSigmaNoise);
    -      }
    -
    -      // for stddev
    -      for (int i = 0; i < newN; i++)
    -        kss[i] = cov.Covariance(newX, i, i);
    -
    -      for (int i = 0; i < newN; i++) {
    -        for (int j = 0; j < n; j++) {
    -          sWKs[j, i] = cov.CrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
    -        }
    -      }
    -
    -      // for stddev
    -      alglib.ablas.rmatrixlefttrsm(n, newN, l, 0, 0, false, false, 0, ref sWKs, 0, 0);
    -
    -      for (int i = 0; i < newN; i++) {
    -        var sumV = Util.ScalarProd(Util.GetCol(sWKs, i), Util.GetCol(sWKs, i));
    -        kss[i] += sqrSigmaNoise; // kss is V(f), add noise variance of predictive distribution to get V(y)
    -        kss[i] -= sumV;
    -        if (kss[i] < 0) kss[i] = 0;
    -      }
    -      return kss;
    +      try {
    +        if (x == null) {
    +          x = GetData(trainingDataset, allowedInputVariables, trainingRows, inputScaling);
    +        }
    +        int n = x.GetLength(0);
    +
    +        var newX = GetData(dataset, allowedInputVariables, rows, inputScaling);
    +        int newN = newX.GetLength(0);
    +
    +        var kss = new double[newN];
    +        double[,] sWKs = new double[n, newN];
    +        var cov = covarianceFunction.GetParameterizedCovarianceFunction(covarianceParameter, Enumerable.Range(0, x.GetLength(1)));
    +
    +        if (l == null) {
    +          l = CalculateL(x, cov, sqrSigmaNoise);
    +        }
    +
    +        // for stddev
    +        for (int i = 0; i < newN; i++)
    +          kss[i] = cov.Covariance(newX, i, i);
    +
    +        for (int i = 0; i < newN; i++) {
    +          for (int j = 0; j < n; j++) {
    +            sWKs[j, i] = cov.CrossCovariance(x, newX, j, i) / Math.Sqrt(sqrSigmaNoise);
    +          }
    +        }
    +
    +        // for stddev
    +        alglib.ablas.rmatrixlefttrsm(n, newN, l, 0, 0, false, false, 0, ref sWKs, 0, 0);
    +
    +        for (int i = 0; i < newN; i++) {
    +          var sumV = Util.ScalarProd(Util.GetCol(sWKs, i), Util.GetCol(sWKs, i));
    +          kss[i] += sqrSigmaNoise; // kss is V(f), add noise variance of predictive distribution to get V(y)
    +          kss[i] -= sumV;
    +          if (kss[i] < 0) kss[i] = 0;
    +        }
    +        return kss;
    +      } catch (alglib.alglibexception ae) {
    +        // wrap exception so that calling code doesn't have to know about alglib implementation
    +        throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
    +      }
        }
      }
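
    All three hunks in this file apply the same pattern: run the alglib-backed computation inside a try block and rethrow any alglib.alglibexception as an ArgumentException carrying the original as InnerException, so that callers depend only on BCL exception types. A condensed sketch of the pattern, not from the changeset (the helper name SafeCalculate is illustrative only):

    using System;

    public static class ExceptionWrappingSketch {
      // Illustrative helper: runs a numeric kernel and hides the alglib
      // exception type behind a standard BCL exception, preserving the
      // original error as InnerException for diagnostics.
      public static T SafeCalculate<T>(Func<T> kernel) {
        try {
          return kernel();
        } catch (alglib.alglibexception ae) {
          // callers only ever see ArgumentException, never an alglib type
          throw new ArgumentException("There was a problem in the calculation of the Gaussian process model", ae);
        }
      }
    }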
  • trunk/sources/HeuristicLab.ExtLibs/HeuristicLab.SharpDX/2.6.3/HeuristicLab.SharpDX-2.6.3

    • Property svn:ignore set to
      *.DotSettings
      *.user
      obj
      Plugin.cs
  • trunk/sources/HeuristicLab.ExtLibs/HeuristicLab.SharpDX/2.6.3/HeuristicLab.SharpDX-2.6.3/Properties

    • Property svn:ignore set to
      AssemblyInfo.cs
  • trunk/sources/HeuristicLab.Tests/HeuristicLab-3.3/Samples/GPMultiplexerSampleTest.cs

    r12012 → r13160

          var osga = new OffspringSelectionGeneticAlgorithm();
    -      osga.Name = "Genetic Programming - Multiplexer 11 problem";
    +      osga.Name = "Genetic Programming - Multiplexer 11 Problem";
          osga.Description = "A genetic programming algorithm that solves the 11-bit multiplexer problem.";
          osga.Problem = problem;