
source: branches/2839_HiveProjectManagement/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/Friedman/FriedmanRandomFunction.cs

Last change on this file was 16057, checked in by jkarder, 6 years ago

#2839:

File size: 5.6 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2018 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Random;

namespace HeuristicLab.Problems.Instances.DataAnalysis {
  public class FriedmanRandomFunction : ArtificialRegressionDataDescriptor {
    private readonly int nTrainingSamples;
    private readonly int nTestSamples;

    private readonly int numberOfFeatures;
    private readonly double noiseRatio;
    private readonly IRandom random;

    public override string Name { get { return string.Format("FriedmanRandomFunction-{0:0%} ({1} dim)", noiseRatio, numberOfFeatures); } }
    public override string Description {
      get {
        return "The data are generated using the random function generator described in 'Friedman: Greedy Function Approximation: A Gradient Boosting Machine, 1999'.";
      }
    }

    public FriedmanRandomFunction(int numberOfFeatures, double noiseRatio,
      IRandom rand)
      : this(500, 5000, numberOfFeatures, noiseRatio, rand) { }

    public FriedmanRandomFunction(int nTrainingSamples, int nTestSamples,
      int numberOfFeatures, double noiseRatio, IRandom rand) {
      this.nTrainingSamples = nTrainingSamples;
      this.nTestSamples = nTestSamples;
      this.noiseRatio = noiseRatio;
      this.random = rand;
      this.numberOfFeatures = numberOfFeatures;
    }
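
    // Usage sketch (illustrative only, not part of the original file): the descriptor
    // is typically constructed with one of HeuristicLab's IRandom implementations,
    // e.g. a MersenneTwister, and consumed through the ArtificialRegressionDataDescriptor
    // base class:
    //   var rand = new MersenneTwister(42);
    //   var descriptor = new FriedmanRandomFunction(10, 0.1, rand);  // 10 features, 10% noise
    //   var problemData = descriptor.GenerateRegressionData();       // assumed base-class helper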

    protected override string TargetVariable { get { return "Y"; } }

    protected override string[] VariableNames {
      get { return AllowedInputVariables.Concat(new string[] { "Y" }).ToArray(); }
    }

    protected override string[] AllowedInputVariables {
      get {
        return Enumerable.Range(1, numberOfFeatures)
          .Select(i => string.Format("X{0:000}", i))
          .ToArray();
      }
    }

    protected override int TrainingPartitionStart { get { return 0; } }
    protected override int TrainingPartitionEnd { get { return nTrainingSamples; } }
    protected override int TestPartitionStart { get { return nTrainingSamples; } }
    protected override int TestPartitionEnd { get { return nTrainingSamples + nTestSamples; } }

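    // Generates numberOfFeatures columns of standard-normally distributed inputs
    // (training and test rows combined) plus one final column with the noisy target Y.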
    protected override List<List<double>> GenerateValues() {
      List<List<double>> data = new List<List<double>>();

      var nrand = new NormalDistributedRandom(random, 0, 1);
      for (int c = 0; c < numberOfFeatures; c++) {
        var datai = Enumerable.Range(0, TestPartitionEnd).Select(_ => nrand.NextDouble()).ToList();
        data.Add(datai);
      }
      var y = GenerateRandomFunction(random, data);

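      // scale the noise so that its variance is a noiseRatio fraction of the total variance:
      // with sigmaNoise = sigmaY * sqrt(noiseRatio / (1 - noiseRatio)) we get
      // sigmaNoise^2 / (sigmaY^2 + sigmaNoise^2) = noiseRatio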
      var targetSigma = y.StandardDeviation();
      var noisePrng = new NormalDistributedRandom(random, 0, targetSigma * Math.Sqrt(noiseRatio / (1.0 - noiseRatio)));

      data.Add(y.Select(t => t + noisePrng.NextDouble()).ToList());

      return data;
    }

    // as described in Greedy Function Approximation paper
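    // The target is the sum of nTerms randomly weighted terms; each term selects a random
    // subset of the input variables, evaluates a random quadratic form on them
    // (see SampleRandomFunction) and scales the result by a coefficient alpha ~ U(-1, 1).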
    private IEnumerable<double> GenerateRandomFunction(IRandom rand, List<List<double>> xs, int nTerms = 20) {
      int nRows = xs.First().Count;

      var gz = new List<double[]>();
      for (int i = 0; i < nTerms; i++) {

        // alpha ~ U(-1, 1)
        double alpha = rand.NextDouble() * 2 - 1;
        double r = -Math.Log(1.0 - rand.NextDouble()) * 2.0; // r is exponentially distributed with mean 2 (lambda = 2 in the paper)
        int nl = (int)Math.Floor(1.5 + r); // the number of selected vars is then likely to be between three and four


        var selectedVars = xs.Shuffle(random).Take(nl).ToArray();
        gz.Add(SampleRandomFunction(random, selectedVars)
          .Select(f => alpha * f)
          .ToArray());
      }
      // sum up
      return Enumerable.Range(0, nRows)
        .Select(r => gz.Sum(gzi => gzi[r]));
    }

    private IEnumerable<double> SampleRandomFunction(IRandom random, List<double>[] xs) {
      int nl = xs.Length;
      // mu: in the paper mu is drawn from the same distribution as x; here it is sampled uniformly from [-1, 1]
      double[] mu = Enumerable.Range(0, nl).Select(_ => random.NextDouble() * 2 - 1).ToArray();
      double[,] v = new double[nl, nl];
      var condNum = 4.0 / 0.01; // condition number from the max and min eigenvalues as given in the paper

      // temporarily use a different random number generator in alglib
      var curRand = alglib.math.rndobject;
      alglib.math.rndobject = new System.Random(random.Next());

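      // generate a random symmetric positive definite matrix v with the requested condition number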
      alglib.matgen.spdmatrixrndcond(nl, condNum, ref v);
      // restore
      alglib.math.rndobject = curRand;

      int nRows = xs.First().Count;
      var z = new double[nl];
      var y = new double[nl];
      for (int i = 0; i < nRows; i++) {
        for (int j = 0; j < nl; j++) z[j] = xs[j][i] - mu[j];
        alglib.ablas.rmatrixmv(nl, nl, v, 0, 0, 0, z, 0, ref y, 0);

        // dot product z . (v * z), i.e. the quadratic form z' * v * z
        var s = 0.0;
        for (int j = 0; j < nl; j++) s += z[j] * y[j];

        yield return s;
      }
    }
  }
}