source: trunk/HeuristicLab.Problems.Instances.DataAnalysis/3.3/Regression/VariableNetworks/VariableNetwork.cs @ 16755

Last change on this file since 16755 was 16565, checked in by gkronber, 6 years ago

#2520: merged changes from PersistenceOverhaul branch (r16451:16564) into trunk

File size: 9.2 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2019 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Problems.DataAnalysis;
using HeuristicLab.Random;

namespace HeuristicLab.Problems.Instances.DataAnalysis {
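  // Base class for artificial regression data sets in which the variables form a network of
  // functional dependencies. Derived classes only provide GenerateRandomFunction(); this class
  // generates the variable values level by level and records the network structure and the
  // variable relevances.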
  public abstract class VariableNetwork : ArtificialRegressionDataDescriptor {
    private int nTrainingSamples;
    private int nTestSamples;

    private int numberOfFeatures;
    private double noiseRatio;
    private IRandom random;

    private string networkDefinition;
    public string NetworkDefinition { get { return networkDefinition; } }
    public override string Description {
      get {
        return "The data are generated specifically to test methods for variable network analysis.";
      }
    }

    protected VariableNetwork(int nTrainingSamples, int nTestSamples,
      int numberOfFeatures, double noiseRatio, IRandom rand) {
      this.nTrainingSamples = nTrainingSamples;
      this.nTestSamples = nTestSamples;
      this.noiseRatio = noiseRatio;
      this.random = rand;
      this.numberOfFeatures = numberOfFeatures;
      // default variable names
      variableNames = Enumerable.Range(1, numberOfFeatures)
        .Select(i => string.Format("X{0:000}", i))
        .ToArray();

      variableRelevances = new Dictionary<string, IEnumerable<KeyValuePair<string, double>>>();
    }

    private string[] variableNames;
    protected override string[] VariableNames {
      get {
        return variableNames;
      }
    }

    // there is no specific target variable in variable network analysis but we still need to specify one
    protected override string TargetVariable { get { return VariableNames.Last(); } }

    protected override string[] AllowedInputVariables {
      get {
        return VariableNames.Take(numberOfFeatures - 1).ToArray();
      }
    }

    protected override int TrainingPartitionStart { get { return 0; } }
    protected override int TrainingPartitionEnd { get { return nTrainingSamples; } }
    protected override int TestPartitionStart { get { return nTrainingSamples; } }
    protected override int TestPartitionEnd { get { return nTrainingSamples + nTestSamples; } }

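    // maps each generated variable to the relevances of the input variables from which it was
    // produced (filled in GenerateValues)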
    private Dictionary<string, IEnumerable<KeyValuePair<string, double>>> variableRelevances;
    public IEnumerable<KeyValuePair<string, double>> GetVariableRelevance(string targetVar) {
      return variableRelevances[targetVar];
    }

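    // generates all variable values in four levels: lvl0 variables are independent (standardized
    // normal draws plus noise), each higher level contains noisy functions of variables from the
    // levels below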
    protected override List<List<double>> GenerateValues() {
      // variable names are shuffled in the beginning (and sorted at the end)
      variableNames = variableNames.Shuffle(random).ToArray();

      // a third of all variables are independent vars
      List<List<double>> lvl0 = new List<List<double>>();
      int numLvl0 = (int)Math.Ceiling(numberOfFeatures * 0.33);

      List<string> description = new List<string>(); // stores a description of how each variable is actually produced
      List<string[]> inputVarNames = new List<string[]>(); // stores the input variable names (used to produce the graphviz file)
      List<double[]> relevances = new List<double[]>(); // stores variable relevance information (same order as given in inputVarNames)

      var nrand = new NormalDistributedRandom(random, 0, 1);
      for(int c = 0; c < numLvl0; c++) {
        inputVarNames.Add(new string[] { });
        relevances.Add(new double[] { });
        description.Add(" ~ N(0, 1 + noiseLvl)");
        // use same generation procedure for all variables
        var x = Enumerable.Range(0, TestPartitionEnd).Select(_ => nrand.NextDouble()).ToList();
        var sigma = x.StandardDeviationPop();
        var mean = x.Average();
        for(int i = 0; i < x.Count; i++) x[i] = (x[i] - mean) / sigma;
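        // the samples are standardized to zero mean and unit variance, so additive noise with
        // variance noiseRatio / (1 - noiseRatio) accounts for a noiseRatio fraction of the total variance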
        var noisePrng = new NormalDistributedRandom(random, 0, Math.Sqrt(noiseRatio / (1.0 - noiseRatio)));
        lvl0.Add(x.Select(t => t + noisePrng.NextDouble()).ToList());
      }

      // lvl1 contains variables which are functions of vars in lvl0 (+ noise)
      int numLvl1 = (int)Math.Ceiling(numberOfFeatures * 0.33);
      List<List<double>> lvl1 = CreateVariables(lvl0, numLvl1, inputVarNames, description, relevances);

      // lvl2 contains variables which are functions of vars in lvl0 and lvl1 (+ noise)
      int numLvl2 = (int)Math.Ceiling(numberOfFeatures * 0.2);
      List<List<double>> lvl2 = CreateVariables(lvl0.Concat(lvl1).ToList(), numLvl2, inputVarNames, description, relevances);

      // lvl3 contains variables which are functions of vars in lvl0, lvl1 and lvl2 (+ noise)
      int numLvl3 = numberOfFeatures - numLvl0 - numLvl1 - numLvl2;
      List<List<double>> lvl3 = CreateVariables(lvl0.Concat(lvl1).Concat(lvl2).ToList(), numLvl3, inputVarNames, description, relevances);

      this.variableRelevances.Clear();
      for(int i = 0; i < variableNames.Length; i++) {
        var targetVarName = variableNames[i];
        var targetRelevantInputs =
          inputVarNames[i].Zip(relevances[i], (inputVar, rel) => new KeyValuePair<string, double>(inputVar, rel))
            .ToArray();
        variableRelevances.Add(targetVarName, targetRelevantInputs);
      }

      networkDefinition = string.Join(Environment.NewLine, variableNames.Zip(description, (n, d) => n + d).OrderBy(x => x));
      // for graphviz
      networkDefinition += Environment.NewLine + "digraph G {";
      for(int i = 0; i < variableNames.Length; i++) {
        var name = variableNames[i];
        var selectedVarNames = inputVarNames[i];
        var selectedRelevances = relevances[i];
        for(int j = 0; j < selectedVarNames.Length; j++) {
          var selectedVarName = selectedVarNames[j];
          var selectedRelevance = selectedRelevances[j];
          networkDefinition += Environment.NewLine + selectedVarName + " -> " + name +
            string.Format(CultureInfo.InvariantCulture, " [label={0:N3}]", selectedRelevance);
        }
      }
      networkDefinition += Environment.NewLine + "}";

      // return a random permutation of all variables (to mix lvl0, lvl1, ... variables)
      var allVars = lvl0.Concat(lvl1).Concat(lvl2).Concat(lvl3).ToList();
      var orderedVars = allVars.Zip(variableNames, Tuple.Create).OrderBy(t => t.Item2).Select(t => t.Item1).ToList();
      variableNames = variableNames.OrderBy(n => n).ToArray();
      return orderedVars;
    }

    private List<List<double>> CreateVariables(List<List<double>> allowedInputs, int numVars, List<string[]> inputVarNames, List<string> description, List<double[]> relevances) {
      var newVariables = new List<List<double>>();
      for(int c = 0; c < numVars; c++) {
        string[] selectedVarNames;
        double[] relevance;
        var x = GenerateRandomFunction(random, allowedInputs, out selectedVarNames, out relevance).ToArray();
        // standardize x
        var sigma = x.StandardDeviation();
        var mean = x.Average();
        for(int i = 0; i < x.Length; i++) x[i] = (x[i] - mean) / sigma;

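        // same noise scaling as for the lvl0 variables: noise variance noiseRatio / (1 - noiseRatio)
        // relative to the standardized (unit-variance) signal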
        var noisePrng = new NormalDistributedRandom(random, 0, Math.Sqrt(noiseRatio / (1.0 - noiseRatio)));
        newVariables.Add(x.Select(t => t + noisePrng.NextDouble()).ToList());
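        // sort the selected input names alphabetically, keeping the relevance values aligned (parallel arrays)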
        Array.Sort(selectedVarNames, relevance);
        inputVarNames.Add(selectedVarNames);
        relevances.Add(relevance);
        var desc = string.Format("f({0})", string.Join(",", selectedVarNames));
        // for the relevance information order variables by decreasing relevance
        var relevanceStr = string.Join(", ",
          selectedVarNames.Zip(relevance, Tuple.Create)
          .OrderByDescending(t => t.Item2)
          .Select(t => string.Format(CultureInfo.InvariantCulture, "{0}: {1:N3}", t.Item1, t.Item2)));
        description.Add(string.Format(" ~ N({0}, {1:N3}) [Relevances: {2}]", desc, noisePrng.Sigma, relevanceStr));
      }
      return newVariables;
    }

    public int SampleNumberOfVariables(IRandom rand, int maxNumberOfVariables) {
      double r = -Math.Log(1.0 - rand.NextDouble()) * 2.0; // r is exponentially distributed with mean 2 (inverse transform sampling)
      int nl = (int)Math.Floor(1.5 + r); // on average about three variables are selected
      return Math.Min(maxNumberOfVariables, nl);
    }

    // sample a random function and calculate the variable relevances
    protected abstract IEnumerable<double> GenerateRandomFunction(IRandom rand, List<List<double>> xs, out string[] selectedVarNames, out double[] relevance);
  }
}