
source: trunk/sources/HeuristicLab.DataAnalysis/Dataset.cs @ 321

Last change on this file since 321 was 312, checked in by gkronber, 16 years ago

added possibility to manually scale data. (ticket #167)

File size: 12.1 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2008 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Xml;
using HeuristicLab.Core;
using HeuristicLab.Data;
using System.Globalization;
using System.Text;

namespace HeuristicLab.DataAnalysis {
  public sealed class Dataset : ItemBase {

    private string name;
    public string Name {
      get { return name; }
      set { name = value; }
    }

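    // Samples are stored in a single row-major array: the value at (row i, column j) lives at
    // index i * columns + j. cachedMeans and cachedRanges memoize, per column, the mean and
    // range of every row interval [from, to] requested so far; scalingFactor and scalingOffset
    // record the linear transformation currently applied to each column.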
    private double[] samples;
    private int rows;
    private Dictionary<int, Dictionary<int, double>>[] cachedMeans;
    private Dictionary<int, Dictionary<int, double>>[] cachedRanges;
    private double[] scalingFactor;

    public double[] ScalingFactor {
      get { return scalingFactor; }
    }
    private double[] scalingOffset;

    public double[] ScalingOffset {
      get { return scalingOffset; }
    }

    public int Rows {
      get { return rows; }
      set { rows = value; }
    }
    private int columns;

    public int Columns {
      get { return columns; }
      set { columns = value; }
    }

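    // GetValue/SetValue address element (i, j) of the flat row-major sample array;
    // SetValue additionally rebuilds the statistic caches and raises FireChanged.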
    public double GetValue(int i, int j) {
      return samples[columns * i + j];
    }

    public void SetValue(int i, int j, double v) {
      if(v != samples[columns * i + j]) {
        samples[columns * i + j] = v;
        CreateDictionaries();
        FireChanged();
      }
    }

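    // Setting the sample array resets the scaling of every column to the identity
    // (factor 1.0, offset 0.0), rebuilds the statistic caches and notifies observers.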
    public double[] Samples {
      get { return samples; }
      set {
        scalingFactor = new double[columns];
        scalingOffset = new double[columns];
        for(int i = 0; i < scalingFactor.Length; i++) {
          scalingFactor[i] = 1.0;
          scalingOffset[i] = 0.0;
        }
        samples = value;
        CreateDictionaries();
        FireChanged();
      }
    }

    private string[] variableNames;
    public string[] VariableNames {
      get { return variableNames; }
      set { variableNames = value; }
    }

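    // default constructor: a 1x1 dataset with a single variable "Var0" and identity scaling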
    public Dataset() {
      Name = "-";
      VariableNames = new string[] { "Var0" };
      Columns = 1;
      Rows = 1;
      Samples = new double[1];
      scalingOffset = new double[] { 0.0 };
      scalingFactor = new double[] { 1.0 };
    }

    private void CreateDictionaries() {
      // keep a means and ranges dictionary for each column (possible target variable) of the dataset.
      cachedMeans = new Dictionary<int, Dictionary<int, double>>[columns];
      cachedRanges = new Dictionary<int, Dictionary<int, double>>[columns];
      for(int i = 0; i < columns; i++) {
        cachedMeans[i] = new Dictionary<int, Dictionary<int, double>>();
        cachedRanges[i] = new Dictionary<int, Dictionary<int, double>>();
      }
    }

    public override IView CreateView() {
      return new DatasetView(this);
    }

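    // Deep copy: samples, variable names and scaling parameters are duplicated.
    // Assigning clone.Samples allocates the clone's scaling arrays (identity values),
    // which are then overwritten with this dataset's factors and offsets below.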
    public override object Clone(IDictionary<Guid, object> clonedObjects) {
      Dataset clone = new Dataset();
      clonedObjects.Add(Guid, clone);
      double[] cloneSamples = new double[rows * columns];
      Array.Copy(samples, cloneSamples, samples.Length);
      clone.rows = rows;
      clone.columns = columns;
      clone.Samples = cloneSamples;
      clone.Name = Name;
      clone.VariableNames = new string[VariableNames.Length];
      Array.Copy(VariableNames, clone.VariableNames, VariableNames.Length);
      Array.Copy(scalingFactor, clone.scalingFactor, columns);
      Array.Copy(scalingOffset, clone.scalingOffset, columns);
      return clone;
    }

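    // Persistence: dimensions, variable names and scaling parameters are written as XML
    // attributes; the sample values themselves are stored as a semicolon-separated list
    // (invariant culture) in the node's inner text.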
    public override XmlNode GetXmlNode(string name, XmlDocument document, IDictionary<Guid, IStorable> persistedObjects) {
      XmlNode node = base.GetXmlNode(name, document, persistedObjects);
      XmlAttribute problemName = document.CreateAttribute("Name");
      problemName.Value = Name;
      node.Attributes.Append(problemName);
      XmlAttribute dim1 = document.CreateAttribute("Dimension1");
      dim1.Value = rows.ToString(CultureInfo.InvariantCulture.NumberFormat);
      node.Attributes.Append(dim1);
      XmlAttribute dim2 = document.CreateAttribute("Dimension2");
      dim2.Value = columns.ToString(CultureInfo.InvariantCulture.NumberFormat);
      node.Attributes.Append(dim2);
      XmlAttribute variableNames = document.CreateAttribute("VariableNames");
      variableNames.Value = GetVariableNamesString();
      node.Attributes.Append(variableNames);
      XmlAttribute scalingFactorsAttribute = document.CreateAttribute("ScalingFactors");
      scalingFactorsAttribute.Value = GetString(scalingFactor);
      node.Attributes.Append(scalingFactorsAttribute);
      XmlAttribute scalingOffsetsAttribute = document.CreateAttribute("ScalingOffsets");
      scalingOffsetsAttribute.Value = GetString(scalingOffset);
      node.Attributes.Append(scalingOffsetsAttribute);
      node.InnerText = ToString(CultureInfo.InvariantCulture.NumberFormat);
      return node;
    }

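    // Deserialization: missing ScalingFactors/ScalingOffsets attributes (older files) fall back
    // to the identity scaling; the inner text must contain exactly rows * columns values.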
    public override void Populate(XmlNode node, IDictionary<Guid, IStorable> restoredObjects) {
      base.Populate(node, restoredObjects);
      Name = node.Attributes["Name"].Value;
      rows = int.Parse(node.Attributes["Dimension1"].Value, CultureInfo.InvariantCulture.NumberFormat);
      columns = int.Parse(node.Attributes["Dimension2"].Value, CultureInfo.InvariantCulture.NumberFormat);

      VariableNames = ParseVariableNamesString(node.Attributes["VariableNames"].Value);
      if(node.Attributes["ScalingFactors"] != null)
        scalingFactor = ParseDoubleString(node.Attributes["ScalingFactors"].Value);
      else {
        scalingFactor = new double[columns]; // compatibility with old serialization format
        for(int i = 0; i < scalingFactor.Length; i++) scalingFactor[i] = 1.0;
      }
      if(node.Attributes["ScalingOffsets"] != null)
        scalingOffset = ParseDoubleString(node.Attributes["ScalingOffsets"].Value);
      else {
        scalingOffset = new double[columns]; // compatibility with old serialization format
        for(int i = 0; i < scalingOffset.Length; i++) scalingOffset[i] = 0.0;
      }

      string[] tokens = node.InnerText.Split(';');
      if(tokens.Length != rows * columns) throw new FormatException();
      samples = new double[rows * columns];
      for(int row = 0; row < rows; row++) {
        for(int column = 0; column < columns; column++) {
          if(double.TryParse(tokens[row * columns + column], NumberStyles.Float, CultureInfo.InvariantCulture.NumberFormat, out samples[row * columns + column]) == false) {
            throw new FormatException("Can't parse " + tokens[row * columns + column] + " as double value.");
          }
        }
      }
      CreateDictionaries();
    }

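    // The public ToString uses the current culture for display; persistence always goes
    // through the invariant-culture overload below to keep stored files culture-independent.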
    public override string ToString() {
      return ToString(CultureInfo.CurrentCulture.NumberFormat);
    }

    private string ToString(NumberFormatInfo format) {
      StringBuilder builder = new StringBuilder();
      for(int row = 0; row < rows; row++) {
        for(int column = 0; column < columns; column++) {
          builder.Append(";");
          builder.Append(samples[row * columns + column].ToString(format));
        }
      }
      if(builder.Length > 0) builder.Remove(0, 1);
      return builder.ToString();
    }

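    // helpers for building the semicolon-separated attribute strings used above
    // and for parsing them back into arrays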
    private string GetVariableNamesString() {
      string s = "";
      for(int i = 0; i < variableNames.Length; i++) {
        s += variableNames[i] + "; ";
      }

      if(variableNames.Length > 0) {
        s = s.TrimEnd(';', ' ');
      }
      return s;
    }
    private string GetString(double[] xs) {
      string s = "";
      for(int i = 0; i < xs.Length; i++) {
        s += xs[i].ToString(CultureInfo.InvariantCulture) + "; ";
      }

      if(xs.Length > 0) {
        s = s.TrimEnd(';', ' ');
      }
      return s;
    }

    private string[] ParseVariableNamesString(string p) {
      p = p.Trim();
      string[] tokens = p.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
      return tokens;
    }
    private double[] ParseDoubleString(string s) {
      s = s.Trim();
      string[] ss = s.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
      double[] xs = new double[ss.Length];
      for(int i = 0; i < xs.Length; i++) {
        xs[i] = double.Parse(ss[i], CultureInfo.InvariantCulture);
      }
      return xs;
    }

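    // mean of a column over the row interval [from, to] (inclusive), cached per (column, from, to)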
    public double GetMean(int column) {
      return GetMean(column, 0, Rows - 1);
    }

    public double GetMean(int column, int from, int to) {
      if(!cachedMeans[column].ContainsKey(from) || !cachedMeans[column][from].ContainsKey(to)) {
        double[] values = new double[to - from + 1];
        for(int sample = from; sample <= to; sample++) {
          values[sample - from] = GetValue(sample, column);
        }
        double mean = Statistics.Mean(values);
        if(!cachedMeans[column].ContainsKey(from)) cachedMeans[column][from] = new Dictionary<int, double>();
        cachedMeans[column][from][to] = mean;
        return mean;
      } else {
        return cachedMeans[column][from][to];
      }
    }

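    // range (maximum - minimum) of a column over [from, to], cached in the same way as the means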
    public double GetRange(int column) {
      return GetRange(column, 0, Rows - 1);
    }

    public double GetRange(int column, int from, int to) {
      if(!cachedRanges[column].ContainsKey(from) || !cachedRanges[column][from].ContainsKey(to)) {
        double[] values = new double[to - from + 1];
        for(int sample = from; sample <= to; sample++) {
          values[sample - from] = GetValue(sample, column);
        }
        double range = Statistics.Range(values);
        if(!cachedRanges[column].ContainsKey(from)) cachedRanges[column][from] = new Dictionary<int, double>();
        cachedRanges[column][from][to] = range;
        return range;
      } else {
        return cachedRanges[column][from][to];
      }
    }

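    // column maximum and minimum over all rows (computed on demand, not cached)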
    public double GetMaximum(int column) {
      double max = Double.NegativeInfinity;
      for(int i = 0; i < Rows; i++) {
        double val = GetValue(i, column);
        if(val > max) max = val;
      }
      return max;
    }

    public double GetMinimum(int column) {
      double min = Double.PositiveInfinity;
      for(int i = 0; i < Rows; i++) {
        double val = GetValue(i, column);
        if(val < min) min = val;
      }
      return min;
    }

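    // Linearly scales a column to the interval [0, 1], unless a non-default scaling is already
    // in effect. A constant column (range 0) is only shifted so that its values become 0.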
    internal void ScaleVariable(int column) {
      if(scalingFactor[column] == 1.0 && scalingOffset[column] == 0.0) {
        double min = GetMinimum(column);
        double max = GetMaximum(column);
        double range = max - min;
        if(range == 0) ScaleVariable(column, 1.0, -min);
        else ScaleVariable(column, 1.0 / range, -min);
      }
      CreateDictionaries();
      FireChanged();
    }

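    // applies scaled = (original + offset) * factor to every value in the column and
    // records the parameters so the transformation can be undone later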
    internal void ScaleVariable(int column, double factor, double offset) {
      scalingFactor[column] = factor;
      scalingOffset[column] = offset;
      for(int i = 0; i < Rows; i++) {
        double origValue = samples[i * columns + column];
        samples[i * columns + column] = (origValue + offset) * factor;
      }
      CreateDictionaries();
      FireChanged();
    }

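    // inverse of ScaleVariable: restores original = scaled / factor - offset and
    // resets the column's scaling parameters to the identity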
    internal void UnscaleVariable(int column) {
      if(scalingFactor[column] != 1.0 || scalingOffset[column] != 0.0) {
        for(int i = 0; i < rows; i++) {
          double scaledValue = samples[i * columns + column];
          samples[i * columns + column] = scaledValue / scalingFactor[column] - scalingOffset[column];
        }
        scalingFactor[column] = 1.0;
        scalingOffset[column] = 0.0;
      }
    }
  }
}