
source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/LossFunctions/RelativeErrorLoss.cs @ 13757

Last change on this file since 13757 was 13184, checked in by gkronber, 9 years ago

#2450: merged r12868,r12873,r12875,r13065:13066,r13157:13158 from trunk to stable

File size: 5.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2015 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Algorithms.DataAnalysis {
  // relative error loss is a special case of weighted absolute error loss with weights w_i = 1 / |target_i|
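  // In symbols (matching GetLoss and GetLossGradient below): for targets t_i and predictions p_i
  //   L(t, p) = sum_i |t_i - p_i| * (1 / |t_i|)
  // GetLossGradient yields sign(t_i - p_i) / |t_i| per element, i.e. the negative of dL/dp_i,
  // which is the pseudo-residual used by gradient boosting.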
  [StorableClass]
  [Item("Relative error loss", "")]
  public sealed class RelativeErrorLoss : Item, ILossFunction {
    public RelativeErrorLoss() { }

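    // Worked example (illustrative): target = { 2, 4 }, pred = { 1, 2 }
    //   => loss = |2 - 1| / |2| + |4 - 2| / |4| = 0.5 + 0.5 = 1.0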
    public double GetLoss(IEnumerable<double> target, IEnumerable<double> pred) {
      var targetEnum = target.GetEnumerator();
      var predEnum = pred.GetEnumerator();

      double s = 0;
      // single & (non-short-circuiting) advances both enumerators in lock-step
      while (targetEnum.MoveNext() & predEnum.MoveNext()) {
        double res = targetEnum.Current - predEnum.Current;
        s += Math.Abs(res) * Math.Abs(1.0 / targetEnum.Current);
      }
      if (targetEnum.MoveNext() | predEnum.MoveNext())
        throw new ArgumentException("target and pred have different lengths");

      return s;
    }

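    // Worked example (illustrative): target = { 2, 4 }, pred = { 1, 5 }
    //   residuals are 1 and -1  =>  gradient elements are 1 / |2| = 0.5 and -1 / |4| = -0.25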
    public IEnumerable<double> GetLossGradient(IEnumerable<double> target, IEnumerable<double> pred) {
      var targetEnum = target.GetEnumerator();
      var predEnum = pred.GetEnumerator();

      while (targetEnum.MoveNext() & predEnum.MoveNext()) {
        // sign(res) * abs(1 / target)
        var res = targetEnum.Current - predEnum.Current;
        if (res > 0) yield return 1.0 / Math.Abs(targetEnum.Current);
        else if (res < 0) yield return -1.0 / Math.Abs(targetEnum.Current);
        else yield return 0.0;
      }
      if (targetEnum.MoveNext() | predEnum.MoveNext())
        throw new ArgumentException("target and pred have different lengths");
    }

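    // LineSearch returns the additive constant c that minimizes sum_i w_i * |t_i - (p_i + c)|
    // over the rows idx[startIdx..endIdx] with w_i = 1 / |t_i|; this minimizer is the
    // weighted median of the residuals t_i - p_i.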
    // targetArr and predArr are not changed by LineSearch
    public double LineSearch(double[] targetArr, double[] predArr, int[] idx, int startIdx, int endIdx) {
      if (targetArr.Length != predArr.Length)
        throw new ArgumentException("target and pred have different lengths");

      // line search for relative error
      // weighted median (weight = 1/|target|)
      int nRows = endIdx - startIdx + 1; // startIdx and endIdx are inclusive
      if (nRows == 1) return targetArr[idx[startIdx]] - predArr[idx[startIdx]]; // res
      else if (nRows == 2) {
        // two residuals: return the one with the larger weight, or their average if the weights are equal
        var w0 = Math.Abs(1.0 / targetArr[idx[startIdx]]);
        var w1 = Math.Abs(1.0 / targetArr[idx[endIdx]]);
        if (w0 > w1) {
          return targetArr[idx[startIdx]] - predArr[idx[startIdx]];
        } else if (w0 < w1) {
          return targetArr[idx[endIdx]] - predArr[idx[endIdx]];
        } else {
          // same weight -> return average of both residuals
          return ((targetArr[idx[startIdx]] - predArr[idx[startIdx]]) + (targetArr[idx[endIdx]] - predArr[idx[endIdx]])) / 2;
        }
      } else {
        // create an array of key-value pairs to be sorted (instead of using Array.Sort(res, weights))
        var res_w = new KeyValuePair<double, double>[nRows];
        var totalWeight = 0.0;
        for (int i = startIdx; i <= endIdx; i++) {
          int row = idx[i];
          var res = targetArr[row] - predArr[row];
          var w = Math.Abs(1.0 / targetArr[row]);
          res_w[i - startIdx] = new KeyValuePair<double, double>(res, w);
          totalWeight += w;
        }
        // TODO: improve efficiency (find median without sort)
        res_w.StableSort((a, b) => Math.Sign(a.Key - b.Key));

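        // Illustrative trace (not from the original source): for sorted (residual, weight) pairs
        // (-1.0, 0.2), (0.5, 0.5), (2.0, 0.3) with totalWeight = 1.0, the loop below stops at the
        // first k whose cumulative weight reaches totalWeight / 2 = 0.5:
        //   k = 0: sum = 1.0 - 0.2 = 0.8 > 0.5  -> continue
        //   k = 1: sum = 0.8 - 0.5 = 0.3 <= 0.5 -> stop, return the residual 0.5 (the weighted median)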
        int k = 0;
        double sum = totalWeight - res_w[k].Value; // total - first weight
        while (sum > totalWeight / 2) {
          k++;
          sum -= res_w[k].Value;
        }
        return res_w[k].Key;
      }
    }

    #region item implementation
    [StorableConstructor]
    private RelativeErrorLoss(bool deserializing) : base(deserializing) { }

    private RelativeErrorLoss(RelativeErrorLoss original, Cloner cloner) : base(original, cloner) { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new RelativeErrorLoss(this, cloner);
    }
    #endregion
  }
}
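A minimal usage sketch (illustrative only, not part of the file above; variable names are hypothetical and the HeuristicLab assemblies referenced by the usings are assumed to be available):

  var loss = new RelativeErrorLoss();
  double[] target = { 2.0, 4.0 };
  double[] pred = { 1.0, 2.0 };
  double l = loss.GetLoss(target, pred);                // 0.5 + 0.5 = 1.0
  var idx = new[] { 0, 1 };
  double c = loss.LineSearch(target, pred, idx, 0, 1);  // residual with the larger weight: 2 - 1 = 1.0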