
source: stable/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/LossFunctions/LogisticRegressionLoss.cs @ 14186

Last change on this file since 14186 was 14186, checked in by swagner, 8 years ago

#2526: Updated year of copyrights in license headers

File size: 4.0 KB
#region License Information
/* HeuristicLab
 * Copyright (C) 2002-2016 Heuristic and Evolutionary Algorithms Laboratory (HEAL)
 * and the BEACON Center for the Study of Evolution in Action.
 *
 * This file is part of HeuristicLab.
 *
 * HeuristicLab is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * HeuristicLab is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with HeuristicLab. If not, see <http://www.gnu.org/licenses/>.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using HeuristicLab.Common;
using HeuristicLab.Core;
using HeuristicLab.Persistence.Default.CompositeSerializers.Storable;

namespace HeuristicLab.Algorithms.DataAnalysis {
  // see Friedman: "Greedy Function Approximation: A Gradient Boosting Machine" (page 9)
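  // The implemented loss is the negative binomial log-likelihood for labels y in {0, 1},
  // mapped to {-1, +1}:  L(y, F) = log(1 + exp(-2 * y * F))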
  [StorableClass]
  [Item("Logistic regression loss", "")]
  public sealed class LogisticRegressionLoss : Item, ILossFunction {
    public LogisticRegressionLoss() { }

    public double GetLoss(IEnumerable<double> target, IEnumerable<double> pred) {
      var targetEnum = target.GetEnumerator();
      var predEnum = pred.GetEnumerator();

      double s = 0;
      // non-short-circuiting & advances both enumerators in lock-step
      while (targetEnum.MoveNext() & predEnum.MoveNext()) {
        Debug.Assert(targetEnum.Current.IsAlmost(0.0) || targetEnum.Current.IsAlmost(1.0), "labels must be 0 or 1 for logistic regression loss");

        var y = targetEnum.Current * 2 - 1; // y in {-1,1}
        s += Math.Log(1 + Math.Exp(-2 * y * predEnum.Current));
      }
      if (targetEnum.MoveNext() | predEnum.MoveNext())
        throw new ArgumentException("target and pred have different lengths");

      return s;
    }

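    // returns the pseudo-response r = -dL/dF = 2 * y / (1 + exp(2 * y * F)),
    // i.e. the negative gradient of the loss with respect to the model output F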
    public IEnumerable<double> GetLossGradient(IEnumerable<double> target, IEnumerable<double> pred) {
      var targetEnum = target.GetEnumerator();
      var predEnum = pred.GetEnumerator();

      while (targetEnum.MoveNext() & predEnum.MoveNext()) {
        Debug.Assert(targetEnum.Current.IsAlmost(0.0) || targetEnum.Current.IsAlmost(1.0), "labels must be 0 or 1 for logistic regression loss");
        var y = targetEnum.Current * 2 - 1; // y in {-1,1}

        yield return 2 * y / (1 + Math.Exp(2 * y * predEnum.Current));
      }
      if (targetEnum.MoveNext() | predEnum.MoveNext())
        throw new ArgumentException("target and pred have different lengths");
    }

    // targetArr and predArr are not changed by LineSearch
    public double LineSearch(double[] targetArr, double[] predArr, int[] idx, int startIdx, int endIdx) {
      if (targetArr.Length != predArr.Length)
        throw new ArgumentException("target and pred have different lengths");

      // "Simple Newton-Raphson step" of eqn. 23
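      // gamma = sum(r_i) / sum(|r_i| * (2 - |r_i|)) over the rows idx[startIdx..endIdx],
      // where r_i is the pseudo-response for row idx[i]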
      double sumY = 0.0;
      double sumDiff = 0.0;
      for (int i = startIdx; i <= endIdx; i++) {
        var row = idx[i];
        var y = targetArr[row] * 2 - 1; // y in {-1,1}
        var pseudoResponse = 2 * y / (1 + Math.Exp(2 * y * predArr[row]));

        sumY += pseudoResponse;
        sumDiff += Math.Abs(pseudoResponse) * (2 - Math.Abs(pseudoResponse));
      }
      // prevent division by zero
      sumDiff = Math.Max(1E-12, sumDiff);
      return sumY / sumDiff;
    }

    #region item implementation
    [StorableConstructor]
    private LogisticRegressionLoss(bool deserializing) : base(deserializing) { }

    private LogisticRegressionLoss(LogisticRegressionLoss original, Cloner cloner) : base(original, cloner) { }

    public override IDeepCloneable Clone(Cloner cloner) {
      return new LogisticRegressionLoss(this, cloner);
    }
    #endregion

  }
}
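
For reference, a minimal usage sketch (not part of the file above; the sample values and variable names are illustrative, and in practice the gradient-boosted-trees algorithm supplies these arguments):

    var loss = new LogisticRegressionLoss();
    var target = new double[] { 0, 1, 1, 0 };                 // class labels, must be 0 or 1
    var pred = new double[] { -0.5, 1.2, 0.3, -2.0 };         // current model outputs F(x)
    double l = loss.GetLoss(target, pred);                    // sum of log(1 + exp(-2*y*F))
    double[] pseudoResponses = loss.GetLossGradient(target, pred).ToArray();
    int[] idx = Enumerable.Range(0, target.Length).ToArray(); // row indices into target/pred
    double gamma = loss.LineSearch(target, pred, idx, 0, target.Length - 1);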