Changeset 12607 for branches/GBT-trunkintegration/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/LossFunctions/AbsoluteErrorLoss.cs
- Timestamp: 07/06/15 15:20:28 (9 years ago)
- File: 1 edited
Legend:
- Unmodified
- Added
- Removed
branches/GBT-trunkintegration/HeuristicLab.Algorithms.DataAnalysis/3.4/GradientBoostedTrees/LossFunctions/AbsoluteErrorLoss.cs
Diff r12597 → r12607:

  38  38 |   while (targetEnum.MoveNext() & predEnum.MoveNext() & weightEnum.MoveNext()) {
  39  39 |     double res = targetEnum.Current - predEnum.Current;
  40     | -   s += weightEnum.Current * Math.Abs(res);
      40 | +   s += weightEnum.Current * Math.Abs(res); // w * |res|
  41  41 |   }
  42  42 |   if (targetEnum.MoveNext() | predEnum.MoveNext() | weightEnum.MoveNext())
  …
  52  52 |
  53  53 |   while (targetEnum.MoveNext() & predEnum.MoveNext() & weightEnum.MoveNext()) {
  54     | -   // weight * sign(res)
      54 | +   // dL(y, f(x)) / df(x) = weight * sign(res)
  55  55 |     var res = targetEnum.Current - predEnum.Current;
  56  56 |     if (res > 0) yield return weightEnum.Current;
  …
  67  67 |   var predArr = pred.ToArray();
  68  68 |   var weightArr = weight.ToArray();
  69     | - // weights are not supported yet
  70     | - // when weights are supported we need to calculate a weighted median
      69 | + // the optimal constant value that should be added to the predictions is the median of the residuals
      70 | + // weights are not supported yet (need to calculate a weighted median)
  71  71 |   Debug.Assert(weightArr.All(w => w.IsAlmost(1.0)));
  …
  80  80 |   int nRows = endIdx - startIdx + 1;
  81  81 |   var res = new double[nRows];
  82     | - for (int offset = 0; offset < nRows; offset++) {
  83     | -   var i = startIdx + offset;
      82 | + for (int i = startIdx; i <= endIdx; i++) {
  84  83 |     var row = idx[i];
  85     | -   res[offset] = targetArr[row] - predArr[row];
      84 | +   res[i - startIdx] = targetArr[row] - predArr[row];
  86  85 |   }
  87  86 |   return res.Median();
Note: See TracChangeset for help on using the changeset viewer.