Changeset 9363 for branches/OaaS/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ThresholdCalculators
Timestamp:
    04/16/13 13:13:41
Location:
    branches/OaaS
Files:
    4 edited
branches/OaaS
    Property svn:mergeinfo changed
    Property svn:ignore changed:
         protoc.exe
         _ReSharper.HeuristicLab 3.3 Tests
        +Google.ProtocolBuffers-2.4.1.473.dll
         packages
branches/OaaS/HeuristicLab.Problems.DataAnalysis
    Property svn:mergeinfo changed
branches/OaaS/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ThresholdCalculators/AccuracyMaximizationThresholdCalculator.cs
--- AccuracyMaximizationThresholdCalculator.cs (r8126)
+++ AccuracyMaximizationThresholdCalculator.cs (r9363)

   public static void CalculateThresholds(IClassificationProblemData problemData, IEnumerable<double> estimatedValues, IEnumerable<double> targetClassValues, out double[] classValues, out double[] thresholds) {
-    int slices = 100;
-    double minThresholdInc = 10e-5; // necessary to prevent infinite loop when maxEstimated - minEstimated is effectively zero (constant model)
+    const int slices = 100;
+    const double minThresholdInc = 10e-5; // necessary to prevent infinite loop when maxEstimated - minEstimated is effectively zero (constant model)
     List<double> estimatedValuesList = estimatedValues.ToList();
     double maxEstimatedValue = estimatedValuesList.Max();
…
     var estimatedAndTargetValuePairs =
       estimatedValuesList.Zip(targetClassValues, (x, y) => new { EstimatedValue = x, TargetClassValue = y })
-        .OrderBy(x => x.EstimatedValue)
-        .ToList();
+        .OrderBy(x => x.EstimatedValue).ToList();

-    classValues = problemData.ClassValues.OrderBy(x => x).ToArray();
+    classValues = estimatedAndTargetValuePairs.GroupBy(x => x.TargetClassValue)
+      .Select(x => new { Median = x.Select(y => y.EstimatedValue).Median(), Class = x.Key })
+      .OrderBy(x => x.Median).Select(x => x.Class).ToArray();
+
     int nClasses = classValues.Length;
     thresholds = new double[nClasses];
     thresholds[0] = double.NegativeInfinity;
-    // thresholds[thresholds.Length - 1] = double.PositiveInfinity;

     // incrementally calculate accuracy of all possible thresholds
…
         //all positives
         if (pair.TargetClassValue.IsAlmost(classValues[i - 1])) {
-          if (pair.EstimatedValue > lowerThreshold && pair.EstimatedValue < actualThreshold)
+          if (pair.EstimatedValue > lowerThreshold && pair.EstimatedValue <= actualThreshold)
             //true positive
-            classificationScore += problemData.GetClassificationPenalty(classValues[i - 1], classValues[i - 1]);
+            classificationScore += problemData.GetClassificationPenalty(pair.TargetClassValue, pair.TargetClassValue);
           else
             //false negative
-            classificationScore += problemData.GetClassificationPenalty(classValues[i], classValues[i - 1]);
+            classificationScore += problemData.GetClassificationPenalty(pair.TargetClassValue, classValues[i]);
         }
         //all negatives
         else {
-          if (pair.EstimatedValue > lowerThreshold && pair.EstimatedValue < actualThreshold)
-            //false positive
-            classificationScore += problemData.GetClassificationPenalty(classValues[i - 1], classValues[i]);
-          else
-            //true negative, consider only upper class
-            classificationScore += problemData.GetClassificationPenalty(classValues[i], classValues[i]);
+          //false positive
+          if (pair.EstimatedValue > lowerThreshold && pair.EstimatedValue <= actualThreshold)
+            classificationScore += problemData.GetClassificationPenalty(pair.TargetClassValue, classValues[i - 1]);
+          else if (pair.EstimatedValue <= lowerThreshold)
+            classificationScore += problemData.GetClassificationPenalty(pair.TargetClassValue, classValues[i - 2]);
+          else if (pair.EstimatedValue > actualThreshold) {
+            if (pair.TargetClassValue < classValues[i - 1]) //negative in wrong class, consider upper class
+              classificationScore += problemData.GetClassificationPenalty(pair.TargetClassValue, classValues[i]);
+            else //true negative, must be optimized by the other thresholds
+              classificationScore += problemData.GetClassificationPenalty(pair.TargetClassValue, pair.TargetClassValue);
+          }
         }
       }
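The reworked scoring loop above evaluates every candidate threshold against the problem's penalty matrix. As a rough, self-contained illustration of the sweep idea only (class and method names here are invented for this sketch, a 0/1 penalty stands in for problemData.GetClassificationPenalty, and only the two-class case is handled):

    using System;
    using System.Linq;

    static class ThresholdSweepDemo {
      // Sweeps candidate thresholds between min and max of the estimated values,
      // mirroring the slices/minThresholdInc loop in the changeset above.
      public static double FindThreshold(double[] estimated, double[] target,
                                         double lowerClass, double upperClass) {
        const int slices = 100;
        double min = estimated.Min(), max = estimated.Max();
        // a floor on the step size (like minThresholdInc) prevents an infinite
        // loop when all estimated values are (almost) identical
        double inc = Math.Max((max - min) / slices, 1e-4);
        double bestThreshold = min, bestScore = double.MaxValue;
        for (double t = min; t <= max; t += inc) {
          // 0/1 penalty: one point for every sample on the wrong side of t
          double score = estimated.Zip(target,
            (e, c) => (e <= t ? lowerClass : upperClass) == c ? 0.0 : 1.0).Sum();
          if (score < bestScore) { bestScore = score; bestThreshold = t; }
        }
        return bestThreshold;
      }

      static void Main() {
        var estimated = new[] { 0.1, 0.3, 0.4, 0.8, 0.9 };
        var target = new[] { 0.0, 0.0, 0.0, 1.0, 1.0 };
        // prints the first candidate with zero misclassifications (about 0.4)
        Console.WriteLine(FindThreshold(estimated, target, 0.0, 1.0));
      }
    }

Note also that the commit changes how classValues are ordered: classes are now sorted by the median of their estimated values rather than by class label, so the thresholds separate the classes in the order the model actually predicts them.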
branches/OaaS/HeuristicLab.Problems.DataAnalysis/3.4/Implementation/Classification/ThresholdCalculators/NormalDistributionCutPointsThresholdCalculator.cs
--- NormalDistributionCutPointsThresholdCalculator.cs (r7259)
+++ NormalDistributionCutPointsThresholdCalculator.cs (r9363)

   public static void CalculateThresholds(IClassificationProblemData problemData, IEnumerable<double> estimatedValues, IEnumerable<double> targetClassValues, out double[] classValues, out double[] thresholds) {
-    double maxEstimatedValue = estimatedValues.Max();
-    double minEstimatedValue = estimatedValues.Min();
     var estimatedTargetValues = Enumerable.Zip(estimatedValues, targetClassValues, (e, t) => new { EstimatedValue = e, TargetValue = t }).ToList();
+    double estimatedValuesRange = estimatedValues.Range();

     Dictionary<double, double> classMean = new Dictionary<double, double>();
…
       }
     }
+
     double[] originalClasses = classMean.Keys.OrderBy(x => x).ToArray();
     int nClasses = originalClasses.Length;
…
         // calculate all thresholds
         CalculateCutPoints(classMean[class0], classStdDev[class0], classMean[class1], classStdDev[class1], out x1, out x2);
-        if (!thresholdList.Any(x => x.IsAlmost(x1))) thresholdList.Add(x1);
-        if (!thresholdList.Any(x => x.IsAlmost(x2))) thresholdList.Add(x2);
+
+        // if the two cut points are too close (for instance because the stdDev = 0)
+        // then move them by 0.1% of the range of estimated values
+        if (x1.IsAlmost(x2)) {
+          x1 -= 0.001 * estimatedValuesRange;
+          x2 += 0.001 * estimatedValuesRange;
+        }
+        if (!double.IsInfinity(x1) && !thresholdList.Any(x => x.IsAlmost(x1))) thresholdList.Add(x1);
+        if (!double.IsInfinity(x2) && !thresholdList.Any(x => x.IsAlmost(x2))) thresholdList.Add(x2);
       }
     }
     thresholdList.Sort();
+
+    // add a small value and a large value for the calculation of the most influential class in each thresholded section
     thresholdList.Insert(0, double.NegativeInfinity);
-
-    // determine class values for each partition separated by a threshold by calculating the density of all class distributions
-    // all points in the partition are classified as the class with the maximal density in the partition
-    List<double> classValuesList = new List<double>();
-    for (int i = 0; i < thresholdList.Count; i++) {
-      double m;
-      if (double.IsNegativeInfinity(thresholdList[i])) {
-        m = thresholdList[i + 1] - 1.0; // smaller than the smallest non-infinity threshold
-      } else if (i == thresholdList.Count - 1) {
-        // last threshold
-        m = thresholdList[i] + 1.0; // larger than the last threshold
-      } else {
-        m = thresholdList[i] + (thresholdList[i + 1] - thresholdList[i]) / 2.0; // middle of partition
-      }
-
-      // determine class with maximal probability density in m
-      double maxDensity = double.MinValue;
-      double maxDensityClassValue = -1;
-      foreach (var classValue in originalClasses) {
-        double density = NormalDensity(m, classMean[classValue], classStdDev[classValue]);
+    thresholdList.Add(double.PositiveInfinity);
+
+    // find the most likely class for the points between each pair of thresholds
+    List<double> filteredThresholds = new List<double>();
+    List<double> filteredClassValues = new List<double>();
+    for (int i = 0; i < thresholdList.Count - 1; i++) {
+      // determine class with maximal density mass between the thresholds
+      double maxDensity = DensityMass(thresholdList[i], thresholdList[i + 1], classMean[originalClasses[0]], classStdDev[originalClasses[0]]);
+      double maxDensityClassValue = originalClasses[0];
+      foreach (var classValue in originalClasses.Skip(1)) {
+        double density = DensityMass(thresholdList[i], thresholdList[i + 1], classMean[classValue], classStdDev[classValue]);
         if (density > maxDensity) {
           maxDensity = density;
…
         }
       }
-      classValuesList.Add(maxDensityClassValue);
-    }
-
-    // only keep thresholds at which the class changes
-    // class B overrides threshold s. So only thresholds r and t are relevant and have to be kept
-    //
-    //      A    B  C
-    //     /\   /\/\
-    //    / r\ / /\t\
-    //   /   /\/  \ \
-    //  /   / /\s  \ \
-    // -/---/-/--\--\-\----
-    List<double> filteredThresholds = new List<double>();
-    List<double> filteredClassValues = new List<double>();
-    filteredThresholds.Add(thresholdList[0]);
-    filteredClassValues.Add(classValuesList[0]);
-    for (int i = 0; i < classValuesList.Count - 1; i++) {
-      if (classValuesList[i] != classValuesList[i + 1]) {
-        filteredThresholds.Add(thresholdList[i + 1]);
-        filteredClassValues.Add(classValuesList[i + 1]);
-      }
-    }
+      if (maxDensity > double.NegativeInfinity &&
+        (filteredClassValues.Count == 0 || !maxDensityClassValue.IsAlmost(filteredClassValues.Last()))) {
+        filteredThresholds.Add(thresholdList[i]);
+        filteredClassValues.Add(maxDensityClassValue);
+      }
+    }
+
+    if (filteredThresholds.Count == 0 || !double.IsNegativeInfinity(filteredThresholds.First())) {
+      // this happens if there are no thresholds (the distributions for all classes are exactly the same)
+      // or when the CDF up to the first threshold is zero
+      // -> all samples should be classified as the class with the most observations
+      // group observations by target class and select the class with the largest count
+      double mostFrequentClass = targetClassValues.GroupBy(c => c)
+        .OrderBy(g => g.Count())
+        .Last().Key;
+      filteredThresholds.Insert(0, double.NegativeInfinity);
+      filteredClassValues.Insert(0, mostFrequentClass);
+    }
+
     thresholds = filteredThresholds.ToArray();
     classValues = filteredClassValues.ToArray();
   }

-  private static double NormalDensity(double x, double mu, double sigma) {
-    if (sigma.IsAlmost(0.0)) {
-      if (x.IsAlmost(mu)) return 1.0; else return 0.0;
-    } else {
-      return (1.0 / Math.Sqrt(2.0 * Math.PI * sigma * sigma)) * Math.Exp(-((x - mu) * (x - mu)) / (2.0 * sigma * sigma));
-    }
-  }
-
-  private static void CalculateCutPoints(double m1, double s1, double m2, double s2, out double x1, out double x2) {
-    double a = (s1 * s1 - s2 * s2);
-    x1 = -(-m2 * s1 * s1 + m1 * s2 * s2 + Math.Sqrt(s1 * s1 * s2 * s2 * ((m1 - m2) * (m1 - m2) + 2.0 * (-s1 * s1 + s2 * s2) * Math.Log(s2 / s1)))) / a;
-    x2 = (m2 * s1 * s1 - m1 * s2 * s2 + Math.Sqrt(s1 * s1 * s2 * s2 * ((m1 - m2) * (m1 - m2) + 2.0 * (-s1 * s1 + s2 * s2) * Math.Log(s2 / s1)))) / a;
-  }
+  private static double sqr2 = Math.Sqrt(2.0);
+
+  // returns the cumulative distribution function of the standard normal distribution at x
+  private static double NormalCDF(double x) {
+    return 0.5 * (1 + alglib.errorfunction(x / sqr2));
+  }
+
+  // approximation of the log of the normal cumulative distribution from the lightspeed toolbox by Tom Minka
+  // http://research.microsoft.com/en-us/um/people/minka/software/lightspeed/
+  private static double[] c = new double[] { -1, 5 / 2.0, -37 / 3.0, 353 / 4.0, -4081 / 5.0, 55205 / 6.0, -854197 / 7.0 };
+
+  private static double LogNormalCDF(double x) {
+    if (x >= -6.5)
+      // calculate the log directly if x is large enough
+      return Math.Log(NormalCDF(x));
+    else {
+      double z = Math.Pow(x, -2);
+      // asymptotic series for logcdf
+      double y = z * (c[0] + z * (c[1] + z * (c[2] + z * (c[3] + z * (c[4] + z * (c[5] + z * c[6]))))));
+      return y - 0.5 * Math.Log(2 * Math.PI) - 0.5 * x * x - Math.Log(-x);
+    }
+  }
+
+  // determines the log of the value NormalCDF(mu, sigma, upper) - NormalCDF(mu, sigma, lower),
+  // i.e. the log of the integral of the PDF of N(mu, sigma) over the range [lower, upper]
+  private static double DensityMass(double lower, double upper, double mu, double sigma) {
+    if (sigma.IsAlmost(0.0)) {
+      if (lower < mu && mu < upper) return 0.0; // log(1): all mass is between lower and upper
+      else return double.NegativeInfinity; // log(0): no mass is between lower and upper
+    }
+
+    if (lower > mu) {
+      return DensityMass(-upper, -lower, -mu, sigma);
+    }
+
+    upper = (upper - mu) / sigma;
+    lower = (lower - mu) / sigma;
+    if (double.IsNegativeInfinity(lower)) return LogNormalCDF(upper);
+
+    return LogNormalCDF(upper) + Math.Log(1 - Math.Exp(LogNormalCDF(lower) - LogNormalCDF(upper)));
+  }
+
+  // Calculates the points x1 and x2 where the distributions N(m1, s1) == N(m2, s2).
+  // In the general case there should be two cut points. If either s1 or s2 is 0 then x1 == x2.
+  // If both s1 and s2 are zero there are no cut points, but we should return something reasonable (e.g. (m1 + m2) / 2).
+  private static void CalculateCutPoints(double m1, double s1, double m2, double s2, out double x1, out double x2) {
+    if (s1.IsAlmost(s2)) {
+      if (m1.IsAlmost(m2)) {
+        x1 = double.NegativeInfinity;
+        x2 = double.NegativeInfinity;
+      } else {
+        // s1 == s2 and m1 != m2
+        // return something reasonable: the cut point should be half way between m1 and m2
+        x1 = (m1 + m2) / 2;
+        x2 = double.NegativeInfinity;
+      }
+    } else if (s1.IsAlmost(0.0)) {
+      // when s1 is 0.0 the cut points are exactly at m1 ...
+      x1 = m1;
+      x2 = m1;
+    } else if (s2.IsAlmost(0.0)) {
+      // ... same for s2
+      x1 = m2;
+      x2 = m2;
+    } else {
+      if (s2 < s1) {
+        // make sure s2 is the larger std. dev.
+        CalculateCutPoints(m2, s2, m1, s1, out x1, out x2);
+      } else {
+        // general case
+        // calculate the solutions x1, x2 where N(m1, s1) == N(m2, s2)
+        double g = Math.Sqrt(2 * s2 * s2 * Math.Log(s2 / s1) - 2 * s1 * s1 * Math.Log(s2 / s1) - 2 * m1 * m2 + m1 * m1 + m2 * m2);
+        double s = (s1 * s1 - s2 * s2);
+        x1 = (m2 * s1 * s1 - m1 * s2 * s2 + s1 * s2 * g) / s;
+        x2 = -(m1 * s2 * s2 - m2 * s1 * s1 + s1 * s2 * g) / s;
+      }
+    }
+  }
 }
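For reference, the closed form used in the general case of CalculateCutPoints follows from equating the two normal densities and solving the resulting quadratic in x; in the code's notation (LaTeX, with g and the sign conventions matching the committed formula):

    \frac{1}{\sqrt{2\pi}\, s_1} e^{-\frac{(x - m_1)^2}{2 s_1^2}}
      = \frac{1}{\sqrt{2\pi}\, s_2} e^{-\frac{(x - m_2)^2}{2 s_2^2}}
    \;\Longleftrightarrow\;
    s_2^2 (x - m_1)^2 - s_1^2 (x - m_2)^2 = 2\, s_1^2 s_2^2 \ln\frac{s_2}{s_1}

    x_{1,2} = \frac{m_2 s_1^2 - m_1 s_2^2 \pm s_1 s_2\, g}{s_1^2 - s_2^2},
    \qquad
    g = \sqrt{(m_1 - m_2)^2 + 2\,(s_2^2 - s_1^2) \ln\frac{s_2}{s_1}}

Expanding g^2 gives exactly the argument of Math.Sqrt in the code. The argument is nonnegative whenever s1 < s2, since both terms under the root are then nonnegative; the s2 < s1 swap at the top of the general case guarantees this.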
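DensityMass works on log scale throughout, so sections far in the tail of a class distribution (where the CDF underflows to zero in double precision) still compare correctly against each other. A small self-contained sanity check of that branch logic (the Erf approximation and all names here are mine for illustration; the changeset itself calls alglib.errorfunction):

    using System;

    static class LogCdfCheck {
      // erf via the Abramowitz & Stegun 7.1.26 approximation (|error| < 1.5e-7),
      // used only to keep this sketch dependency-free
      static double Erf(double x) {
        double sign = Math.Sign(x);
        x = Math.Abs(x);
        double t = 1.0 / (1.0 + 0.3275911 * x);
        double poly = ((((1.061405429 * t - 1.453152027) * t + 1.421413741) * t
                       - 0.284496736) * t + 0.254829592) * t;
        return sign * (1.0 - poly * Math.Exp(-x * x));
      }

      static double NormalCDF(double x) { return 0.5 * (1 + Erf(x / Math.Sqrt(2.0))); }

      // same series coefficients as in the changeset
      static readonly double[] c = { -1, 5 / 2.0, -37 / 3.0, 353 / 4.0, -4081 / 5.0, 55205 / 6.0, -854197 / 7.0 };

      static double LogNormalCDF(double x) {
        if (x >= -6.5) return Math.Log(NormalCDF(x)); // direct log is safe here
        double z = Math.Pow(x, -2);
        // Minka's asymptotic series for the log of the normal CDF
        double y = z * (c[0] + z * (c[1] + z * (c[2] + z * (c[3] + z * (c[4] + z * (c[5] + z * c[6]))))));
        return y - 0.5 * Math.Log(2 * Math.PI) - 0.5 * x * x - Math.Log(-x);
      }

      static void Main() {
        // moderate x: the direct branch applies, log(CDF(-2)) is about -3.78
        Console.WriteLine(LogNormalCDF(-2.0));
        // far tail: CDF(-40) underflows to 0, so a naive log would return
        // -Infinity, while the series still yields a finite value (about -804.6)
        Console.WriteLine(LogNormalCDF(-40.0));
      }
    }

The same underflow concern explains the last line of DensityMass: Math.Log(1 - Math.Exp(logLower - logUpper)) subtracts the two CDF values without ever leaving log scale.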