
Commit 4e3f296

Guillaume Lemaitre committed
Added the IBA metric
1 parent c8cb6d6 commit 4e3f296

File tree

1 file changed: +68 −8 lines changed


imblearn/metrics/classification.py

Lines changed: 68 additions & 8 deletions
@@ -198,11 +198,6 @@ def sensitivity_specificity_support(y_true,
         pred_sum = pred_sum[indices]
         tn_sum = tn_sum[indices]
 
-        LOGGER.debug('tp: %s' % tp_sum)
-        LOGGER.debug('tn: %s' % tn_sum)
-        LOGGER.debug('pred_sum: %s' % pred_sum)
-        LOGGER.debug('true_sum: %s' % true_sum)
-
     if average == 'micro':
         tp_sum = np.array([tp_sum.sum()])
         pred_sum = np.array([pred_sum.sum()])
@@ -233,9 +228,6 @@ def sensitivity_specificity_support(y_true,
     else:
         weights = None
 
-    LOGGER.debug(specificity)
-    LOGGER.debug(weights)
-
     if average is not None:
         assert average != 'binary' or len(specificity) == 1
         specificity = np.average(specificity, weights=weights)
@@ -509,3 +501,71 @@ def geometric_mean_score(y_true,
     LOGGER.debug('The sensitivity and specificity are : %s - %s' % (sen, spe))
 
     return np.sqrt(sen * spe)
+
+
+def indexed_balanced_accuracy_score(score_func,
+                                    y_true,
+                                    y_pred,
+                                    alpha=0.1,
+                                    squared=True,
+                                    **kwargs):
+    """Compute the indexed balanced accuracy of a scoring function.
+
+    The indexed balanced accuracy (IBA) weights a scoring function to
+    take into account the imbalance of the data.
+
+    Parameters
+    ----------
+    score_func : callable
+        Score function (or loss function) with signature
+        ``score_func(y, y_pred, **kwargs)``.
+
+    y_true : ndarray, shape (n_samples, )
+        Ground truth (correct) target values.
+
+    y_pred : ndarray, shape (n_samples, )
+        Estimated targets as returned by a classifier.
+
+    alpha : float, optional (default=0.1)
+        Weighting factor.
+
+    squared : bool, optional (default=True)
+        If ``squared`` is True, the computed metric is squared before
+        being weighted.
+
+    **kwargs : additional arguments
+        Additional parameters to be passed to ``score_func``.
+
+    Returns
+    -------
+    iba : float (if ``average`` = None) or ndarray, \
+        shape (n_unique_labels, )
+
+    References
+    ----------
+    .. [1] Garcia, V. and Mollineda, R.A. and Sanchez, J.S. "Theoretical
+       analysis of a performance measure for imbalanced data", ICPR (2010).
+    """
+
+    score = score_func(y_true, y_pred, **kwargs)
+
+    if squared:
+        score = np.power(score, 2)
+
+    # Get the arguments to have the proper average, etc. for the
+    # sensitivity and specificity
+    labels = kwargs.get('labels', None)
+    pos_label = kwargs.get('pos_label', 1)
+    average = kwargs.get('average', 'binary')
+    sample_weight = kwargs.get('sample_weight', None)
+
+    # Compute the sensitivity and specificity
+    sen = sensitivity_score(y_true, y_pred, labels, pos_label, average,
+                            sample_weight)
+    spe = specificity_score(y_true, y_pred, labels, pos_label, average,
+                            sample_weight)
+
+    # Compute the dominance
+    dom = sen - spe
+
+    return (1. + alpha * dom) * score
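
A minimal usage sketch of the new metric (illustrative, not from the commit): it assumes indexed_balanced_accuracy_score is imported directly from imblearn.metrics.classification next to geometric_mean_score, and the toy labels are made up. As implemented above, the wrapped score is squared by default and then weighted by the dominance, i.e. IBA_alpha = (1 + alpha * (sensitivity - specificity)) * score**2.

# Illustrative sketch: wrap the geometric mean score with the IBA weighting.
# The import path and the toy labels below are assumptions, not part of the commit.
import numpy as np

from imblearn.metrics.classification import (geometric_mean_score,
                                             indexed_balanced_accuracy_score)

# Toy imbalanced binary problem
y_true = np.array([0, 0, 0, 0, 0, 0, 1, 1])
y_pred = np.array([0, 0, 0, 0, 1, 0, 1, 0])

# Plain geometric mean of sensitivity and specificity
gmean = geometric_mean_score(y_true, y_pred)

# IBA-weighted (and squared) version of the same score
iba = indexed_balanced_accuracy_score(geometric_mean_score, y_true, y_pred,
                                      alpha=0.1, squared=True)

print(gmean, iba)

Keeping alpha small (0.1 by default) means the dominance only nudges the squared score, so a classifier that trades sensitivity against specificity is rewarded or penalised mildly rather than having that trade-off dominate the metric.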

0 commit comments