In [1]:
from sklearn.metrics import precision_score
from sklearn.metrics import confusion_matrix

In [2]:
# Ground truth: five negative (0) samples followed by five positive (1) samples.
y_true = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
# Predictions: mostly wrong — four of the five 0s predicted as 1,
# three of the five 1s predicted as 0.
y_pred = [0, 1, 1, 1, 1, 0, 0, 0, 1, 1]

In [3]:
# Default behavior: precision of the positive class (pos_label=1).
# Six samples are predicted 1, two of them correctly -> 2/6.
prec_pos = precision_score(y_true, y_pred)
print(prec_pos)


0.3333333333333333

In [4]:
# Treat class 0 as the positive label instead.
# Four samples are predicted 0, one of them correctly -> 1/4.
prec_neg = precision_score(y_true, y_pred, pos_label=0)
print(prec_neg)


0.25

In [5]:
# average=None returns the precision of every class as an array
# (here: class 0 then class 1).
per_class_prec = precision_score(y_true, y_pred, average=None)
print(per_class_prec)


[0.25       0.33333333]

In [6]:
# Macro average: unweighted mean of the per-class precisions,
# (0.25 + 0.333...) / 2.
macro_prec = precision_score(y_true, y_pred, average='macro')
print(macro_prec)


0.29166666666666663

In [7]:
# Micro average: pool all predictions before computing precision
# (3 correct out of 10 here).
micro_prec = precision_score(y_true, y_pred, average='micro')
print(micro_prec)


0.3

In [8]:
# Confusion matrix with the default (sorted) label order [0, 1]:
# rows are true labels, columns are predicted labels.
cm = confusion_matrix(y_true, y_pred)
print(cm)


[[1 4]
 [3 2]]

In [9]:
# Reverse the row/column order so class 1 comes first.
cm_flipped = confusion_matrix(y_true, y_pred, labels=[1, 0])
print(cm_flipped)


[[2 3]
 [4 1]]

In [10]:
# Weighted average: per-class precision weighted by class support.
# With equal supports (5 and 5) this matches the macro average.
weighted_prec = precision_score(y_true, y_pred, average='weighted')
print(weighted_prec)


0.29166666666666663

In [11]:
# Imbalanced example: one negative (0) and four positive (1) samples.
y_true_2 = [0, 1, 1, 1, 1]
# Only a single sample is predicted positive, and it is correct.
y_pred_2 = [0, 0, 0, 0, 1]

In [12]:
# Confusion matrix for the imbalanced example, default label order [0, 1].
cm_2 = confusion_matrix(y_true_2, y_pred_2)
print(cm_2)


[[1 0]
 [3 1]]

In [13]:
# Same matrix with rows/columns reordered to [1, 0].
cm_2_flipped = confusion_matrix(y_true_2, y_pred_2, labels=[1, 0])
print(cm_2_flipped)


[[1 3]
 [0 1]]

In [14]:
# Only one sample is predicted 1 and it is correct, so precision is 1.0 —
# even though three of the four true positives were missed.
prec_pos_2 = precision_score(y_true_2, y_pred_2)
print(prec_pos_2)


1.0

In [15]:
# Four samples predicted 0, only one of them actually is 0 -> 1/4.
prec_neg_2 = precision_score(y_true_2, y_pred_2, pos_label=0)
print(prec_neg_2)


0.25

In [16]:
# Macro average: (0.25 + 1.0) / 2 — class imbalance is ignored.
macro_prec_2 = precision_score(y_true_2, y_pred_2, average='macro')
print(macro_prec_2)


0.625

In [17]:
# Micro average: 2 correct predictions out of 5 total.
micro_prec_2 = precision_score(y_true_2, y_pred_2, average='micro')
print(micro_prec_2)


0.4

In [18]:
# Weighted average: support-weighted mean, (1*0.25 + 4*1.0) / 5.
# Differs from the macro average because the classes are imbalanced.
weighted_prec_2 = precision_score(y_true_2, y_pred_2, average='weighted')
print(weighted_prec_2)


0.85

In [19]:
# Same data as the first example, but with string labels ('A' for 0, 'B' for 1).
y_true_ab = ['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B']
y_pred_ab = ['A', 'B', 'B', 'B', 'B', 'A', 'A', 'A', 'B', 'B']

In [20]:
# With string labels there is no class named 1, so the default pos_label=1
# is invalid and calling precision_score without pos_label raises:
# print(precision_score(y_true_ab, y_pred_ab))
# ValueError: pos_label=1 is not a valid label: array(['A', 'B'], dtype='<U1')

In [21]:
# Passing pos_label explicitly makes string labels work; 'A' plays the
# role class 0 played above, so the result matches pos_label=0 there.
prec_a = precision_score(y_true_ab, y_pred_ab, pos_label='A')
print(prec_a)


0.25