In [1]:
from sklearn.metrics import precision_score
from sklearn.metrics import confusion_matrix
In [2]:
y_true = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
y_pred = [0, 1, 1, 1, 1, 0, 0, 0, 1, 1]
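# 10 samples: 5 of class 0 and 5 of class 1
# precision = TP / (TP + FP), the fraction of positive predictions that are correct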
In [3]:
print(precision_score(y_true, y_pred))
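# => 0.333... : precision for the default positive class 1 (2 TP / 6 predicted as 1)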
In [4]:
print(precision_score(y_true, y_pred, pos_label=0))
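# => 0.25 : precision with class 0 as the positive class (1 TP / 4 predicted as 0)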
In [5]:
print(precision_score(y_true, y_pred, average=None))
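# => [0.25 0.333...] : per-class precision, in label order [0, 1]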
In [6]:
print(precision_score(y_true, y_pred, average='macro'))
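# => 0.2916... : unweighted mean of the per-class values, (0.25 + 0.333...) / 2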
In [7]:
print(precision_score(y_true, y_pred, average='micro'))
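# => 0.3 : total TP / total predictions (3 / 10); when every class is counted,
#    micro-averaged precision equals accuracy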
In [8]:
print(confusion_matrix(y_true, y_pred))
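# => [[1 4]
#     [3 2]] : rows = true labels [0, 1], columns = predicted labels [0, 1]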
In [9]:
print(confusion_matrix(y_true, y_pred, labels=[1, 0]))
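# => [[2 3]
#     [4 1]] : same counts with rows/columns reordered to labels [1, 0]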
In [10]:
print(precision_score(y_true, y_pred, average='weighted'))
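# => 0.2916... : support-weighted mean of the per-class precision; identical to
#    'macro' here because both classes have 5 samples

The averaged scores can also be reproduced by hand from the confusion matrix; a minimal sketch, reusing the y_true / y_pred defined above:

import numpy as np

cm = confusion_matrix(y_true, y_pred)    # rows = true labels, columns = predictions
tp = np.diag(cm)                         # true positives per class: [1, 2]
predicted = cm.sum(axis=0)               # predictions per class: [4, 6]
per_class = tp / predicted               # [0.25, 0.333...] -> average=None
print(per_class.mean())                  # average='macro'
print(tp.sum() / predicted.sum())        # average='micro'
print(np.average(per_class, weights=cm.sum(axis=1)))  # average='weighted'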
In [11]:
y_true_2 = [0, 1, 1, 1, 1]
y_pred_2 = [0, 0, 0, 0, 1]
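# imbalanced data: 1 sample of class 0 and 4 samples of class 1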
In [12]:
print(confusion_matrix(y_true_2, y_pred_2))
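# => [[1 0]
#     [3 1]]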
In [13]:
print(confusion_matrix(y_true_2, y_pred_2, labels=[1, 0]))
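# => [[1 3]
#     [0 1]]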
In [14]:
print(precision_score(y_true_2, y_pred_2))
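# => 1.0 : class 1 is predicted only once, and that prediction is correct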
In [15]:
print(precision_score(y_true_2, y_pred_2, pos_label=0))
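# => 0.25 : 1 TP / 4 predicted as class 0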
In [16]:
print(precision_score(y_true_2, y_pred_2, average='macro'))
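# => 0.625 : (0.25 + 1.0) / 2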
In [17]:
print(precision_score(y_true_2, y_pred_2, average='micro'))
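# => 0.4 : 2 correct predictions out of 5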
In [18]:
print(precision_score(y_true_2, y_pred_2, average='weighted'))
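# => 0.85 : (0.25 * 1 + 1.0 * 4) / 5, weighted by each class's support,
#    so the majority class dominates the score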
In [19]:
y_true_ab = ['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B']
y_pred_ab = ['A', 'B', 'B', 'B', 'B', 'A', 'A', 'A', 'B', 'B']
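# the first example again, with string labels 'A' and 'B' instead of 0 and 1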
In [20]:
# With string labels, the default pos_label=1 does not exist in the data, so this raises an error:
# print(precision_score(y_true_ab, y_pred_ab))
# ValueError: pos_label=1 is not a valid label: array(['A', 'B'], dtype='<U1')
In [21]:
print(precision_score(y_true_ab, y_pred_ab, pos_label='A'))
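# => 0.25 : precision for 'A' (1 correct out of the 4 samples predicted 'A')

With string labels there is no usable default positive class, so either pos_label (as above) or average has to be passed explicitly; for example, average=None is expected to return the per-class precision in sorted label order ['A', 'B']:

print(precision_score(y_true_ab, y_pred_ab, average=None))
# => [0.25 0.333...]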