1. Macro-averaged metrics: plotting precision, recall, and F1 for K-nearest neighbors and random forests.


In [21]:
%matplotlib inline

import matplotlib
import numpy as np
import matplotlib.pyplot as plt

In [22]:
knn_vals = [(0.092770292723250922, 0.092041350566570285, 0.09202409728320253, None),
(0.11832440991910556, 0.016797663159520977, 0.027143064261264935, None),
(0.10995194097373315, 0.038509826090599115, 0.051938489696186856, None),
(0.097300754318851337, 0.013725929222916312, 0.021020057256131583, None),
(0.11115483175260653, 0.024043034270269808, 0.033355209379916435, None),
(0.10428809863783885, 0.011840138558186259, 0.017478715600425111, None),
(0.094522487339631409, 0.018265540609851107, 0.024938583127922926, None),
(0.072953692494120398, 0.010851172130099793, 0.015391532729486797, None),
(0.084114839498133448, 0.015315709975353193, 0.020675690070441708, None),
(0.064904710375669999, 0.010164700575695433, 0.014003320657094269, None)]

rf_vals = [(0.084607745113672389, 0.08842713416647166, 0.086053571202716561, None),
(0.096603215027402642, 0.01642155652845767, 0.025982571636623375, None),
(0.10885457526989055, 0.04211306697974438, 0.055790118980003647, None),
(0.15802728600520846, 0.015557084103243277, 0.024003820977707046, None),
(0.12045608768614563, 0.027656794562915851, 0.03812247368644197, None),
(0.12100389196941479, 0.013959326201527596, 0.020528015655332302, None),
(0.14719750469653542, 0.02197403157858592, 0.030354913537129498, None),
(0.13119883261464615, 0.013803492782909745, 0.019813886094346707, None),
(0.14575567621601707, 0.018934837459628959, 0.025940424725543833, None),
(0.13250796241961768, 0.012505817645627642, 0.017445480857577234, None)]
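The tuples above have the shape of scikit-learn's precision_recall_fscore_support output with average='macro': (precision, recall, F1, support), where support is None for averaged results. A minimal sketch of how values like these might have been generated, assuming a scikit-learn workflow with hypothetical X_train, X_test, y_train, y_test splits that are not shown in this notebook:

from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import precision_recall_fscore_support

# Hypothetical train/test splits; the original notebook does not show how they were built.
# X_train, X_test, y_train, y_test = ...

knn_vals = []
for k in range(1, 11):
    knn = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    # average='macro' takes the unweighted mean of the per-class metrics
    knn_vals.append(precision_recall_fscore_support(y_test, knn.predict(X_test), average='macro'))

rf_vals = []
for n in range(1, 11):
    rf = RandomForestClassifier(n_estimators=n).fit(X_train, y_train)
    rf_vals.append(precision_recall_fscore_support(y_test, rf.predict(X_test), average='macro'))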

In [23]:
knn_precision = [x[0] for x in knn_vals]
knn_recall = [x[1] for x in knn_vals]
knn_f1 = [x[2] for x in knn_vals]

In [24]:
# Macro-averaged precision, recall, and F1 for KNN as the number of neighbors varies from 1 to 10.
prec, = plt.plot(range(1, 11), knn_precision, 'r-', label='Precision')
rec, = plt.plot(range(1, 11), knn_recall, 'b-', label='Recall')
f, = plt.plot(range(1, 11), knn_f1, 'y-', label='F1')
plt.legend(handles=[prec, rec, f], loc=0)
plt.ylabel('metric value between 0 and 1')
plt.xlabel('number of nearest neighbors')


Out[24]:
<matplotlib.text.Text at 0x7fa91a80c890>

In [25]:
rf_precision = [x[0] for x in rf_vals]
rf_recall = [x[1] for x in rf_vals]
rf_f1 = [x[2] for x in rf_vals]

In [26]:
# Macro-averaged precision, recall, and F1 for random forests as the number of estimators varies from 1 to 10.
prec, = plt.plot(range(1, 11), rf_precision, 'r-', label='Precision')
rec, = plt.plot(range(1, 11), rf_recall, 'b-', label='Recall')
f, = plt.plot(range(1, 11), rf_f1, 'y-', label='F1')
plt.legend(handles=[prec, rec, f], loc=0)
plt.ylabel('metric value between 0 and 1')
plt.xlabel('number of random forest estimators')


Out[26]:
<matplotlib.text.Text at 0x7fa935127a50>

2. Micro-averaged metrics: plotting precision, recall, and F1 for K-nearest neighbors and random forests.


In [27]:
knn_vals = [(0.18460698093625388, 0.18192780164292713, 0.18325759958355473, None),
(0.32441346692389073, 0.05612437152116103, 0.095693516410896531, None),
(0.29199417758369722, 0.11878384634432566, 0.16887072580892032, None),
(0.40188092417061611, 0.058428345337682888, 0.1020237435024956, None),
(0.3683049648341587, 0.095281160169245177, 0.15139592171889968, None),
(0.45745780227351018, 0.057190228567122081, 0.10166993636059142, None),
(0.42678805058874836, 0.084288836493222663, 0.14077517149612953, None),
(0.49246090841399853, 0.056964137678584888, 0.10211624383606588, None),
(0.46640936743638384, 0.076763239774770411, 0.13182952759545161, None),
(0.51886142026987092, 0.056716514324472722, 0.1022555223417058, None)]

rf_vals = [(0.17449087405995567, 0.1813571913051904, 0.17785778767929639, None),
(0.31112883925009971, 0.058783631019669907, 0.098884381338742389, None),
(0.27922903134707711, 0.12774135202351344, 0.17529085872576178, None),
(0.39065748440748443, 0.064737357751149291, 0.11106903717386284, None),
(0.34741018918727412, 0.10557367871408116, 0.16193676770512513, None),
(0.43537414965986393, 0.066836773144708941, 0.11588359373541654, None),
(0.39917992159689991, 0.095378056264332545, 0.1539678125760367, None),
(0.46451144206320377, 0.06883929244318121, 0.11990848398469732, None),
(0.44010596026490068, 0.08943509576564064, 0.14866049857728303, None),
(0.48523239546090469, 0.067213591292270919, 0.11807203850627429, None)]
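These micro-averaged tuples could come from the same sketch as in section 1, with only the averaging mode changed: micro averaging pools true/false positives and false negatives across all classes before computing the metrics, whereas macro averaging takes the unweighted mean of the per-class scores. Assuming the same hypothetical splits and fitted classifier:

# Only the average argument changes relative to the macro sketch above.
precision_recall_fscore_support(y_test, knn.predict(X_test), average='micro')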

In [28]:
knn_precision = [x[0] for x in knn_vals]
knn_recall = [x[1] for x in knn_vals]
knn_f1 = [x[2] for x in knn_vals]
# Micro-averaged precision, recall, and F1 for KNN as the number of neighbors varies from 1 to 10.
prec, = plt.plot(range(1, 11), knn_precision, 'r-', label='Precision')
rec, = plt.plot(range(1, 11), knn_recall, 'b-', label='Recall')
f, = plt.plot(range(1, 11), knn_f1, 'y-', label='F1')
plt.legend(handles=[prec, rec, f], loc=0)
plt.ylabel('metric value between 0 and 1')
plt.xlabel('number of nearest neighbors')


Out[28]:
<matplotlib.text.Text at 0x7fa91a85ae50>

In [29]:
rf_precision = [x[0] for x in rf_vals]
rf_recall = [x[1] for x in rf_vals]
rf_f1 = [x[2] for x in rf_vals]
# Micro-averaged precision, recall, and F1 for random forests as the number of estimators varies from 1 to 10.
prec, = plt.plot(range(1, 11), rf_precision, 'r-', label='Precision')
rec, = plt.plot(range(1, 11), rf_recall, 'b-', label='Recall')
f, = plt.plot(range(1, 11), rf_f1, 'y-', label='F1')
plt.legend(handles=[prec, rec, f], loc=0)
plt.ylabel('metric value between 0 and 1')
plt.xlabel('number of random forest estimators')


Out[29]:
<matplotlib.text.Text at 0x7fa91a63a5d0>
