Visualization in Python

A notebook by Gerrit Gruben


In [1]:
import matplotlib.pyplot as plt
%matplotlib inline

General Tricks with Jupyter

As Data Scientists mainly work with Jupyter based notebooks if they are working with Python, it's worth the time to investigate visualization tricks.

Quality of Plots


In [ ]:
x = range(1000)
y = [i ** 2 for i in x]
plt.plot(x,y)
plt.show();

In [9]:
plt.rcParams['savefig.dpi'] = 200

plt.rcParams['figure.autolayout'] = False
plt.rcParams['figure.figsize'] = 20, 12
plt.rcParams['axes.labelsize'] = 18
plt.rcParams['axes.titlesize'] = 20
plt.rcParams['font.size'] = 16
plt.rcParams['lines.linewidth'] = 2.0
plt.rcParams['lines.markersize'] = 8
plt.rcParams['legend.fontsize'] = 14

plt.rcParams['text.usetex'] = False # True activates latex output in fonts!
plt.rcParams['font.family'] = "serif"
plt.rcParams['font.serif'] = "cm"
plt.rcParams['text.latex.preamble'] = "\usepackage{subdepth}, \usepackage{type1cm}"

In [10]:
x = range(1000)
y = [i ** 2 for i in x]
plt.plot(x,y)
plt.show();


C:\Users\gerrit\Anaconda\lib\site-packages\matplotlib\font_manager.py:1288: UserWarning: findfont: Font family [u'serif'] not found. Falling back to Bitstream Vera Sans
  (prop.get_family(), self.defaultFamily[fontext]))

Matplotlib

Matplotlib is the standard plotting functionality for machine learners. The reason is that most of the example code available is written in matplotlib. Also, a few years ago Matlab was standard among machine learners and you still find reference code in research papers. We import again to reset the settings we've done.


In [11]:
import matplotlib.pyplot as plt
%matplotlib inline 
# also check pylab inline
import numpy as np

plt.rcParams['savefig.dpi'] = 200

# plt.rcParams['figure.autolayout'] = False
plt.rcParams['figure.figsize'] = 20, 12
plt.rcParams['axes.labelsize'] = 18
plt.rcParams['axes.titlesize'] = 20
plt.rcParams['font.size'] = 16
plt.rcParams['lines.linewidth'] = 2.0
plt.rcParams['lines.markersize'] = 8
plt.rcParams['legend.fontsize'] = 14

plt.rcParams['font.family'] = "serif"
plt.rcParams['font.serif'] = "cm"

In [12]:
from mpl_toolkits.mplot3d import Axes3D
from sklearn import datasets
from sklearn.decomposition import PCA

# Prepare Iris data set with PCA on 2 dims
iris = datasets.load_iris()
pca = PCA(n_components=2)
X = pca.fit_transform(iris.data)
Y = iris.target

# 


# Second plot -> 2nd dimensionel
plt.figure(2) # select figure number 2
plt.clf() # clear current figure

# Plot the training points
# scatter plot, colors given by label
# colormap are "spectra" of colours.
# http://matplotlib.org/examples/color/colormaps_reference.html
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired) 

# configure the axes
x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5

plt.xlabel('1st eigenvector') # labels of axis
plt.ylabel('2nd eigenvector')
plt.xlim(x_min, x_max) # the range of the axis
plt.ylim(y_min, y_max)
plt.xticks(()) # ticks are the values used to denote the axis
plt.yticks(())
plt.title("PCA with k = 2, %.2f%% var. expl." % sum(pca.explained_variance_ratio_))

# To getter a better understanding of interaction of the dimensions
# plot the first three PCA dimensions
fig = plt.figure(1)
ax = Axes3D(fig, elev=-150, azim=110)
pca = PCA(n_components=3)
X_reduced = pca.fit_transform(iris.data)
ax.scatter(X_reduced[:, 0], X_reduced[:, 1], X_reduced[:, 2], c=Y,
           cmap=plt.cm.Paired)
ax.set_title("PCA with k = 3, %.2f%% var. expl" % sum(pca.explained_variance_ratio_))
ax.set_xlabel("1st eigenvector")
ax.w_xaxis.set_ticklabels([])
ax.set_ylabel("2nd eigenvector")
ax.w_yaxis.set_ticklabels([])
ax.set_zlabel("3rd eigenvector")
ax.w_zaxis.set_ticklabels([])

plt.show()


There are also different styles to use.


In [13]:
with plt.xkcd(): # without with not temporary!

    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.spines['right'].set_color('none')
    ax.spines['top'].set_color('none')
    plt.xticks([])
    plt.yticks([])
    ax.set_ylim([-30, 10])

    data = np.ones(100)
    data[70:] -= np.arange(30)

    plt.annotate(
        'THE DAY I REALIZED\nI COULD COOK BACON\nWHENEVER I WANTED',
        xy=(70, 1), arrowprops=dict(arrowstyle='->'), xytext=(15, -10))

    plt.plot(data)

    plt.xlabel('time')
    plt.ylabel('my overall health')

    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.bar([-0.125, 1.0-0.125], [0, 100], 0.25)
    ax.spines['right'].set_color('none')
    ax.spines['top'].set_color('none')
    ax.xaxis.set_ticks_position('bottom')
    ax.set_xticks([0, 1])
    ax.set_xlim([-0.5, 1.5])
    ax.set_ylim([0, 110])
    ax.set_xticklabels(['CONFIRMED BY\nEXPERIMENT', 'REFUTED BY\nEXPERIMENT'])
    plt.yticks([])

    plt.title("CLAIMS OF SUPERNATURAL POWERS")

    plt.show()


Out[13]:
([], <a list of 0 Text xticklabel objects>)
Out[13]:
([], <a list of 0 Text yticklabel objects>)
Out[13]:
(-30, 10)
Out[13]:
<matplotlib.text.Annotation at 0x9dfbd30>
Out[13]:
[<matplotlib.lines.Line2D at 0x9e1e0b8>]
Out[13]:
<matplotlib.text.Text at 0x972bc50>
Out[13]:
<matplotlib.text.Text at 0x99d9ba8>
Out[13]:
<Container object of 2 artists>
Out[13]:
[<matplotlib.axis.XTick at 0x9e34ef0>, <matplotlib.axis.XTick at 0x9d5e278>]
Out[13]:
(-0.5, 1.5)
Out[13]:
(0, 110)
Out[13]:
[<matplotlib.text.Text at 0x9d765c0>, <matplotlib.text.Text at 0x9d816d8>]
Out[13]:
([], <a list of 0 Text yticklabel objects>)
Out[13]:
<matplotlib.text.Text at 0x9c8fa58>

In [14]:
np.random.seed(0)

n_bins = 10
x = np.random.randn(1000, 3)

fig, axes = plt.subplots(nrows=2, ncols=2)
ax0, ax1, ax2, ax3 = axes.flatten()

colors = ['red', 'tan', 'lime']
ax0.hist(x, n_bins, normed=1, histtype='bar', color=colors, label=colors)
ax0.legend(prop={'size': 10})
ax0.set_title('bars with legend')

ax1.hist(x, n_bins, normed=1, histtype='bar', stacked=True)
ax1.set_title('stacked bar')

ax2.hist(x, n_bins, histtype='step', stacked=True, fill=False)
ax2.set_title('stack step (unfilled)')

# Make a multiple-histogram of data-sets with different length.
x_multi = [np.random.randn(n) for n in [10000, 5000, 2000]]
ax3.hist(x_multi, n_bins, histtype='bar')
ax3.set_title('different sample sizes')

fig.tight_layout()
plt.show()


Matplotlib offers much more functionality and if you see a plot in ML books it most often can be reproduced rather easily, if they base it on Matlab.

The gallery is most useful to inspect: http://matplotlib.org/gallery.html

Seaborn

There are more styles for xkcd, which can be compared at https://tonysyu.github.io/raw_content/matplotlib-style-gallery/gallery.html. In fact, Seaborn is a package that extends matplotlib's functionality to improve the layout of the plots without the need of code. Seaborn additionally provides new styles to choose from and new plot types. Careful importing Seaborn overwrites the currently selected style.


In [30]:
from math import *

X = np.arange(-5, 5, 0.01)
Y = [sin(x) for x in X]
plt.plot(X, Y)


Out[30]:
[<matplotlib.lines.Line2D at 0x1d3645f8>]

In [31]:
import seaborn as sns
plt.plot(X, Y)


Out[31]:
[<matplotlib.lines.Line2D at 0x1d5db630>]

ggplot


In [17]:
from ggplot import *
diamonds.head()


Out[17]:
carat cut color clarity depth table price x y z
0 0.23 Ideal E SI2 61.5 55.0 326 3.95 3.98 2.43
1 0.21 Premium E SI1 59.8 61.0 326 3.89 3.84 2.31
2 0.23 Good E VS1 56.9 65.0 327 4.05 4.07 2.31
3 0.29 Premium I VS2 62.4 58.0 334 4.20 4.23 2.63
4 0.31 Good J SI2 63.3 58.0 335 4.34 4.35 2.75

First choose an aesthetic. We then add components to the grid step by step to show how ggplots are composed "algebraically".


In [18]:
p = ggplot(aes(x='date', y='beef'), data=meat)
p


Out[18]:
<ggplot: (19894921)>

In [19]:
p + geom_point()


Out[19]:
<ggplot: (19894921)>

In [20]:
p + geom_point() + geom_line()


Out[20]:
<ggplot: (19894921)>

In [21]:
p + geom_point() + geom_line() + stat_smooth(color='blue')


C:\Users\gerrit\Anaconda\lib\site-packages\ggplot\stats\stat_smooth.py:77: FutureWarning: sort(columns=....) is deprecated, use sort_values(by=.....)
  smoothed_data = smoothed_data.sort('x')
Out[21]:
<ggplot: (19894921)>

In [22]:
import pygal
from IPython.display import SVG, HTML, display
display(SVG(pygal.Bar()(1, 3, 3, 7)(1, 6, 6, 4).render()))


Pygal0011223344556677162.1346153846510.7692307693234.730769231433.8461538463407.326923077433.8461538467579.923076923280.01138.076923077510.7692307696310.673076923318.4615384626483.269230769318.4615384624655.865384615395.384615385

In [23]:
html_pygal = """
<!DOCTYPE html>
<html>
  <head>
  <script type="text/javascript" src="http://kozea.github.com/pygal.js/javascripts/svg.jquery.js"></script>
  <script type="text/javascript" src="http://kozea.github.com/pygal.js/javascripts/pygal-tooltips.js"></script>
    <!-- ... -->
  </head>
  <body>
    <figure>
      {pygal_render}
    </figure>
  </body>
</html>
"""

In [24]:
ages = [(364381, 358443, 360172, 345848, 334895, 326914, 323053, 312576, 302015, 301277, 309874, 318295, 323396, 332736, 330759, 335267, 345096, 352685, 368067, 381521, 380145, 378724, 388045, 382303, 373469, 365184, 342869, 316928, 285137, 273553, 250861, 221358, 195884, 179321, 171010, 162594, 152221, 148843, 143013, 135887, 125824, 121493, 115913, 113738, 105612, 99596, 91609, 83917, 75688, 69538, 62999, 58864, 54593, 48818, 44739, 41096, 39169, 36321, 34284, 32330, 31437, 30661, 31332, 30334, 23600, 21999, 20187, 19075, 16574, 15091, 14977, 14171, 13687, 13155, 12558, 11600, 10827, 10436, 9851, 9794, 8787, 7993, 6901, 6422, 5506, 4839, 4144, 3433, 2936, 2615),
   (346205, 340570, 342668, 328475, 319010, 312898, 308153, 296752, 289639, 290466, 296190, 303871, 309886, 317436, 315487, 316696, 325772, 331694, 345815, 354696, 354899, 351727, 354579, 341702, 336421, 321116, 292261, 261874, 242407, 229488, 208939, 184147, 162662, 147361, 140424, 134336, 126929, 125404, 122764, 116004, 105590, 100813, 95021, 90950, 85036, 79391, 72952, 66022, 59326, 52716, 46582, 42772, 38509, 34048, 30887, 28053, 26152, 23931, 22039, 20677, 19869, 19026, 18757, 18308, 14458, 13685, 12942, 12323, 11033, 10183, 10628, 10803, 10655, 10482, 10202, 10166, 9939, 10138, 10007, 10174, 9997, 9465, 9028, 8806, 8450, 7941, 7253, 6698, 6267, 5773),
   (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 91, 412, 1319, 2984, 5816, 10053, 16045, 24240, 35066, 47828, 62384, 78916, 97822, 112738, 124414, 130658, 140789, 153951, 168560, 179996, 194471, 212006, 225209, 228886, 239690, 245974, 253459, 255455, 260715, 259980, 256481, 252222, 249467, 240268, 238465, 238167, 231361, 223832, 220459, 222512, 220099, 219301, 221322, 229783, 239336, 258360, 271151, 218063, 213461, 207617, 196227, 174615, 160855, 165410, 163070, 157379, 149698, 140570, 131785, 119936, 113751, 106989, 99294, 89097, 78413, 68174, 60592, 52189, 43375, 35469, 29648, 24575, 20863),
   (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 74, 392, 1351, 3906, 7847, 12857, 19913, 29108, 42475, 58287, 74163, 90724, 108375, 125886, 141559, 148061, 152871, 159725, 171298, 183536, 196136, 210831, 228757, 238731, 239616, 250036, 251759, 259593, 261832, 264864, 264702, 264070, 258117, 253678, 245440, 241342, 239843, 232493, 226118, 221644, 223440, 219833, 219659, 221271, 227123, 232865, 250646, 261796, 210136, 201824, 193109, 181831, 159280, 145235, 145929, 140266, 133082, 124350, 114441, 104655, 93223, 85899, 78800, 72081, 62645, 53214, 44086, 38481, 32219, 26867, 21443, 16899, 13680, 11508),
   (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 5, 17, 15, 31, 34, 38, 35, 45, 299, 295, 218, 247, 252, 254, 222, 307, 316, 385, 416, 463, 557, 670, 830, 889, 1025, 1149, 1356, 1488, 1835, 1929, 2130, 2362, 2494, 2884, 3160, 3487, 3916, 4196, 4619, 5032, 5709, 6347, 7288, 8139, 9344, 11002, 12809, 11504, 11918, 12927, 13642, 13298, 14015, 15751, 17445, 18591, 19682, 20969, 21629, 22549, 23619, 25288, 26293, 27038, 27039, 27070, 27750, 27244, 25905, 24357, 22561, 21794, 20595),
   (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 8, 0, 8, 21, 34, 49, 84, 97, 368, 401, 414, 557, 654, 631, 689, 698, 858, 1031, 1120, 1263, 1614, 1882, 2137, 2516, 2923, 3132, 3741, 4259, 4930, 5320, 5948, 6548, 7463, 8309, 9142, 10321, 11167, 12062, 13317, 15238, 16706, 18236, 20336, 23407, 27024, 32502, 37334, 34454, 38080, 41811, 44490, 45247, 46830, 53616, 58798, 63224, 66841, 71086, 73654, 77334, 82062, 87314, 92207, 94603, 94113, 92753, 93174, 91812, 87757, 84255, 79723, 77536, 74173),
   (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 5, 0, 11, 35, 137, 331, 803, 1580, 2361, 3632, 4866, 6849, 8754, 10422, 12316, 14152, 16911, 19788, 22822, 27329, 31547, 35711, 38932, 42956, 46466, 49983, 52885, 55178, 56549, 57632, 57770, 57427, 56348, 55593, 55554, 53266, 51084, 49342, 48555, 47067, 45789, 44988, 44624, 44238, 46267, 46203, 36964, 33866, 31701, 28770, 25174, 22702, 21934, 20638, 19051, 17073, 15381, 13736, 11690, 10368, 9350, 8375, 7063, 6006, 5044, 4030, 3420, 2612, 2006, 1709, 1264, 1018),
   (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 6, 11, 20, 68, 179, 480, 1077, 2094, 3581, 5151, 7047, 9590, 12434, 15039, 17257, 19098, 21324, 24453, 27813, 32316, 37281, 43597, 49647, 53559, 58888, 62375, 67219, 70956, 73547, 74904, 75994, 76224, 74979, 72064, 70330, 68944, 66527, 63073, 60899, 60968, 58756, 57647, 56301, 57246, 57068, 59027, 59187, 47549, 44425, 40976, 38077, 32904, 29431, 29491, 28020, 26086, 24069, 21742, 19498, 17400, 15738, 14451, 13107, 11568, 10171, 8530, 7273, 6488, 5372, 4499, 3691, 3259, 2657)]

types = ['Males single', 'Females single',
         'Males married', 'Females married',
         'Males widowed', 'Females widowed',
         'Males divorced', 'Females divorced']

pyramid_chart = pygal.Pyramid(human_readable=True, legend_at_bottom=True)
pyramid_chart.title = 'England population by age in 2010 (source: ons.gov.uk)'
pyramid_chart.x_labels = map(lambda x: str(x) if not x % 5 else '', range(90))
for type, age in zip(types, ages):
    pyramid_chart.add(type, age)
display(HTML(html_pygal.format(pygal_render=pyramid_chart.render())))


England population by age in 2010 (source: ons.gov.uk)00.0055555555560.016666666670.027777777780.038888888890.0550.061111111110.072222222220.083333333330.094444444440.1055555556100.11666666670.12777777780.13888888890.150.1611111111150.17222222220.18333333330.19444444440.20555555560.2166666667200.22777777780.23888888890.250.26111111110.2722222222250.28333333330.29444444440.30555555560.31666666670.3277777778300.33888888890.350.36111111110.37222222220.3833333333350.39444444440.40555555560.41666666670.42777777780.4388888889400.450.46111111110.47222222220.48333333330.4944444444450.50555555560.51666666670.52777777780.53888888890.55500.56111111110.57222222220.58333333330.59444444440.6055555556550.61666666670.62777777780.63888888890.650.6611111111600.67222222220.68333333330.69444444440.70555555560.7166666667650.72777777780.73888888890.750.76111111110.7722222222700.78333333330.79444444440.80555555560.81666666670.8277777778750.83888888890.850.86111111110.87222222220.8833333333800.89444444440.90555555560.91666666670.92777777780.9388888889850.950.96111111110.97222222220.98333333330.9944444444-400000-300000-200000-1000000100000200000300000400000364381521.916246855436.9914529910358443519.424303878432.205128205360172520.14989657427.418803419345848514.138682155422.632478632334895509.542142676417.846153846326914506.192833667413.059829065323053504.572525173408.273504274312576500.175744007403.487179487302015495.743711373398.700854701301277495.434002057393.914529915309874499.041821823389.12820512810318295502.575781373384.341880342323396504.716468663379.555555556332736508.636096046374.769230769330759507.806427595369.982905983335267509.698256315365.19658119715345096513.823097594360.41025641352685517.007899759355.623931624368067523.463114792350.837606838381521529.10922473346.051282051380145528.53177213341.26495726520378724527.935434816336.478632479388045531.847088654331.692307692382303529.4373991326.905982906373469525.730119838322.11965812365184522.253234091317.33333333325342869512.888514063312.547008547316928502.002105717307.760683761285137488.660684502302.974358974273553483.799339361298.188034188250861474.276407393293.40170940230221358461.895168989288.615384615195884451.204742011283.829059829179321444.253908141279.042735043171010440.766111226274.256410256162594437.234249978269.4700854735152221432.881113484264.683760684148843431.463500925259.897435897143013429.016881263255.111111111135887426.026381827250.324786325125824421.803340032245.53846153840121493419.985791189240.752136752115913417.644086606235.965811966113738416.731325411231.179487179105612413.321165656226.39316239399596410.796489174221.6068376074591609407.444662203216.82051282183917404.216635026212.03418803475688400.763250257207.24786324869538398.182339293202.46153846262999395.438180464197.6752136755058864393.702885043192.88888888954593391.910515819188.10256410348818389.486977474183.31623931644739387.775183031178.5299145341096386.246360487173.7435897445539169385.437675052168.95726495736321384.242482462164.17094017134284383.387634391159.38461538532330382.567618127154.59829059831437382.192861462149.8119658126030661381.867205054145.02564102631332382.148797128140.23931623930334381.72997613135.45299145323600378.903983539130.66666666721999378.232107367125.880341886520187377.471682868121.09401709419075377.005020593116.30769230816574375.955450134111.52136752115091375.333093881106.73504273514977375.285252604101.9487179497014171374.94700638797.162393162413687374.74389079292.376068376113155374.52063150287.589743589712558374.27009429182.803418803411600373.86805970578.01709401717510827373.54366227873.230769230810436373.37957509468.44444444449851373.13407380763.65811965819794373.11015316958.87179487188787372.68755522754.0854700855807993372.35434493349.29914529916901371.89607586444.51282051286422371.69505857139.72649572655506371.31064971934.94017094024839371.03073628630.1538461538854144370.73907236425.36752136753433370.44069387720.58119658122936370.23212269815.79487179492615370.09741173511.0085470085346205223.711499111436.9914529910340570226.076285011432.205128205342668225.195837661427.418803419328475231.152076575422.632478632319010235.124161498417.846153846312898237.689125371413.059829065308153239.680413586408.273504274296752244.464960888403.487179487289639247.45000474398.700854701290466247.102945656393.914529915296190244.700809988389.12820512810303871241.477399074384.341880342309886238.953142253379.555555556317436235.784706841374.769230769315487236.602624804369.982905983316696236.095255477365.19658119715325772232.286418418360.41025641331694229.801190007355.623931624345815223.875166636350.837606838354696220.148163339346.051282051354899220.062972294341.26495726520351727221.394134827336.478632479354579220.197263596331.692307692341702225.60122953326.905982906336421227.817455677322.11965812321116234.240356865317.33333333325292261246.349655383312.547008547261874259.101873509307.760683761242407267.271400948302.974358974229488272.692992614298.188034188208939281.316592518293.40170940230184147291.720811158288.615384615162662300.737213121283.829059829147361307.158435668279.042735043140424310.069619304274.256410256134336312.624511329269.4700854735126929315.732935315264.683760684125404316.372917302259.897435897122764317.480820545255.111111111116004320.317724305250.324786325105590324.688066871245.53846153840100813326.692784217240.75213675295021329.123456787235.96581196690950330.831893948231.17948717985036333.313765077226.39316239379391335.68274758221.6068376074572952338.384940377216.82051282166022341.293186391212.03418803459326344.103231889207.24786324852716346.877186601202.46153846246582349.451383197.6752136755042772351.050288817192.88888888938509352.839300759188.10256410334048354.711405444183.31623931630887356.037951714178.5299145328053357.227269059173.7435897445526152358.025043326168.95726495723931358.957108896164.17094017122039359.75110622159.38461538520677360.322683575154.59829059819869360.661769113149.8119658126019026361.015542762145.02564102618757361.128431388140.23931623918308361.316858872135.45299145314458362.932551102130.66666666713685363.256948529125.880341886512942363.568756146121.09401709412323363.828525884116.30769230811033364.369887696111.52136752110183364.726598967106.73504273510628364.539850125101.9487179497010803364.46640956997.162393162410655364.52851929692.376068376110482364.60112053287.589743589710202364.71862542182.803418803410166364.73373319378.0170940171759939364.82899608573.230769230810138364.74548368268.444444444410007364.80045918363.658119658110174364.7303759158.87179487189997364.80465578754.0854700855809465365.02791507749.29914529919028365.21130663644.51282051288806365.30447122739.72649572658450365.45387030134.94017094027941365.66747740330.1538461538857253365.95620370325.36752136756698366.1891151820.58119658126267366.36998877815.79487179495773366.57730097611.00854700850674.832493709436.99145299100669.848607756432.2051282050671.29979314427.4188034190659.277364309422.6324786320650.084285352417.8461538460643.385667333413.0598290650640.145050347408.2735042740631.351488013403.4871794870622.487422746398.7008547010621.868004115393.9145299150629.083643647389.128205128100636.151562747384.3418803420640.432937326379.5555555560648.272192093374.7692307690646.61285519369.9829059830650.39651263365.1965811971523658.655847376360.4102564191665.053988606355.623931624412678.099129636350.8376068381319689.771981421346.0512820512984689.315810654341.264957265205816689.31161405336.47863247910053698.913022499331.69230769216045696.608248024326.90598290624240692.632805818322.1196581235066690.222276944317.3333333332547828676.848541884312.54700854762384661.184300801307.76068376178916641.439282772302.97435897497822639.65069049298.188034188112738626.864479878293.40170940230124414607.001956959288.615384615130658588.241462039283.829059829140789578.591372994279.042735043153951577.139348289274.256410256168560576.206443399269.4700854735179996572.299405825264.683760684194471575.53876383259.897435897212006578.004268207255.111111111225209577.564044532250.324786325228886570.661051938245.53846153840239690571.559964342240.752136752245974569.513700624235.965811966253459570.829335726231.179487179255455564.846658212226.393162393260715562.004718529221.60683760745259980554.992614251216.820512821256481547.06816844212.034188034252222538.374065602207.247863248249467532.056079493202.461538462240268522.707306557197.67521367550238465518.480068159192.888888889238167514.770270935188.102564103231361507.066986112183.316239316223832500.483774681178.52991453220459496.010615336173.74358974455222512495.254807101168.957264957220099491.851781571164.170940171219301489.807196494159.384615385221322489.015297472154.598290598229783491.816530104149.81196581260239336495.17423232145.025641026258360503.721034385140.239316239271151508.251267533135.452991453218063480.320355313130.666666667213461477.04532618125.8803418865207617473.071982276121.094017094196227467.358726687116.307692308174615456.189886946111.521367521160855449.170648443106.735042735165410450.986518645101.94871794970163070449.32802106397.1623931624157379446.53350299692.3760683761149698442.86357350287.5897435897140570438.53183968582.8034188034131785434.04105460778.017094017175119936428.41970462873.2307692308113751425.49593118368.4444444444106989422.16718552963.658119658199294418.89005809558.871794871889097413.76558593454.08547008558078413408.61551449449.299145299168174403.40207434644.512820512860592399.81817521839.726495726552189395.52295184834.940170940243375391.26423892730.15384615388535469387.36307659725.367521367529648384.32347690420.581196581224575381.77739774615.794871794920863379.95019671511.00854700850674.832493709436.99145299100669.848607756432.2051282050671.29979314427.4188034190659.277364309422.6324786320650.084285352417.8461538460643.385667333413.0598290650640.145050347408.2735042740631.351488013403.4871794870622.487422746398.7008547010621.868004115393.9145299150629.083643647389.128205128100636.151562747384.3418803420640.432937326379.5555555560648.272192093374.7692307690646.61285519369.9829059830650.39651263365.196581197157495.5417819728360.4102564139290.4378731679355.623931624135178.1833721802350.837606838390669.6571334703346.051282051784767.8328700617341.264957265201285768.3926969278336.4786324791991363.0378312519331.6923076922910869.9869864813326.9059829064247568.8098392853322.119658125828775.0199726923317.333333333257416392.5760424954312.54700854790724111.130484198307.760683761108375120.062114777302.974358974125886123.556626257298.188034188141559134.226489878293.40170940230148061152.30629576288.615384615152871168.32053355283.829059829159725178.286626815279.042735043171298179.25226521274.256410256183536179.226246271269.4700854735196136180.155374218264.683760684210831175.268429798259.897435897228757169.961405399255.111111111238731171.449520891250.324786325239616179.818806642245.53846153840250036179.455380805240.752136752251759183.593651215235.965811966259593183.722906593231.179487179261832187.747029396226.393162393264864191.212584314221.60683760745264702196.68495488216.820512821264070202.766672229212.034188034258117210.885001108207.247863248253678218.295782689202.461538462245440226.901337199197.67521367550241342231.818916822192.888888889239843236.026011525188.102564103232493242.854724242183.316239316226118248.183151318178.52991453221644252.439346278173.74358974455223440253.281184879168.957264957219833256.65903079164.170940171219659258.320046334159.384615385221271258.786708609154.598290598227123257.009027496149.81196581260232865255.306885241145.025641026250646248.070682353140.239316239261796243.768324758135.452991453210136268.679361319130.666666667201824272.816372748125.8803418865193109277.097327667121.094017094181831282.349796225116.307692308159280292.896279712111.521367521145235299.503831442106.735042735145929298.839089496101.94871794970140266301.06874477397.1623931624133082304.20780396292.3760683761124350308.01748034287.5897435897114441312.41090422682.8034188034104655316.54791565578.01709401717593223321.53599821273.230769230885899324.44256558468.444444444478800327.53168519663.658119658172081330.21121633558.871794871862645334.31969086354.08547008558053214338.72402591549.299145299144086342.9214684344.512820512838481345.45999370239.726495726532219348.3867047734.940170940226867351.05994100530.15384615388521443353.91363117725.367521367516899356.28639062320.581196581213680357.99902438615.794871794911508359.32515099611.00854700850674.832493709436.99145299100669.848607756432.2051282050671.29979314427.4188034190659.277364309422.6324786320650.084285352417.8461538460643.385667333413.0598290650640.145050347408.2735042740631.351488013403.4871794870622.487422746398.7008547010621.868004115393.9145299150629.083643647389.128205128100636.151562747384.3418803420640.432937326379.5555555560648.272192093374.7692307690646.61285519369.9829059830650.39651263365.196581197157658.668437186360.410256415665.094275997355.62393162417678.279163913350.83760683815690.331808287346.05128205131690.581086517341.2649572652034691.766626919336.47863247938703.147814782331.69230769235703.356385961326.90598290645702.824256676322.11965812299705.06356414317.33333333325295697.043855436312.547008547218687.455876118307.760683761247674.660852639302.974358974252680.808456658298.188034188254674.282738691293.40170940230222659.306740532288.615384615307643.202275774283.829059829316637.807542367279.042735043385641.908043348274.256410256416647.118965535269.4700854735463648.030887409264.683760684557657.384276608259.897435897670667.255946302255.111111111830672.423643476250.324786325889667.088501835245.538461538401025672.578498134240.7521367521149673.221417743235.9658119661356677.765080022231.1794871791488672.675439668226.3931623931835672.186535395221.606837607451929664.905428853216.8205128212130655.596943308212.0341880342362645.212868364207.2478632482494637.794113237202.4615384622884624.748552547197.675213675503160619.880492842192.8888888893487616.182865767188.1025641033916605.803407087183.3162393164196596.178078178.529914534619590.466920713173.743589744555032590.745994826168.9572649575709586.614438981164.1709401716347584.502708253159.3846153857288584.953843097154.5982905988139591.662952624149.811965812609344599.535360556145.02564102611002616.761577348140.23931623912809627.418011839135.45299145311504576.660515864130.66666666711918571.627949314125.880341886512927565.625547765121.09401709413642555.432418266116.30769230813298535.049516551111.52136752114015522.556648502106.73504273515751527.012601774101.9487179497017445525.08300362597.162393162418591520.38112940692.376068376119682513.94563840887.589743589720969506.32334802682.803418803421629498.42282285278.01709401717522549488.21500524273.230769230823619483.14466926368.444444444425288477.67859360263.658119658126293471.59393863158.871794871827038462.50283713154.08547008558027039452.86953449949.299145299127070443.3722018144.512820512827750436.89180715739.726495726527244428.85783000234.940170940225905420.33830585730.15384615388524357412.46967486825.367521367522561406.23352252120.581196581221794401.23662709715.794871794920595397.34847423811.00854700850674.832493709436.99145299100669.848607756432.2051282050671.29979314427.4188034190659.277364309422.6324786320650.084285352417.8461538460643.385667333413.0598290650640.145050347408.2735042740631.351488013403.4871794870622.487422746398.7008547010621.868004115393.9145299150629.083643647389.128205128100636.151562747384.3418803420640.432937326379.5555555560648.272192093374.7692307690646.61285519369.9829059830650.39651263365.19658119715695.5082091472360.41025641890.2700090401355.6239316240678.286298138350.837606838868.01458298346.0512820512164.5309826684341.264957265203462.98285575336.4786324794954.6605719554331.6923076928457.7362624365326.9059829069750.9440601665322.1196581236850.404796656317.3333333332540161.2844904373312.54700854741472.8834820061307.76068376155774.3476768606302.97435897465470.4528094359298.18803418863174.5549890584293.4017094023068989.8818232446288.615384615698103.873717955283.829059829858110.89631374279.0427350431031106.932622023274.2564102561120101.733450326269.4700854735126397.3148468224264.683760684161486.1136932364259.897435897188273.1713689852255.111111111213770.3667790704250.324786325251678.2056141771245.53846153840292373.298526062240.752136752313276.6260127347235.965811966374173.2120760362231.179487179425976.0791953385226.393162393493077.9907480935221.60683760745532083.3674361061216.820512821594889.4508320966212.034188034654899.8156026659207.2478632487463108.705267212202.4615384628309120.412950804197.675213675509142126.70072137192.88888888910321131.042107374188.10256410311167140.60029081183.31623931612062148.228456436178.5299145313317153.835537964173.7435897445515238153.117499157168.95726495716706157.392998492164.17094017118236158.484954643159.38461538520336157.393837813154.59829059823407151.871527669149.8119658126027024146.241784484145.02564102632502129.244702226140.23931623937334118.235333406135.45299145334454166.03464379130.66666666738080172.138183475125.880341886541811178.510725426121.09401709444490187.371853071116.30769230845247207.06441356111.52136752146830218.90177219106.73504273553616215.097971055101.9487179497058798217.52948294697.162393162463224221.82596529692.376068376166841227.7822042187.589743589771086234.55258414382.803418803473654241.71870375878.01709401717577334249.95999311173.230769230882062253.95599867268.444444444487314257.82023089463.658119658192207261.26606177758.871794871894603268.32894495354.08547008558094113276.89673003449.299145299192753285.49556997944.512820512893174290.20961434739.726495726591812296.3358156934.940170940287757302.9567965530.15384615388584255309.55637473425.367521367579723315.73797123920.581196581277536319.71928868915.794871794974173323.36823516711.00854700850674.832493709436.99145299100669.848607756432.2051282050671.29979314427.4188034190659.277364309422.6324786320650.084285352417.8461538460643.385667333413.0598290650640.145050347408.2735042740631.351488013403.4871794870622.487422746398.7008547010621.868004115393.9145299150629.083643647389.128205128100636.151562747384.3418803420640.432937326379.5555555560648.272192093374.7692307690646.61285519369.9829059830650.39651263365.196581197150658.671374808360.410256411665.096793959355.6239316245678.28839644350.8376068380690.338103192346.05128205111690.59871225341.2649572652035691.795583481336.478632479137703.221255338331.692307692331703.509981638326.905982906803703.180128627322.119658121580705.852105881317.333333333252361698.158473244312.5470085473632689.071568348307.7606837614866676.806575852302.9743589746849683.788464586298.1880341888754678.063038848293.4017094023010422663.773604972288.61538461512316648.499647986283.82905982914152643.879187869279.04273504316911649.166488233274.25641025619788655.597782629269.470085473522822657.802677947264.68376068427329669.086924276259.89743589731547680.776142814255.11111111135711687.758451208250.32478632538932683.799795415245.5384615384042956691.035578643240.75213675246466693.203543853235.96581196649983699.310021161231.17948717952885695.493630216226.39316239355178696.112629187221.6068376074556549689.446325013216.82051282157632680.676683318212.03418803457770670.447882692207.24786324857427662.940579238202.46153846256348649.605872588197.6752136755055593644.536795589192.88888888955554640.960030687188.10256410353266629.800423473183.31623931651084619.376900458178.5299145349342613.11221121173.7435897445548555613.234332363168.95726495747067608.762431999164.17094017145789606.382118667159.38461538544988606.892005955154.59829059844624613.805490058149.8119658126044238622.021599792145.02564102646267640.795104181140.23931623946203652.183006609135.45299145336964597.000612226130.66666666733866590.841677378125.880341886531701584.3541485121.09401709428770573.231051734116.30769230825174551.19468836111.52136752122702537.965316451106.73504273521934542.827500912101.9487179497020638541.0649275797.162393162419051536.1779831592.376068376117073529.37025344887.589743589715381521.57800063782.803418803413736513.26411004978.01709401717511690502.58375491973.230769230810368497.4076645468.44444444449350492.21478774763.65811965818375486.14272258558.87179487187063476.81367368454.0854700855806006466.73720975449.29914529915044456.84917330844.51282051284030450.22861210839.72649572653420441.72629403734.94017094022612432.30575918630.1538461538852006423.53317986925.36752136751709416.41867847420.58119658121264410.91315474315.79487179491018406.41859272211.00854700850674.832493709436.99145299100669.848607756432.2051282050671.29979314427.4188034190659.277364309422.6324786320650.084285352417.8461538460643.385667333413.0598290650640.145050347408.2735042740631.351488013403.4871794870622.487422746398.7008547010621.868004115393.9145299150629.083643647389.128205128100636.151562747384.3418803420640.432937326379.5555555560648.272192093374.7692307690646.61285519369.9829059830650.39651263365.19658119715495.504012544360.41025641690.2641337956355.6239316241177.6117948251350.8376068382068.0028324911346.0512820516864.4936329341.2649572652017962.893468102336.47863247948054.4385716464331.692307692107757.2490368056326.905982906209450.0245844066322.11965812358148.7475580546317.33333333325515158.9545363438312.547008547704769.7523963628307.760683761959070.0893835993302.9743589741243464.9602951752298.1880341881503967.9789118529293.401709402301725782.3505991521288.6153846151909895.5661222713283.82905982921324101.587408535279.0427350432445396.2379984427274.2564102562781389.5914183036269.47008547353231683.2230729562264.6837606843728169.7910051122259.8974358974359754.0856373178255.1111111114964748.635089089250.3247863255355954.673161765245.538461538405888847.3589020576240.7521367526237549.1353241898235.9658119666721943.4329797693231.1794871797095644.5144444125226.3931623937354745.0570652055221.606837607457490449.70060664216.8205128217599455.0630262017212.0341880347622465.0794787059207.2478632487497974.1076311578202.4615384627206486.6835919502197.675213675507033093.3494764641192.8888888896894497.7777321547188.10256410366527107.995201952183.31623931663073116.697278336178.5299145360899122.690027697173.7435897445560968121.136864855168.95726495758756125.724591467164.17094017157647126.639870624159.38461538556301125.232329912154.59829059857246118.024663926149.8119658126057068110.951708902145.0256410265902790.8336128491140.2393162395918777.7292997145135.45299145347549131.621238615130.66666666744425137.514108821125.880341886540976143.768306561121.09401709438077152.721759476116.30769230832904174.267539936111.52136752129431186.89805657106.73504273529491180.221260888101.9487179497028020181.09541333397.162393162426086184.34610216892.376068376124069189.6308845787.589743589721742195.59635601182.803418803419498202.62650568278.01709401717517400210.2038924173.230769230815738212.91321943268.444444444414451215.11349848763.658119658113107217.06995489658.871794871811568223.77318917854.08547008558010171233.13287328349.29914529918530242.99111384644.51282051287273248.05599424139.72649572656488255.0832062934.94017094025372263.87425066230.1538461538854499272.30984274325.36752136753691280.73242535420.58119658123259285.81283318115.79487179492657291.12573282511.0085470085England population by age in 2010 (source: ons.gov.uk)Males singleFemales singleMales marriedFemales marriedMales widowedFemales widowedMales divorcedFemales divorced

Standard ML plots

ROC Curve


In [26]:
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
import pandas as pd

X, y = make_classification(n_samples=10000, n_features=10, n_classes=2, n_informative=5)
Xtrain = X[:9000]
Xtest = X[9000:]
ytrain = y[:9000]
ytest = y[9000:]

clf = LogisticRegression()
clf.fit(Xtrain, ytrain)

preds = clf.predict_proba(Xtest)[:,1]
fpr, tpr, _ = metrics.roc_curve(ytest, preds)

df = pd.DataFrame(dict(fpr=fpr, tpr=tpr))
ggplot(df, aes(x='fpr', y='tpr')) +\
    geom_line() +\
    geom_abline(linetype='dashed')


Out[26]:
<ggplot: (27985843)>

In [27]:
from scipy import interp
from itertools import cycle

from sklearn import svm, datasets
from sklearn.metrics import roc_curve, auc
from sklearn.model_selection import StratifiedKFold

In [34]:
iris = datasets.load_iris()
X = iris.data
y = iris.target
X, y = X[y != 2], y[y != 2]
n_samples, n_features = X.shape

# Add noisy features
random_state = np.random.RandomState(0)
X = np.c_[X, random_state.randn(n_samples, 200 * n_features)]

# Classification and ROC analysis
# Run classifier with cross-validation and plot ROC curves
cv = StratifiedKFold(n_splits=6)
classifier = svm.SVC(kernel='linear', probability=True,
                     random_state=random_state)

mean_tpr = 0.0
mean_fpr = np.linspace(0, 1, 100)

colors = cycle(['cyan', 'indigo', 'seagreen', 'yellow', 'blue', 'darkorange'])
lw = 2

i = 0
sns.set_style("whitegrid")
for (train, test), color in zip(cv.split(X, y), colors):
    probas_ = classifier.fit(X[train], y[train]).predict_proba(X[test])
    # Compute ROC curve and area the curve
    fpr, tpr, thresholds = roc_curve(y[test], probas_[:, 1])
    mean_tpr += interp(mean_fpr, fpr, tpr)
    mean_tpr[0] = 0.0
    roc_auc = auc(fpr, tpr)
    plt.plot(fpr, tpr, lw=lw, color=color,
             label='ROC fold %d (area = %0.2f)' % (i, roc_auc))

    i += 1
plt.plot([0, 1], [0, 1], linestyle='--', lw=lw, color='k',
         label='Luck')

mean_tpr /= cv.get_n_splits(X, y)
mean_tpr[-1] = 1.0
mean_auc = auc(mean_fpr, mean_tpr)

plt.plot(mean_fpr, mean_tpr, color='g', linestyle='--',
         label='Mean ROC (area = %0.2f)' % mean_auc, lw=lw)

plt.xlim([-0.05, 1.05])
plt.ylim([-0.05, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic example')
plt.legend(loc="lower right")
plt.show()


Learning Curve


In [35]:
from sklearn import cross_validation
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.datasets import load_digits
from sklearn.model_selection import learning_curve


def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
                        n_jobs=1, train_sizes=np.linspace(.1, 1.0, 5)):
    """
    Generate a simple plot of the test and traning learning curve.

    Parameters
    ----------
    estimator : object type that implements the "fit" and "predict" methods
        An object of that type which is cloned for each validation.

    title : string
        Title for the chart.

    X : array-like, shape (n_samples, n_features)
        Training vector, where n_samples is the number of samples and
        n_features is the number of features.

    y : array-like, shape (n_samples) or (n_samples, n_features), optional
        Target relative to X for classification or regression;
        None for unsupervised learning.

    ylim : tuple, shape (ymin, ymax), optional
        Defines minimum and maximum yvalues plotted.

    cv : integer, cross-validation generator, optional
        If an integer is passed, it is the number of folds (defaults to 3).
        Specific cross-validation objects can be passed, see
        sklearn.cross_validation module for the list of possible objects

    n_jobs : integer, optional
        Number of jobs to run in parallel (default 1).
    """
    plt.figure()
    plt.title(title)
    if ylim is not None:
        plt.ylim(*ylim)
    plt.xlabel("Training examples")
    plt.ylabel("Score")
    train_sizes, train_scores, test_scores = learning_curve(
        estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes)
    train_scores_mean = np.mean(train_scores, axis=1)
    train_scores_std = np.std(train_scores, axis=1)
    test_scores_mean = np.mean(test_scores, axis=1)
    test_scores_std = np.std(test_scores, axis=1)
    plt.grid()

    plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
                     train_scores_mean + train_scores_std, alpha=0.1,
                     color="r")
    plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
                     test_scores_mean + test_scores_std, alpha=0.1, color="g")
    plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
             label="Training score")
    plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
             label="Cross-validation score")

    plt.legend(loc="best")
    return plt


digits = load_digits()
X, y = digits.data, digits.target


title = "Learning Curves (Naive Bayes)"
# Cross validation with 100 iterations to get smoother mean test and train
# score curves, each time with 20% data randomly selected as a validation set.
cv = cross_validation.ShuffleSplit(digits.data.shape[0], n_iter=100,
                                   test_size=0.2, random_state=0)

estimator = GaussianNB()
plot_learning_curve(estimator, title, X, y, ylim=(0.7, 1.01), cv=cv, n_jobs=4)

title = "Learning Curves (SVM, RBF kernel, $\gamma=0.001$)"
# SVC is more expensive so we do a lower number of CV iterations:
cv = cross_validation.ShuffleSplit(digits.data.shape[0], n_iter=10,
                                   test_size=0.2, random_state=0)
estimator = SVC(gamma=0.001)
plot_learning_curve(estimator, title, X, y, (0.7, 1.01), cv=cv, n_jobs=4)

plt.show()


C:\Users\gerrit\Anaconda\lib\site-packages\sklearn\cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.
  "This module will be removed in 0.20.", DeprecationWarning)