In [0]:
# suppress library warnings to keep the notebook output readable
import warnings
warnings.filterwarnings('ignore')
In [3]:
%matplotlib inline
import matplotlib.pyplot as plt
In [4]:
import pandas as pd
print(pd.__version__)
In [5]:
import numpy as np
print(np.__version__)
In [6]:
import seaborn as sns
print(sns.__version__)
In [7]:
!curl -O https://raw.githubusercontent.com/DJCordhose/deep-learning-crash-course-notebooks/master/data/insurance-customers-1500.csv
In [0]:
df = pd.read_csv('./insurance-customers-1500.csv', sep=';')
In [9]:
df.head()
Out[9]:
In [10]:
df.describe()
Out[10]:
In [11]:
sample_df = df.sample(n=100, random_state=42)
sns.pairplot(sample_df, hue="group", palette={0: '#AA4444', 1: '#006000', 2: '#EEEE44'})
Out[11]:
In [17]:
cols_for_correlation = [
    'speed',
    'age',
    'miles'
]
corrmat = df[cols_for_correlation].corr()
sns.heatmap(corrmat, annot=True)
figure = plt.gcf()
figure.set_size_inches(10, 10)
In [0]:
# ignore this, it is just technical code to plot decision boundaries
# Adapted from:
# http://scikit-learn.org/stable/auto_examples/neighbors/plot_classification.html
# http://jponttuset.cat/xkcd-deep-learning/
from matplotlib.colors import ListedColormap
cmap_print = ListedColormap(['#AA8888', '#004000', '#FFFFDD'])
cmap_bold = ListedColormap(['#AA4444', '#006000', '#EEEE44'])
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#FFFFDD'])
font_size = 25
title_font_size = 40

def meshGrid(x_data, y_data):
    h = 1  # step size in the mesh
    x_min, x_max = x_data.min() - 1, x_data.max() + 1
    y_min, y_max = y_data.min() - 1, y_data.max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    return (xx, yy)

def plot_prediction(clf, x_data, y_data, x_label, y_label, ground_truth, title="",
                    mesh=True, fname=None, print=False):
    xx, yy = meshGrid(x_data, y_data)
    fig, ax = plt.subplots(figsize=(15, 8))
    if clf and mesh:
        # note the column order: in the calls below x_data is age and y_data is max speed,
        # so np.c_[yy, xx] yields (speed, age) pairs as the classifiers expect
        Z = clf.predict(np.c_[yy.ravel(), xx.ravel()])
        # put the result into a color plot
        Z = Z.reshape(xx.shape)
        ax.pcolormesh(xx, yy, Z, cmap=cmap_light)
    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    if print:
        ax.scatter(x_data, y_data, c=ground_truth, cmap=cmap_print, s=200, marker='o', edgecolors='k')
    else:
        ax.scatter(x_data, y_data, c=ground_truth, cmap=cmap_bold, s=100, marker='o', edgecolors='k', alpha=0.7)
    ax.set_xlabel(x_label, fontsize=font_size)
    ax.set_ylabel(y_label, fontsize=font_size)
    ax.set_title(title, fontsize=title_font_size)
    if fname:
        fig.savefig('figures/' + fname)
In [0]:
sample_df = df.sample(n=1000, random_state=42)
y = sample_df['group']
sample_df.drop('group', axis='columns', inplace=True)
X = sample_df.values  # as_matrix() was removed in newer pandas versions
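A quick sanity check (no new data, just shapes): after dropping the label column there should be 1000 rows and the three feature columns speed, age and miles.
In [0]:
# expected: ((1000, 3), (1000,))
X.shape, y.shape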
In [0]:
X[:10]
Out[0]:
In [0]:
plot_prediction(None, X[:, 1], X[:, 0],
                'Age', 'Max Speed', y, mesh=False,
                title="Max Speed vs Age")
In [0]:
# 0: red
# 1: green
# 2: yellow
class ClassifierBase:
    def predict(self, X):
        return np.array([self.predict_single(x) for x in X])

    def score(self, X, y):
        # accuracy: fraction of predictions that match the ground truth
        n = len(y)
        correct = 0
        predictions = self.predict(X)
        for prediction, ground_truth in zip(predictions, y):
            if prediction == ground_truth:
                correct = correct + 1
        return correct / n
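To see the base class in isolation, here is a minimal sketch using a hypothetical ConstantClassifier (not part of the original notebook) that always predicts group 1; on a toy label array where two of four entries are 1, score should come out as 0.5.
In [0]:
# hypothetical subclass, for illustration only
class ConstantClassifier(ClassifierBase):
    def predict_single(self, x):
        return 1

toy_X = np.zeros((4, 3))        # four dummy samples, features are ignored
toy_y = np.array([1, 0, 1, 2])  # two out of four labels are 1
ConstantClassifier().score(toy_X, toy_y)  # expected: 0.5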
In [0]:
from random import randrange

# guesses one of the three groups uniformly at random
class RandomClassifier(ClassifierBase):
    def predict_single(self, x):
        return randrange(3)
In [0]:
random_clf = RandomClassifier()
In [0]:
plot_prediction(random_clf, X[:, 1], X[:, 0],
                'Age', 'Max Speed', y,
                title="Max Speed vs Age (Random)")
In [0]:
random_clf.score(X, y)
Out[0]:
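A uniformly random guess over three groups is right about a third of the time, so the score above should be close to 1/3. As a cross-check (assuming scikit-learn is installed in this environment), the hand-rolled score can be compared against sklearn.metrics.accuracy_score; both values fluctuate because every call produces new random predictions.
In [0]:
from sklearn.metrics import accuracy_score
accuracy_score(y, random_clf.predict(X))  # also close to 1/3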
In [0]:
# 0: red
# 1: green
# 2: yellow
class BaseLineClassifier(ClassifierBase):
    def predict_single(self, x):
        try:
            speed, age, miles_per_year = x
        except ValueError:
            # only (speed, age) was passed, e.g. from the decision-boundary mesh
            speed, age = x
            miles_per_year = 0
        if age < 25:
            if speed > 140:
                return 0
            else:
                return 2
        if age > 75:
            return 0
        if miles_per_year > 30:
            return 0
        if miles_per_year > 20:
            return 2
        return 1
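A quick spot check of the hand-written rules with a few made-up (speed, age, miles) combinations (illustrative values, not drawn from the data set): a fast young driver and a very high annual mileage should map to group 0, a moderate young driver to group 2, and an average middle-aged driver to group 1.
In [0]:
demo_clf = BaseLineClassifier()
# expected: [0, 2, 1, 0]
[demo_clf.predict_single(x) for x in [(150, 22, 10), (120, 22, 10), (45, 40, 10), (100, 40, 35)]]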
In [0]:
base_clf = BaseLineClassifier()
In [0]:
plot_prediction(base_clf, X[:, 1], X[:, 0],
                'Age', 'Max Speed', y,
                title="Max Speed vs Age with Classification")
In [0]:
base_clf.score(X, y)
Out[0]:
In [0]: