In [1]:
import warnings
warnings.filterwarnings('ignore')
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
plt.xkcd()
Out[1]:
In [22]:
import sklearn
sklearn.__version__
Out[22]:
In [0]:
import pandas as pd
df = pd.read_csv('https://raw.githubusercontent.com/DJCordhose/deep-learning-crash-course-notebooks/master/data/insurance-customers-1500.csv', sep=';')
In [6]:
df.head()
Out[6]:
In [7]:
df.describe()
Out[7]:
In [0]:
y = df['group']
In [0]:
df.drop('group', axis='columns', inplace=True)
In [0]:
X = df.values  # DataFrame.as_matrix() was removed in newer pandas versions
In [0]:
# ignore this, it is just technical code to plot decision boundaries
# Adapted from:
# http://scikit-learn.org/stable/auto_examples/neighbors/plot_classification.html
# http://jponttuset.cat/xkcd-deep-learning/
from matplotlib.colors import ListedColormap
cmap_print = ListedColormap(['#AA8888', '#004000', '#FFFFDD'])
cmap_bold = ListedColormap(['#AA4444', '#006000', '#EEEE44'])
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#FFFFDD'])
font_size = 25
title_font_size = 40

def meshGrid(x_data, y_data):
    h = 1  # step size in the mesh
    # x_min, x_max = 0, 500
    # y_min, y_max = 0, 500
    x_min, x_max = x_data.min() - 1, x_data.max() + 1
    y_min, y_max = y_data.min() - 1, y_data.max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    return (xx, yy)

def plotPrediction(clf, x_data, y_data, x_label, y_label, ground_truth, title="",
                   size=(15, 8)):
    xx, yy = meshGrid(x_data, y_data)
    fig, ax = plt.subplots(figsize=size)
    if clf:
        Z = clf.predict(np.c_[yy.ravel(), xx.ravel()])
        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        ax.pcolormesh(xx, yy, Z, cmap=cmap_light)
    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.scatter(x_data, y_data, c=ground_truth, cmap=cmap_bold, s=100, marker='o', edgecolors='k')
    ax.set_xlabel(x_label, fontsize=font_size)
    ax.set_ylabel(y_label, fontsize=font_size)
    ax.set_title(title, fontsize=title_font_size)
In [69]:
plotPrediction(None, X[:, 1], X[:, 0],
               'Age', 'Max Speed', y,
               title="All Data")
In [18]:
X[:, :2]
Out[18]:
https://scikit-learn.org/stable/auto_examples/classification/plot_classifier_comparison.html
https://scikit-learn.org/stable/supervised_learning.html
https://scikit-learn.org/stable/modules/naive_bayes.html#gaussian-naive-bayes
https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.GaussianNB.html
https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.MultinomialNB.html
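The links above cover the Naive Bayes family in scikit-learn. As a rough intuition for what GaussianNB does, here is a minimal sketch (illustrative only, not scikit-learn's actual implementation; the helper name gaussian_nb_predict_proba is made up and it expects plain numpy arrays): fit one Gaussian per feature and class, then combine the per-feature log-likelihoods with the class priors via Bayes' rule.
In [ ]:
import numpy as np

def gaussian_nb_predict_proba(X_train, y_train, X_new):
    # one Gaussian per (class, feature) pair, assuming features are independent given the class
    classes = np.unique(y_train)
    log_joint = []
    for c in classes:
        Xc = X_train[y_train == c]
        mean, var = Xc.mean(axis=0), Xc.var(axis=0) + 1e-9  # tiny smoothing term
        prior = len(Xc) / len(X_train)
        # log N(x | mean, var), summed over features
        log_likelihood = -0.5 * (np.log(2 * np.pi * var) + (X_new - mean) ** 2 / var).sum(axis=1)
        log_joint.append(np.log(prior) + log_likelihood)
    log_joint = np.array(log_joint).T
    # normalize the per-class joint log-probabilities into probabilities
    log_joint -= log_joint.max(axis=1, keepdims=True)
    probs = np.exp(log_joint)
    return probs / probs.sum(axis=1, keepdims=True)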
In [72]:
%%time
from sklearn.naive_bayes import GaussianNB, MultinomialNB, ComplementNB, BernoulliNB
clf = GaussianNB()
clf.fit(X[:, :2], y)
In [21]:
# class probabilities for a single customer: [max speed, age]
sample = [[100.0, 48.0]]
clf.predict_proba(sample)
Out[21]:
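As a quick sanity check on the sample above: predict should return the class with the highest predict_proba value, i.e. the most probable class under the fitted Gaussians.
In [ ]:
import numpy as np
probs = clf.predict_proba(sample)
clf.predict(sample), clf.classes_[np.argmax(probs, axis=1)]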
In [23]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42, stratify=y)
X_train.shape, y_train.shape, X_test.shape, y_test.shape
Out[23]:
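Because of stratify=y, the three customer groups should appear in (almost) the same proportions in both splits; a quick check with pandas:
In [ ]:
y_train.value_counts(normalize=True), y_test.value_counts(normalize=True)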
In [0]:
X_train_2_dim = X_train[:, :2]
X_test_2_dim = X_test[:, :2]
In [25]:
plotPrediction(None, X_train_2_dim[:, 1], X_train_2_dim[:, 0],
               'Age', 'Max Speed', y_train, title="Train Data")
In [26]:
plotPrediction(None, X_test_2_dim[:, 1], X_test_2_dim[:, 0],
               'Age', 'Max Speed', y_test, title="Test Data")
In [78]:
# clf = GaussianNB(priors=[0.33, 0.33, 0.34], var_smoothing=1e-03)
clf = GaussianNB()
clf.fit(X_train_2_dim, y_train)
Out[78]:
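The fitted model is nothing more than a mean and a variance per feature and class, plus the class priors, so it can be inspected directly (attribute names as in current scikit-learn; older releases expose the variances as sigma_ instead of var_).
In [ ]:
# per-class priors and per-class feature means (max speed, age)
clf.class_prior_, clf.theta_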
In [79]:
plotPrediction(clf, X_train_2_dim[:, 1], X_train_2_dim[:, 0],
               'Age', 'Max Speed', y_train,
               title="Train Data, Gaussian Naive Bayes")
In [66]:
clf.score(X_train_2_dim, y_train)
Out[66]:
In [67]:
plotPrediction(clf, X_test_2_dim[:, 1], X_test_2_dim[:, 0],
               'Age', 'Max Speed', y_test,
               title="Test Data, Gaussian Naive Bayes")
In [47]:
clf.score(X_test_2_dim, y_test)
Out[47]:
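The single accuracy score can be broken down per class; a short sketch using scikit-learn's standard metrics:
In [ ]:
from sklearn.metrics import confusion_matrix, classification_report
y_pred = clf.predict(X_test_2_dim)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))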
In [0]: