In [1]:
import warnings
warnings.filterwarnings('ignore')
%matplotlib inline
%pylab inline
import matplotlib.pyplot as plt
import pandas as pd
import tensorflow as tf
from tensorflow import keras
In [2]:
# Download the insurance customer dataset (1500 rows per the filename) from the
# course repository into the working directory as a semicolon-separated CSV.
!curl -O https://raw.githubusercontent.com/DJCordhose/deep-learning-crash-course-notebooks/master/data/insurance-customers-1500.csv
In [0]:
# The file uses ';' as its field separator, so the pandas default ',' would not parse it.
df = pd.read_csv('./insurance-customers-1500.csv', sep=';')
In [0]:
# sns.pairplot?
In [31]:
# Pairwise scatter matrix on a 100-row sample (fixed seed so the picture is
# reproducible); point color encodes the customer group, with KDE curves on
# the diagonal instead of histograms.
import seaborn as sns
customer_sample = df.sample(n=100, random_state=42)
sns.pairplot(
    customer_sample,
    hue="group",
    palette={0: '#AA4444', 1: '#006000', 2: '#EEEE44'},
    diag_kind='kde',
)
Out[31]:
In [0]:
# Separate the label column (customer group) from the feature matrix.
y = df.loc[:, 'group']
X = df.drop(columns='group')
In [33]:
from tensorflow.keras.layers import Dense

# Minimal classifier: one hidden ReLU layer over the 3 input features,
# softmax output over the 3 customer groups.
model = keras.Sequential([
    Dense(units=50, activation='relu', input_dim=3),
    Dense(units=3, activation='softmax'),
])
model.summary()
In [34]:
%%time
# Near-full-batch training: batch size 1000 on a dataset of ~1500 rows (per
# the filename), so only a couple of weight updates per epoch; 2000 epochs
# compensates for that. %%time reports the total cost of this cell.
BATCH_SIZE=1000
EPOCHS = 2000
# sparse_categorical_crossentropy matches the integer (non-one-hot) group labels in y.
model.compile(loss='sparse_categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
# verbose=0 suppresses the 2000-line per-epoch progress log.
history = model.fit(X, y, epochs=EPOCHS, batch_size=BATCH_SIZE, verbose=0)
In [37]:
# Evaluate on the same data the model was trained on (no held-out test split
# in this notebook), so this measures fit quality, not generalization.
loss, accuracy = model.evaluate(X, y, batch_size=BATCH_SIZE)
# Bare tuple as the last expression so the notebook displays both values.
loss, accuracy
Out[37]:
In [38]:
# Predict the group for a single customer; argmax turns the softmax probability
# vector into a class index. The feature order presumably matches the CSV
# columns after dropping 'group' — TODO confirm against df.columns.
np.argmax(model.predict(np.array([[100, 47, 10]])))
Out[38]:
In [39]:
# Training curves on a log scale so the loss decay stays readable.
# BUG FIX: Keras renamed the history key 'acc' -> 'accuracy' (TF 2.x /
# Keras >= 2.3 record the key exactly as passed to metrics=['accuracy']),
# so history.history['acc'] raises KeyError there. Look the key up so the
# cell works under either naming.
acc_key = 'accuracy' if 'accuracy' in history.history else 'acc'
plt.plot(history.history[acc_key])
plt.plot(history.history['loss'])
plt.yscale('log')
plt.xlabel('epoch')
plt.legend(['Accuracy', 'Loss']);  # trailing ';' hides the Legend object repr
Out[39]: