In [0]:
    
!pip install -q tf-nightly-gpu-2.0-preview
    
In [2]:
    
import tensorflow as tf
print(tf.__version__)
    
    
In [0]:
    
import matplotlib.pyplot as plt
import pandas as pd
import tensorflow as tf
import numpy as np
from tensorflow import keras
    
In [7]:
    
!curl -O https://raw.githubusercontent.com/DJCordhose/deep-learning-crash-course-notebooks/master/data/insurance-customers-1500.csv
    
    
In [6]:
    
df = pd.read_csv('./insurance-customers-1500.csv', sep=';')
y = df['group']
df.drop('group', axis='columns', inplace=True)
X = df.to_numpy()  # DataFrame.as_matrix() has been removed from pandas; to_numpy() is the replacement
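
A quick sanity check on the loaded data does not hurt; the cell below is only an optional sketch (not part of the original notebook) that peeks at the feature columns and confirms the shapes.

In [0]:
    
# optional: inspect the features and the label distribution
print(df.head())
print(X.shape, y.shape)
print(y.value_counts())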
    
    
In [0]:
    
from sklearn.model_selection import train_test_split
    
In [0]:
    
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42, stratify=y)
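
As a quick check (not part of the original notebook), we can confirm that stratify=y kept the three groups balanced in both splits:

In [0]:
    
# sketch: verify that the stratified split preserved the class proportions
print(np.unique(y_train, return_counts=True))
print(np.unique(y_test, return_counts=True))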
    
In [9]:
    
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization, Activation
num_categories = 3
dropout = 0.6
model = tf.keras.Sequential()
model.add(Dense(100, name='hidden1', input_dim=3))
model.add(Activation('relu'))
model.add(BatchNormalization())
model.add(Dropout(dropout))
model.add(Dense(100, name='hidden2'))
model.add(Activation('relu'))
model.add(BatchNormalization())
model.add(Dropout(dropout))
model.add(Dense(num_categories, name='softmax', activation='softmax'))
model.compile(loss='sparse_categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
model.summary()
    
    
    
In [10]:
    
%%time 
BATCH_SIZE=1000
EPOCHS = 2000
history = model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCH_SIZE, validation_split=0.2, verbose=0)
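
Training for a fixed 2000 epochs works here; as a variation, the standard EarlyStopping callback could stop the run once the validation loss stops improving. The cell below is only a sketch of that alternative, not what was run above:

In [0]:
    
# alternative sketch: stop training when the validation loss plateaus
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=50,
                                           restore_best_weights=True)
history = model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCH_SIZE,
                    validation_split=0.2, verbose=0, callbacks=[early_stop])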
    
    
In [11]:
    
train_loss, train_accuracy = model.evaluate(X_train, y_train, batch_size=BATCH_SIZE)
train_loss, train_accuracy
    
    
In [12]:
    
test_loss, test_accuracy = model.evaluate(X_test, y_test, batch_size=BATCH_SIZE)
test_loss, test_accuracy
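
Beyond aggregate accuracy, a confusion matrix shows which of the three customer groups get mixed up. The cell below is an optional sketch using scikit-learn:

In [0]:
    
# sketch: per-class breakdown of the test predictions
from sklearn.metrics import confusion_matrix
y_pred = np.argmax(model.predict(X_test), axis=-1)
print(confusion_matrix(y_test, y_pred))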
    
    
In [15]:
    
# plt.yscale('log')
plt.ylabel("accuracy")
plt.xlabel("epochs")
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.legend(["Accuracy", "Valdation Accuracy"])
    
    
In [0]:
    
model.save('insurance.h5')
    
In [17]:
    
# the saved model file is fairly small, since the network has only a little more than 10,000 parameters
!ls -l insurance.h5
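
To check that the saved HDF5 file can be used later, one can reload it and evaluate it again; a minimal sketch:

In [0]:
    
# sketch: reload the saved model and confirm it still scores the test set
loaded_model = keras.models.load_model('insurance.h5')
loaded_model.evaluate(X_test, y_test, batch_size=BATCH_SIZE)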