In [15]:
from __future__ import division

import pandas as pd
import numpy as np
np.random.seed(1)

from sklearn.model_selection import StratifiedShuffleSplit, ShuffleSplit
from sklearn.metrics import accuracy_score, precision_score, recall_score
from keras import optimizers
from keras.models import Sequential, load_model
from keras.layers import Dense, Dropout
from keras.callbacks import EarlyStopping
from imblearn.over_sampling import SMOTE

In [2]:
data = pd.read_csv("data/creditcard.csv")

In [3]:
# features are the PCA components V1-V28 (columns 1:29); Time and Amount are left out, Class is the label
X, y = data[data.columns[1:29]].values, data.Class.values

In [4]:
sss = StratifiedShuffleSplit(n_splits=1, test_size=0.5, random_state=0)

In [5]:
for train_index, test_index in sss.split(X, y):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]

In [6]:
def network_1(X_train, y_train):
    model = Sequential()
    model.add(Dense(1024, activation="sigmoid", input_dim=X_train.shape[1]))
    model.add(Dense(512, activation="sigmoid"))
    model.add(Dense(256, activation="sigmoid"))
    model.add(Dense(128, activation="sigmoid"))
    model.add(Dense(64, activation="sigmoid"))
    model.add(Dense(32, activation="sigmoid"))
    model.add(Dense(16, activation="sigmoid"))
    model.add(Dropout(0.5))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="rmsprop",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    early_stopping = EarlyStopping(monitor="loss", patience=4)
    model.fit(X_train, y_train, epochs=20, batch_size=32, callbacks=[early_stopping], verbose=1)
    return model

In [7]:
def network_2(X_train, y_train):
    model = Sequential()
    model.add(Dense(256, activation="sigmoid", input_dim=X_train.shape[1]))
    model.add(Dense(128, activation="sigmoid"))
    model.add(Dense(64, activation="sigmoid"))
    model.add(Dense(32, activation="sigmoid"))
    model.add(Dense(16, activation="sigmoid"))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="rmsprop",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    early_stopping = EarlyStopping(monitor="loss", patience=4)
    model.fit(X_train, y_train, epochs=20, batch_size=32, callbacks=[early_stopping], verbose=1)
    return model

In [26]:
def evaluation(y_true, y_pred):
    acc = accuracy_score(y_true, y_pred)
    prec = precision_score(y_true, y_pred)
    rec = recall_score(y_true, y_pred)
    return acc, prec, rec
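
Accuracy alone says little at this class ratio, which is why precision and recall are tracked as well. If a single summary number is wanted, the helper extends naturally to F1; a minimal sketch (not used in the cells below):

from sklearn.metrics import f1_score

def evaluation_with_f1(y_true, y_pred):
    # accuracy, precision, recall as above, plus their harmonic mean (F1)
    return (accuracy_score(y_true, y_pred),
            precision_score(y_true, y_pred),
            recall_score(y_true, y_pred),
            f1_score(y_true, y_pred))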

In [9]:
model_1 = network_1(X_train, y_train)


Epoch 1/20
142403/142403 [==============================] - 28s - loss: 0.0275 - acc: 0.9960    
Epoch 2/20
142403/142403 [==============================] - 31s - loss: 0.0152 - acc: 0.9983    
Epoch 3/20
142403/142403 [==============================] - 32s - loss: 0.0095 - acc: 0.9983    
Epoch 4/20
142403/142403 [==============================] - 30s - loss: 0.0066 - acc: 0.9983    
Epoch 5/20
142403/142403 [==============================] - 30s - loss: 0.0062 - acc: 0.9993    
Epoch 6/20
142403/142403 [==============================] - 32s - loss: 0.0059 - acc: 0.9994    
Epoch 7/20
142403/142403 [==============================] - 30s - loss: 0.0059 - acc: 0.9994    
Epoch 8/20
142403/142403 [==============================] - 31s - loss: 0.0144 - acc: 0.9989    
Epoch 9/20
142403/142403 [==============================] - 31s - loss: 0.0152 - acc: 0.9989    
Epoch 10/20
142403/142403 [==============================] - 30s - loss: 0.0181 - acc: 0.9987    
Epoch 11/20
142403/142403 [==============================] - 30s - loss: 0.0279 - acc: 0.9983    
Epoch 12/20
142403/142403 [==============================] - 33s - loss: 0.0279 - acc: 0.9983    

In [10]:
y_test_pred = model_1.predict_classes(X_test)
network_1_results = evaluation(y_test, y_test_pred)
print(network_1_results)


141888/142404 [============================>.] - ETA: 0s
(0.99827252043481929, 0.0, 0.0)
/Users/datitran/anaconda/envs/fraud-detection/lib/python2.7/site-packages/sklearn/metrics/classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 due to no predicted samples.
  'precision', 'predicted', average, warn_for)
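
The ~0.998 accuracy is deceptive: the warning means model_1 never predicts the fraud class, so its accuracy is exactly the majority-class baseline. A quick check (baseline_acc is just an illustrative name):

# accuracy of always predicting "not fraud" on the test half
baseline_acc = 1 - y_test.sum() / len(y_test)
print(baseline_acc)  # ~0.99827, identical to model_1's score above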

In [11]:
model_2 = network_2(X_train, y_train)


Epoch 1/20
142403/142403 [==============================] - 9s - loss: 0.0199 - acc: 0.9966     
Epoch 2/20
142403/142403 [==============================] - 9s - loss: 0.0045 - acc: 0.9991     
Epoch 3/20
142403/142403 [==============================] - 9s - loss: 0.0057 - acc: 0.9992     
Epoch 4/20
142403/142403 [==============================] - 9s - loss: 0.0060 - acc: 0.9992     
Epoch 5/20
142403/142403 [==============================] - 9s - loss: 0.0068 - acc: 0.9990     
Epoch 6/20
142403/142403 [==============================] - 9s - loss: 0.0064 - acc: 0.9994     
Epoch 7/20
142403/142403 [==============================] - 10s - loss: 0.0108 - acc: 0.9975    

In [12]:
y_test_pred = model_2.predict_classes(X_test)
network_2_results = evaluation(y_test, y_test_pred)
print(network_2_results)


142400/142404 [============================>.] - ETA: 0s
(0.99934692845706585, 0.83700440528634357, 0.77235772357723576)

Balanced dataset

Undersample


In [108]:
fraudulent = data[data.Class == 1]
normal = data[data.Class == 0]

In [109]:
# sample as many normal transactions as there are frauds (492)
normal_sample = normal.sample(fraudulent.Class.count(), random_state=0)

In [110]:
data_undersample = pd.concat([fraudulent, normal_sample])

In [111]:
X, y = data_undersample[data_undersample.columns[1:29]].values, data_undersample.Class.values

In [112]:
sss = StratifiedShuffleSplit(n_splits=1, test_size=0.5, random_state=0)

In [113]:
for train_index, test_index in sss.split(X, y):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]

In [114]:
def network_3(X_train, y_train):
    model = Sequential()
    model.add(Dense(256, activation="sigmoid", input_dim=X_train.shape[1]))
    model.add(Dense(128, activation="sigmoid"))
    model.add(Dense(64, activation="sigmoid"))
    model.add(Dense(32, activation="sigmoid"))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="rmsprop",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    early_stopping = EarlyStopping(monitor="loss", patience=10)
    model.fit(X_train, y_train, epochs=100, batch_size=32, callbacks=[early_stopping], verbose=1)
    return model

In [115]:
model_3 = network_3(X_train, y_train)


Epoch 1/100
492/492 [==============================] - 0s - loss: 0.6322 - acc: 0.7500     
Epoch 2/100
492/492 [==============================] - 0s - loss: 0.4369 - acc: 0.9126     
Epoch 3/100
492/492 [==============================] - 0s - loss: 0.2955 - acc: 0.9167     
Epoch 4/100
492/492 [==============================] - 0s - loss: 0.2290 - acc: 0.9350     
Epoch 5/100
492/492 [==============================] - 0s - loss: 0.2006 - acc: 0.9390     
Epoch 6/100
492/492 [==============================] - 0s - loss: 0.1842 - acc: 0.9350     
Epoch 7/100
492/492 [==============================] - 0s - loss: 0.1744 - acc: 0.9431     
Epoch 8/100
492/492 [==============================] - 0s - loss: 0.1699 - acc: 0.9431     
Epoch 9/100
492/492 [==============================] - 0s - loss: 0.1594 - acc: 0.9431     
Epoch 10/100
492/492 [==============================] - 0s - loss: 0.1599 - acc: 0.9431     
Epoch 11/100
492/492 [==============================] - 0s - loss: 0.1614 - acc: 0.9431     
Epoch 12/100
492/492 [==============================] - 0s - loss: 0.1570 - acc: 0.9451     
Epoch 13/100
492/492 [==============================] - 0s - loss: 0.1524 - acc: 0.9492     
Epoch 14/100
492/492 [==============================] - 0s - loss: 0.1440 - acc: 0.9533     
Epoch 15/100
492/492 [==============================] - 0s - loss: 0.1481 - acc: 0.9472     
Epoch 16/100
492/492 [==============================] - 0s - loss: 0.1491 - acc: 0.9512     
Epoch 17/100
492/492 [==============================] - 0s - loss: 0.1443 - acc: 0.9492     
Epoch 18/100
492/492 [==============================] - 0s - loss: 0.1472 - acc: 0.9492     
Epoch 19/100
492/492 [==============================] - 0s - loss: 0.1459 - acc: 0.9492     
Epoch 20/100
492/492 [==============================] - 0s - loss: 0.1367 - acc: 0.9492     
Epoch 21/100
492/492 [==============================] - 0s - loss: 0.1484 - acc: 0.9431     
Epoch 22/100
492/492 [==============================] - 0s - loss: 0.1331 - acc: 0.9512     
Epoch 23/100
492/492 [==============================] - 0s - loss: 0.1376 - acc: 0.9472     
Epoch 24/100
492/492 [==============================] - 0s - loss: 0.1333 - acc: 0.9492     
Epoch 25/100
492/492 [==============================] - 0s - loss: 0.1368 - acc: 0.9512     
Epoch 26/100
492/492 [==============================] - 0s - loss: 0.1392 - acc: 0.9431     
Epoch 27/100
492/492 [==============================] - 0s - loss: 0.1304 - acc: 0.9512     
Epoch 28/100
492/492 [==============================] - 0s - loss: 0.1297 - acc: 0.9593     
Epoch 29/100
492/492 [==============================] - 0s - loss: 0.1400 - acc: 0.9492     
Epoch 30/100
492/492 [==============================] - 0s - loss: 0.1351 - acc: 0.9492     
Epoch 31/100
492/492 [==============================] - 0s - loss: 0.1237 - acc: 0.9533     
Epoch 32/100
492/492 [==============================] - 0s - loss: 0.1273 - acc: 0.9472     
Epoch 33/100
492/492 [==============================] - 0s - loss: 0.1341 - acc: 0.9492     
Epoch 34/100
492/492 [==============================] - 0s - loss: 0.1244 - acc: 0.9593     
Epoch 35/100
492/492 [==============================] - 0s - loss: 0.1255 - acc: 0.9512     
Epoch 36/100
492/492 [==============================] - 0s - loss: 0.1276 - acc: 0.9533     
Epoch 37/100
492/492 [==============================] - 0s - loss: 0.1229 - acc: 0.9533     
Epoch 38/100
492/492 [==============================] - 0s - loss: 0.1306 - acc: 0.9451     
Epoch 39/100
492/492 [==============================] - 0s - loss: 0.1267 - acc: 0.9512     
Epoch 40/100
492/492 [==============================] - 0s - loss: 0.1232 - acc: 0.9553     
Epoch 41/100
492/492 [==============================] - 0s - loss: 0.1213 - acc: 0.9533     
Epoch 42/100
492/492 [==============================] - 0s - loss: 0.1220 - acc: 0.9614     
Epoch 43/100
492/492 [==============================] - 0s - loss: 0.1232 - acc: 0.9533     
Epoch 44/100
492/492 [==============================] - 0s - loss: 0.1281 - acc: 0.9472     
Epoch 45/100
492/492 [==============================] - 0s - loss: 0.1203 - acc: 0.9512     
Epoch 46/100
492/492 [==============================] - 0s - loss: 0.1208 - acc: 0.9533     
Epoch 47/100
492/492 [==============================] - 0s - loss: 0.1227 - acc: 0.9512     
Epoch 48/100
492/492 [==============================] - 0s - loss: 0.1121 - acc: 0.9593     
Epoch 49/100
492/492 [==============================] - 0s - loss: 0.1236 - acc: 0.9512     
Epoch 50/100
492/492 [==============================] - 0s - loss: 0.1101 - acc: 0.9573     
Epoch 51/100
492/492 [==============================] - 0s - loss: 0.1194 - acc: 0.9533     
Epoch 52/100
492/492 [==============================] - 0s - loss: 0.1111 - acc: 0.9593     
Epoch 53/100
492/492 [==============================] - 0s - loss: 0.1154 - acc: 0.9553     
Epoch 54/100
492/492 [==============================] - 0s - loss: 0.1168 - acc: 0.9553     
Epoch 55/100
492/492 [==============================] - 0s - loss: 0.1175 - acc: 0.9573     
Epoch 56/100
492/492 [==============================] - 0s - loss: 0.1154 - acc: 0.9512     
Epoch 57/100
492/492 [==============================] - 0s - loss: 0.1115 - acc: 0.9593     
Epoch 58/100
492/492 [==============================] - 0s - loss: 0.1147 - acc: 0.9512     
Epoch 59/100
492/492 [==============================] - 0s - loss: 0.1101 - acc: 0.9553     
Epoch 60/100
492/492 [==============================] - 0s - loss: 0.1164 - acc: 0.9593     
Epoch 61/100
492/492 [==============================] - 0s - loss: 0.1119 - acc: 0.9472     

In [116]:
y_test_pred = model_3.predict_classes(X_test)
network_3_results = evaluation(y_test, y_test_pred)
print(network_3_results)


 32/492 [>.............................] - ETA: 1s
(0.91463414634146345, 0.90800000000000003, 0.92276422764227639)

In [117]:
# evaluate on the full undersampled dataset (both halves)
y_pred_all = model_3.predict_classes(X)
network_3_all = evaluation(y, y_pred_all)
print(network_3_all)


 32/984 [..............................] - ETA: 0s
(0.93292682926829273, 0.92600000000000005, 0.94105691056910568)

In [324]:
model_3.save("models/balance_model.h5")

Oversample


In [18]:
fraudulent = data[data.Class == 1]
normal = data[data.Class == 0]

In [19]:
fraudulent.Class.count(), normal.Class.count()


Out[19]:
(492, 284315)

In [25]:
normal.Class.count() / fraudulent.Class.count()


Out[25]:
577.8760162601626

In [28]:
# replicate the 492 frauds 578 times (the ~577.9 ratio above, rounded up) to roughly match the normal count
fraudulent_oversample = pd.concat([fraudulent]*578, ignore_index=True)

In [29]:
fraudulent_oversample.Class.count()


Out[29]:
284376

In [30]:
data_oversample = pd.concat([fraudulent_oversample, normal])

In [33]:
data_oversample.Class.value_counts(normalize=True)


Out[33]:
1    0.500054
0    0.499946
Name: Class, dtype: float64

In [34]:
X, y = data_oversample[data_oversample.columns[1:29]].values, data_oversample.Class.values

In [35]:
sss = StratifiedShuffleSplit(n_splits=1, test_size=0.5, random_state=0)

In [36]:
for train_index, test_index in sss.split(X, y):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]

In [39]:
def network_4(X_train, y_train):
    model = Sequential()
    model.add(Dense(64, activation="sigmoid", input_dim=X_train.shape[1]))
    model.add(Dense(32, activation="sigmoid"))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="rmsprop",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    early_stopping = EarlyStopping(monitor="loss", patience=4)
    model.fit(X_train, y_train, epochs=20, batch_size=32, callbacks=[early_stopping], verbose=1)
    return model

In [40]:
model_4 = network_4(X_train, y_train)


Epoch 1/20
284345/284345 [==============================] - 15s - loss: 0.1015 - acc: 0.9612    
Epoch 2/20
284345/284345 [==============================] - 15s - loss: 0.0301 - acc: 0.9907    
Epoch 3/20
284345/284345 [==============================] - 13s - loss: 0.0142 - acc: 0.9965    
Epoch 4/20
284345/284345 [==============================] - 13s - loss: 0.0126 - acc: 0.9971    
Epoch 5/20
284345/284345 [==============================] - 13s - loss: 0.0119 - acc: 0.9974    
Epoch 6/20
284345/284345 [==============================] - 13s - loss: 0.0119 - acc: 0.9975    
Epoch 7/20
284345/284345 [==============================] - 13s - loss: 0.0115 - acc: 0.9976    
Epoch 8/20
284345/284345 [==============================] - 14s - loss: 0.0109 - acc: 0.9977    
Epoch 9/20
284345/284345 [==============================] - 13s - loss: 0.0102 - acc: 0.9977    
Epoch 10/20
284345/284345 [==============================] - 13s - loss: 0.0100 - acc: 0.9977    
Epoch 11/20
284345/284345 [==============================] - 13s - loss: 0.0099 - acc: 0.9977    
Epoch 12/20
284345/284345 [==============================] - 13s - loss: 0.0107 - acc: 0.9974    
Epoch 13/20
284345/284345 [==============================] - 13s - loss: 0.0106 - acc: 0.9975    
Epoch 14/20
284345/284345 [==============================] - 13s - loss: 0.0099 - acc: 0.9977    
Epoch 15/20
284345/284345 [==============================] - 12s - loss: 0.0093 - acc: 0.9978    
Epoch 16/20
284345/284345 [==============================] - 12s - loss: 0.0093 - acc: 0.9977    
Epoch 17/20
284345/284345 [==============================] - 12s - loss: 0.0087 - acc: 0.9978    
Epoch 18/20
284345/284345 [==============================] - 12s - loss: 0.0081 - acc: 0.9979    
Epoch 19/20
284345/284345 [==============================] - 14s - loss: 0.0077 - acc: 0.9981    
Epoch 20/20
284345/284345 [==============================] - 14s - loss: 0.0072 - acc: 0.9981    

In [43]:
y_test_pred = model_4.predict_classes(X_test)
network_4_results = evaluation(y_test, y_test_pred)
print(network_4_results)


281152/284346 [============================>.] - ETA: 0s
(0.99822047786851231, 0.99645395041136975, 1.0)

SMOTE
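
Caveat: SMOTE is applied to the full dataset before splitting, so synthetic test points are interpolated from training neighbours and the test metrics below are somewhat optimistic. A leakage-free variant (a sketch, not run here) would split first and resample only the training fold:

# split the original data first, then oversample the training fold only
from sklearn.model_selection import train_test_split

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2,
                                          stratify=y, random_state=0)
X_tr_res, y_tr_res = SMOTE(random_state=42).fit_sample(X_tr, y_tr)
# train on (X_tr_res, y_tr_res); evaluate on the untouched (X_te, y_te)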

In [4]:
sm = SMOTE(random_state=42)

In [5]:
X, y = data[data.columns[1:29]].values, data.Class.values

In [6]:
X_res, y_res = sm.fit_sample(X, y)  # renamed fit_resample in newer imblearn versions

In [7]:
pd.Series(y_res).value_counts()


Out[7]:
1    284315
0    284315
dtype: int64

In [16]:
# a plain (non-stratified) split suffices now that the classes are balanced
rs = ShuffleSplit(n_splits=1, test_size=0.2, random_state=0)

In [17]:
for train_index, test_index in rs.split(X_res, y_res):
    X_train, X_test = X_res[train_index], X_res[test_index]
    y_train, y_test = y_res[train_index], y_res[test_index]

In [20]:
X_train.shape, X_test.shape


Out[20]:
((454904, 28), (113726, 28))

In [60]:
def network_5(X_train, y_train):
    model = Sequential()
    model.add(Dense(64, activation="sigmoid", input_dim=X_train.shape[1]))
    model.add(Dense(32, activation="sigmoid"))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="rmsprop",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    early_stopping = EarlyStopping(monitor="loss", patience=4)
    model.fit(X_train, y_train, epochs=20, batch_size=32, callbacks=[early_stopping], verbose=1)
    return model

In [61]:
model_5 = network_5(X_train, y_train)


Epoch 1/20
284315/284315 [==============================] - 15s - loss: 0.0970 - acc: 0.9618    
Epoch 2/20
284315/284315 [==============================] - 14s - loss: 0.0297 - acc: 0.9901    
Epoch 3/20
284315/284315 [==============================] - 14s - loss: 0.0145 - acc: 0.9961    
Epoch 4/20
284315/284315 [==============================] - 16s - loss: 0.0119 - acc: 0.9969    
Epoch 5/20
284315/284315 [==============================] - 13s - loss: 0.0111 - acc: 0.9972    
Epoch 6/20
284315/284315 [==============================] - 13s - loss: 0.0108 - acc: 0.9974    
Epoch 7/20
284315/284315 [==============================] - 14s - loss: 0.0105 - acc: 0.9974    
Epoch 8/20
284315/284315 [==============================] - 15s - loss: 0.0106 - acc: 0.9974    
Epoch 9/20
284315/284315 [==============================] - 13s - loss: 0.0106 - acc: 0.9974    
Epoch 10/20
284315/284315 [==============================] - 14s - loss: 0.0106 - acc: 0.9974    
Epoch 11/20
284315/284315 [==============================] - 13s - loss: 0.0109 - acc: 0.9973    
Epoch 12/20
284315/284315 [==============================] - 14s - loss: 0.0106 - acc: 0.9973    

In [62]:
y_test_pred = model_5.predict_classes(X_test)
network_5_results = evaluation(y_test, y_test_pred)
print(network_5_results)


282080/284315 [============================>.] - ETA: 0s
(0.99747463201027031, 0.99619014208033674, 0.99876896670582527)

In [21]:
def network_6(X_train, y_train):
    model = Sequential()
    model.add(Dense(256, activation="sigmoid", input_dim=X_train.shape[1]))
    model.add(Dense(128, activation="sigmoid"))
    model.add(Dense(64, activation="sigmoid"))
    model.add(Dense(32, activation="sigmoid"))
    model.add(Dropout(0.5))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer="rmsprop",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    early_stopping = EarlyStopping(monitor="val_loss", patience=4)
    model.fit(X_train, y_train, epochs=20, batch_size=32, validation_split=0.2, callbacks=[early_stopping], verbose=1)
    return model

In [22]:
model_6 = network_6(X_train, y_train)


Train on 363923 samples, validate on 90981 samples
Epoch 1/20
363923/363923 [==============================] - 28s - loss: 0.1022 - acc: 0.9587 - val_loss: 0.0478 - val_acc: 0.9818
Epoch 2/20
363923/363923 [==============================] - 29s - loss: 0.0372 - acc: 0.9871 - val_loss: 0.0240 - val_acc: 0.9936
Epoch 3/20
363923/363923 [==============================] - 30s - loss: 0.0240 - acc: 0.9933 - val_loss: 0.0167 - val_acc: 0.9948
Epoch 4/20
363923/363923 [==============================] - 28s - loss: 0.0197 - acc: 0.9953 - val_loss: 0.0137 - val_acc: 0.9971
Epoch 5/20
363923/363923 [==============================] - 28s - loss: 0.0176 - acc: 0.9960 - val_loss: 0.0226 - val_acc: 0.9941
Epoch 6/20
363923/363923 [==============================] - 29s - loss: 0.0163 - acc: 0.9964 - val_loss: 0.0101 - val_acc: 0.9982
Epoch 7/20
363923/363923 [==============================] - 31s - loss: 0.0159 - acc: 0.9967 - val_loss: 0.0123 - val_acc: 0.9976
Epoch 8/20
363923/363923 [==============================] - 30s - loss: 0.0154 - acc: 0.9970 - val_loss: 0.0087 - val_acc: 0.9983
Epoch 9/20
363923/363923 [==============================] - 29s - loss: 0.0153 - acc: 0.9972 - val_loss: 0.0172 - val_acc: 0.9976
Epoch 10/20
363923/363923 [==============================] - 29s - loss: 0.0146 - acc: 0.9973 - val_loss: 0.0083 - val_acc: 0.9984
Epoch 11/20
363923/363923 [==============================] - 28s - loss: 0.0144 - acc: 0.9974 - val_loss: 0.0143 - val_acc: 0.9966
Epoch 12/20
363923/363923 [==============================] - 29s - loss: 0.0135 - acc: 0.9974 - val_loss: 0.0106 - val_acc: 0.9987
Epoch 13/20
363923/363923 [==============================] - 31s - loss: 0.0132 - acc: 0.9979 - val_loss: 0.0123 - val_acc: 0.9982
Epoch 14/20
363923/363923 [==============================] - 29s - loss: 0.0139 - acc: 0.9978 - val_loss: 0.0115 - val_acc: 0.9986
Epoch 15/20
363923/363923 [==============================] - 29s - loss: 0.0132 - acc: 0.9978 - val_loss: 0.0124 - val_acc: 0.9984

In [27]:
y_test_pred = model_6.predict_classes(X_test)
network_6_results = evaluation(y_test, y_test_pred)
print(network_6_results)


112832/113726 [============================>.] - ETA: 0s
(0.99839086928231013, 0.9967998601031739, 1.0)

In [29]:
# evaluate on the original, imbalanced data (X, y as defined before resampling)
y_pred_full = model_6.predict_classes(X)
full_data_results = evaluation(y, y_pred_full)
print(full_data_results)


283328/284807 [============================>.] - ETA: 0s
(0.99686805450708726, 0.3554913294797688, 1.0)
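
The precision collapse on the original data is a base-rate effect: with roughly 578 normal transactions per fraud, even a sub-percent false-positive rate produces more false alarms than there are frauds. Rough arithmetic implied by the printed metrics:

# confusion-matrix entries implied by the printed precision/recall
tp = 492                                 # recall 1.0: every fraud caught
fp = tp * (1 / 0.3554913294797688 - 1)   # ~892 false positives
print(fp, fp / 284315)                   # only ~0.3% of normals misflagged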

Pre-trained weights

  • Train a network on the balanced (undersampled) dataset and save the model
  • Use the saved model as the initial weights for training on the imbalanced dataset

In [336]:
X, y = data[data.columns[1:29]].values, data.Class.values

In [337]:
sss = StratifiedShuffleSplit(n_splits=1, test_size=0.5, random_state=0)

In [338]:
for train_index, test_index in sss.split(X, y):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]

In [339]:
model = load_model("models/balance_model.h5")

In [340]:
model.layers


Out[340]:
[<keras.layers.core.Dense at 0x128c01150>,
 <keras.layers.core.Dense at 0x127c5c690>,
 <keras.layers.core.Dense at 0x11f5808d0>,
 <keras.layers.core.Dense at 0x12bff8b90>,
 <keras.layers.core.Dense at 0x12cb4b750>]

In [341]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_70 (Dense)             (None, 256)               7424      
_________________________________________________________________
dense_71 (Dense)             (None, 128)               32896     
_________________________________________________________________
dense_72 (Dense)             (None, 64)                8256      
_________________________________________________________________
dense_73 (Dense)             (None, 32)                2080      
_________________________________________________________________
dense_74 (Dense)             (None, 1)                 33        
=================================================================
Total params: 50,689
Trainable params: 50,689
Non-trainable params: 0
_________________________________________________________________

In [342]:
# remove the last two Dense layers (the 32-unit layer and the output)
model.layers.pop()
model.layers.pop()


Out[342]:
<keras.layers.core.Dense at 0x12bff8b90>

In [343]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_70 (Dense)             (None, 256)               7424      
_________________________________________________________________
dense_71 (Dense)             (None, 128)               32896     
_________________________________________________________________
dense_72 (Dense)             (None, 64)                8256      
=================================================================
Total params: 48,576
Trainable params: 48,576
Non-trainable params: 0
_________________________________________________________________

In [344]:
top_model = Sequential()
top_model.add(Dense(32, activation="sigmoid", input_shape=model.output_shape[1:]))
top_model.add(Dense(1, activation="sigmoid"))

In [345]:
model.add(top_model)

In [346]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_70 (Dense)             (None, 256)               7424      
_________________________________________________________________
dense_71 (Dense)             (None, 128)               32896     
_________________________________________________________________
dense_72 (Dense)             (None, 64)                8256      
_________________________________________________________________
sequential_54 (Sequential)   (None, 1)                 97        
=================================================================
Total params: 48,673
Trainable params: 48,673
Non-trainable params: 0
_________________________________________________________________

In [347]:
# freeze the three pretrained Dense layers; only the new top block stays trainable
for layer in model.layers[:3]:
    layer.trainable = False

In [348]:
# fine-tuning should be done with a low learning rate
model.compile(optimizer=optimizers.SGD(lr=1e-4, momentum=0.9),
              loss="binary_crossentropy",
              metrics=["accuracy"])
early_stopping = EarlyStopping(monitor="loss", patience=4)
model.fit(X_train, y_train, epochs=20, batch_size=32, callbacks=[early_stopping], verbose=1)


Epoch 1/20
142403/142403 [==============================] - 7s - loss: 0.4487 - acc: 0.9983     
Epoch 2/20
142403/142403 [==============================] - 6s - loss: 0.4487 - acc: 0.9983     
Epoch 3/20
142403/142403 [==============================] - 6s - loss: 0.4487 - acc: 0.9983     
Epoch 4/20
142403/142403 [==============================] - 6s - loss: 0.4486 - acc: 0.9983     
Epoch 5/20
142403/142403 [==============================] - 6s - loss: 0.4485 - acc: 0.9983     
Epoch 6/20
142403/142403 [==============================] - 6s - loss: 0.4485 - acc: 0.9983     
Epoch 7/20
142403/142403 [==============================] - 7s - loss: 0.4484 - acc: 0.9983     
Epoch 8/20
142403/142403 [==============================] - 6s - loss: 0.4484 - acc: 0.9983     
Epoch 9/20
142403/142403 [==============================] - 6s - loss: 0.4483 - acc: 0.9983     
Epoch 10/20
142403/142403 [==============================] - 6s - loss: 0.4482 - acc: 0.9983     
Epoch 11/20
142403/142403 [==============================] - 6s - loss: 0.4481 - acc: 0.9983     
Epoch 12/20
142403/142403 [==============================] - 6s - loss: 0.4480 - acc: 0.9983     
Epoch 13/20
142403/142403 [==============================] - 6s - loss: 0.4479 - acc: 0.9983     
Epoch 14/20
142403/142403 [==============================] - 6s - loss: 0.4478 - acc: 0.9983     
Epoch 15/20
142403/142403 [==============================] - 6s - loss: 0.4477 - acc: 0.9983     
Epoch 16/20
142403/142403 [==============================] - 6s - loss: 0.4476 - acc: 0.9983     
Epoch 17/20
142403/142403 [==============================] - 6s - loss: 0.4475 - acc: 0.9983     
Epoch 18/20
142403/142403 [==============================] - 6s - loss: 0.4473 - acc: 0.9983     
Epoch 19/20
142403/142403 [==============================] - 6s - loss: 0.4472 - acc: 0.9983     
Epoch 20/20
142403/142403 [==============================] - 7s - loss: 0.4470 - acc: 0.9983     
Out[348]:
<keras.callbacks.History at 0x12d0fe6d0>

In [349]:
y_test_pred = model.predict_classes(X_test)
fine_tuned_results = evaluation(y_test, y_test_pred)
print(fine_tuned_results)


142304/142404 [============================>.] - ETA: 0s
(0.99827252043481929, 0.0, 0.0)
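
Accuracy 0.99827 with zero precision and recall is once more the majority-class baseline: in this run the fine-tuned model flags no transaction as fraud. A quick check, using y_test_pred from the cell above:

print(y_test_pred.sum())  # 0 -> not a single positive prediction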

In [ ]: