In [1]:
import numpy as np 
import pandas as pd

In [2]:
# Load the bank-churn dataset (expects Churn_Modelling.csv in the working directory).
data = pd.read_csv('Churn_Modelling.csv')

In [3]:
# Quick sanity check of the first rows / column layout.
data.head()


Out[3]:
RowNumber CustomerId Surname CreditScore Geography Gender Age Tenure Balance NumOfProducts HasCrCard IsActiveMember EstimatedSalary Exited
0 1 15634602 Hargrave 619 France Female 42 2 0.00 1 1 1 101348.88 1
1 2 15647311 Hill 608 Spain Female 41 1 83807.86 1 0 1 112542.58 0
2 3 15619304 Onio 502 France Female 42 8 159660.80 3 1 0 113931.57 1
3 4 15701354 Boni 699 France Female 39 1 0.00 2 0 0 93826.63 0
4 5 15737888 Mitchell 850 Spain Female 43 2 125510.82 1 1 1 79084.10 0

In [4]:
# Features: columns CreditScore..EstimatedSalary (drops RowNumber, CustomerId and
# Surname, which are identifiers with no predictive signal); target: Exited.
# Label-based selection replaces the fragile positional iloc[:, 3:13] / iloc[:, 13],
# so the code still works if column order ever changes.
x = data.loc[:, 'CreditScore':'EstimatedSalary'].values
y = data['Exited'].values

In [5]:
from sklearn.preprocessing import LabelEncoder,OneHotEncoder,StandardScaler

In [6]:
# Separate encoders: one for Geography (column 1), one for Gender (column 2),
# so each keeps its own fitted category-to-integer mapping.
labelencoder1 = LabelEncoder()
labelencoder2 = LabelEncoder()

In [7]:
# Integer-encode the two categorical feature columns.
x[:, 1] = labelencoder1.fit_transform(x[:, 1])  # Geography -> 0..2

x[:, 2] = labelencoder2.fit_transform(x[:, 2])  # Gender -> 0/1

# `categorical_features` was removed from OneHotEncoder in scikit-learn 0.22;
# ColumnTransformer is the supported way to one-hot encode a single column.
# drop='first' removes one dummy column (avoids the dummy-variable trap),
# replacing the manual `x = x[:, 1:]` slice; sparse_threshold=0 forces a dense
# ndarray, replacing `.toarray()`. Encoded columns come first, passthrough
# columns follow — the same layout the old categorical_features API produced.
from sklearn.compose import ColumnTransformer

onehot_ct = ColumnTransformer(
    [('geography', OneHotEncoder(drop='first'), [1])],
    remainder='passthrough',
    sparse_threshold=0,
)

x = onehot_ct.fit_transform(x).astype(float)  # 2 geo dummies + 9 passthrough = 11 features

In [8]:
from sklearn.model_selection import train_test_split
# 85/15 train/test split; fixed random_state so the split is reproducible.
x_train,x_test,y_train,y_test = train_test_split(x,y,test_size=0.15,random_state = 0)
# Scaler is created here but only fitted in a later cell.
sc = StandardScaler()


c:\program files\python36\lib\site-packages\sklearn\cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.
  "This module will be removed in 0.20.", DeprecationWarning)

In [11]:
# Fit the scaler on the training set only, then apply the same transform to the
# test set — avoids leaking test-set statistics into training.
x_train = sc.fit_transform(x_train)
x_test = sc.transform(x_test)

In [12]:
from keras.models import Sequential
from keras.layers import Dense


Using TensorFlow backend.

In [13]:
# Feed-forward ANN for binary churn classification, built layer by layer.
classifier = Sequential()

In [14]:
# Two ReLU hidden layers of 6 units on the 11 scaled inputs, then a single
# sigmoid unit emitting the churn probability. All layers share the same
# 'uniform' weight initializer.
hidden = dict(kernel_initializer='uniform', activation='relu')
classifier.add(Dense(units=6, input_dim=11, **hidden))
classifier.add(Dense(units=6, **hidden))
classifier.add(Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))

In [15]:
# Binary classification setup: sigmoid output + binary cross-entropy loss, Adam optimizer.
classifier.compile(optimizer='adam',loss='binary_crossentropy',metrics=['accuracy'])

In [16]:
# Train on mini-batches of 10 for 100 epochs (no validation split here; the
# held-out test set is evaluated in the cells below).
classifier.fit(x_train,y_train,batch_size = 10,epochs=100)


Epoch 1/100
8500/8500 [==============================] - 1s - loss: 0.4831 - acc: 0.7913     - ETA: 0s - loss: 0.4946 - acc: 0.
Epoch 2/100
8500/8500 [==============================] - 0s - loss: 0.4312 - acc: 0.7921     - ETA: 0s - loss: 0.4287 -
Epoch 3/100
8500/8500 [==============================] - 1s - loss: 0.4251 - acc: 0.7993     
Epoch 4/100
8500/8500 [==============================] - 1s - loss: 0.4206 - acc: 0.8224     
Epoch 5/100
8500/8500 [==============================] - 0s - loss: 0.4181 - acc: 0.8248     
Epoch 6/100
8500/8500 [==============================] - 0s - loss: 0.4157 - acc: 0.8305     - ETA: 0s - loss: 0.4214 -
Epoch 7/100
8500/8500 [==============================] - 0s - loss: 0.4140 - acc: 0.8309     
Epoch 8/100
8500/8500 [==============================] - 1s - loss: 0.4126 - acc: 0.8321     - ETA: 0s - loss: 0.4136 - 
Epoch 9/100
8500/8500 [==============================] - 1s - loss: 0.4115 - acc: 0.8338     
Epoch 10/100
8500/8500 [==============================] - 0s - loss: 0.4103 - acc: 0.8352     
Epoch 11/100
8500/8500 [==============================] - 0s - loss: 0.4098 - acc: 0.8348     
Epoch 12/100
8500/8500 [==============================] - 0s - loss: 0.4087 - acc: 0.8334     - ETA: 0s - loss: 0.4070 - acc: 
Epoch 13/100
8500/8500 [==============================] - 0s - loss: 0.4079 - acc: 0.8351     
Epoch 14/100
8500/8500 [==============================] - 0s - loss: 0.4070 - acc: 0.8335     - ETA: 0s - loss: 0.4074 - acc: 0.83
Epoch 15/100
8500/8500 [==============================] - 0s - loss: 0.4068 - acc: 0.8347     
Epoch 16/100
8500/8500 [==============================] - 0s - loss: 0.4062 - acc: 0.8353     - ETA: 0s - loss: 0.4054 - acc: 0.8
Epoch 17/100
8500/8500 [==============================] - 0s - loss: 0.4061 - acc: 0.8338     
Epoch 18/100
8500/8500 [==============================] - 1s - loss: 0.4059 - acc: 0.8342     - ETA: 0s - loss: 0.4039 - acc: 0.834 - ETA: 0s - loss: 0.4035 - acc: 0
Epoch 19/100
8500/8500 [==============================] - 1s - loss: 0.4051 - acc: 0.8345     
Epoch 20/100
8500/8500 [==============================] - 1s - loss: 0.4051 - acc: 0.8347     
Epoch 21/100
8500/8500 [==============================] - 1s - loss: 0.4049 - acc: 0.8362     
Epoch 22/100
8500/8500 [==============================] - 1s - loss: 0.4044 - acc: 0.8346     - ETA: 0s - loss: 0.4023 - acc: 0.
Epoch 23/100
8500/8500 [==============================] - 1s - loss: 0.4041 - acc: 0.8344     - ETA: 0s - loss: 0.4007 - acc: 0.8
Epoch 24/100
8500/8500 [==============================] - 1s - loss: 0.4044 - acc: 0.8351     
Epoch 25/100
8500/8500 [==============================] - 1s - loss: 0.4039 - acc: 0.8355     
Epoch 26/100
8500/8500 [==============================] - 1s - loss: 0.4039 - acc: 0.8346     - ETA: 0s - loss: 0.4048 - acc: 
Epoch 27/100
8500/8500 [==============================] - 1s - loss: 0.4039 - acc: 0.8349     
Epoch 28/100
8500/8500 [==============================] - 1s - loss: 0.4035 - acc: 0.8339     - ETA: 0s - loss: 0.3921
Epoch 29/100
8500/8500 [==============================] - 1s - loss: 0.4035 - acc: 0.8360     - ETA: 0s - loss: 0.4069 - 
Epoch 30/100
8500/8500 [==============================] - 1s - loss: 0.4035 - acc: 0.8347     - ETA: 0s - loss: 0.4004
Epoch 31/100
8500/8500 [==============================] - 1s - loss: 0.4035 - acc: 0.8347     
Epoch 32/100
8500/8500 [==============================] - 1s - loss: 0.4033 - acc: 0.8345     
Epoch 33/100
8500/8500 [==============================] - 1s - loss: 0.4030 - acc: 0.8351     
Epoch 34/100
8500/8500 [==============================] - 0s - loss: 0.4034 - acc: 0.8341     - ETA: 0s - loss: 0.4091 - acc: 0.
Epoch 35/100
8500/8500 [==============================] - 0s - loss: 0.4030 - acc: 0.8365     
Epoch 36/100
8500/8500 [==============================] - 0s - loss: 0.4030 - acc: 0.8351     
Epoch 37/100
8500/8500 [==============================] - 0s - loss: 0.4028 - acc: 0.8353     
Epoch 38/100
8500/8500 [==============================] - 0s - loss: 0.4031 - acc: 0.8347     
Epoch 39/100
8500/8500 [==============================] - 0s - loss: 0.4019 - acc: 0.8329     
Epoch 40/100
8500/8500 [==============================] - 0s - loss: 0.4024 - acc: 0.8351     
Epoch 41/100
8500/8500 [==============================] - 0s - loss: 0.4024 - acc: 0.8352     
Epoch 42/100
8500/8500 [==============================] - 0s - loss: 0.4026 - acc: 0.8366     
Epoch 43/100
8500/8500 [==============================] - 0s - loss: 0.4025 - acc: 0.8356     
Epoch 44/100
8500/8500 [==============================] - 0s - loss: 0.4017 - acc: 0.8351     
Epoch 45/100
8500/8500 [==============================] - 0s - loss: 0.4022 - acc: 0.8344     
Epoch 46/100
8500/8500 [==============================] - 0s - loss: 0.4023 - acc: 0.8327     
Epoch 47/100
8500/8500 [==============================] - 0s - loss: 0.4021 - acc: 0.8340     
Epoch 48/100
8500/8500 [==============================] - 0s - loss: 0.4021 - acc: 0.8335     - ETA: 0s - loss: 0.3997 - acc: 0.832 - ETA: 0s - loss: 0.3983 - acc
Epoch 49/100
8500/8500 [==============================] - 0s - loss: 0.4023 - acc: 0.8344     
Epoch 50/100
8500/8500 [==============================] - 0s - loss: 0.4022 - acc: 0.8356     
Epoch 51/100
8500/8500 [==============================] - 0s - loss: 0.4018 - acc: 0.8347     
Epoch 52/100
8500/8500 [==============================] - 0s - loss: 0.4022 - acc: 0.8362     
Epoch 53/100
8500/8500 [==============================] - 0s - loss: 0.4024 - acc: 0.8339     
Epoch 54/100
8500/8500 [==============================] - 0s - loss: 0.4020 - acc: 0.8358     - ETA: 0s - loss: 0.4020 - acc: 0.83
Epoch 55/100
8500/8500 [==============================] - 1s - loss: 0.4016 - acc: 0.8347     - ETA: 0s - loss: 0.4052 - a
Epoch 56/100
8500/8500 [==============================] - 1s - loss: 0.4022 - acc: 0.8351     
Epoch 57/100
8500/8500 [==============================] - 1s - loss: 0.4020 - acc: 0.8360     - ETA: 0s - loss: 0.4013 - acc: 
Epoch 58/100
8500/8500 [==============================] - 0s - loss: 0.4020 - acc: 0.8347     
Epoch 59/100
8500/8500 [==============================] - 0s - loss: 0.4018 - acc: 0.8353     - ETA: 0s - loss: 0.3973 - acc: 0.
Epoch 60/100
8500/8500 [==============================] - 0s - loss: 0.4023 - acc: 0.8352     
Epoch 61/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8347     
Epoch 62/100
8500/8500 [==============================] - 1s - loss: 0.4019 - acc: 0.8362     
Epoch 63/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8360     
Epoch 64/100
8500/8500 [==============================] - 0s - loss: 0.4018 - acc: 0.8364     
Epoch 65/100
8500/8500 [==============================] - 1s - loss: 0.4017 - acc: 0.8367     
Epoch 66/100
8500/8500 [==============================] - 1s - loss: 0.4019 - acc: 0.8340     
Epoch 67/100
8500/8500 [==============================] - 1s - loss: 0.4017 - acc: 0.8349     
Epoch 68/100
8500/8500 [==============================] - 0s - loss: 0.4020 - acc: 0.8353     
Epoch 69/100
8500/8500 [==============================] - 1s - loss: 0.4017 - acc: 0.8353     
Epoch 70/100
8500/8500 [==============================] - 1s - loss: 0.4018 - acc: 0.8359     
Epoch 71/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8352     
Epoch 72/100
8500/8500 [==============================] - 1s - loss: 0.4014 - acc: 0.8349     
Epoch 73/100
8500/8500 [==============================] - 1s - loss: 0.4018 - acc: 0.8351     
Epoch 74/100
8500/8500 [==============================] - 1s - loss: 0.4015 - acc: 0.8345     - ETA: 0s - loss: 0.4013 - acc: 0.833
Epoch 75/100
8500/8500 [==============================] - 1s - loss: 0.4015 - acc: 0.8360     - ETA: 0s - loss: 0.4128 -
Epoch 76/100
8500/8500 [==============================] - 1s - loss: 0.4014 - acc: 0.8348     - ETA: 0s - loss: 0.3993 - acc: 0.8
Epoch 77/100
8500/8500 [==============================] - 0s - loss: 0.4017 - acc: 0.8365     
Epoch 78/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8342     - ETA: 0s - loss: 0.3991 - acc: 0.835 - ETA: 0s - loss: 0.4014 - acc: 0.834
Epoch 79/100
8500/8500 [==============================] - 1s - loss: 0.4016 - acc: 0.8358     
Epoch 80/100
8500/8500 [==============================] - 0s - loss: 0.4014 - acc: 0.8362     
Epoch 81/100
8500/8500 [==============================] - 0s - loss: 0.4016 - acc: 0.8367     
Epoch 82/100
8500/8500 [==============================] - 0s - loss: 0.4011 - acc: 0.8355     
Epoch 83/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8358     
Epoch 84/100
8500/8500 [==============================] - 0s - loss: 0.4017 - acc: 0.8348     
Epoch 85/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8369     - ETA: 0s - loss: 0.3989 - acc: 0.8
Epoch 86/100
8500/8500 [==============================] - 1s - loss: 0.4010 - acc: 0.8352     
Epoch 87/100
8500/8500 [==============================] - 1s - loss: 0.4016 - acc: 0.8349     
Epoch 88/100
8500/8500 [==============================] - 0s - loss: 0.4013 - acc: 0.8365     
Epoch 89/100
8500/8500 [==============================] - 0s - loss: 0.4014 - acc: 0.8359     
Epoch 90/100
8500/8500 [==============================] - 0s - loss: 0.4016 - acc: 0.8356     
Epoch 91/100
8500/8500 [==============================] - 0s - loss: 0.4010 - acc: 0.8364     
Epoch 92/100
8500/8500 [==============================] - 0s - loss: 0.4016 - acc: 0.8358     - ETA: 0s - loss: 0.4056 - acc:
Epoch 93/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8376     
Epoch 94/100
8500/8500 [==============================] - 0s - loss: 0.4016 - acc: 0.8347     
Epoch 95/100
8500/8500 [==============================] - 0s - loss: 0.4016 - acc: 0.8349     - ETA: 0s - loss: 0.3983 
Epoch 96/100
8500/8500 [==============================] - 0s - loss: 0.4013 - acc: 0.8355     
Epoch 97/100
8500/8500 [==============================] - 0s - loss: 0.4015 - acc: 0.8347     
Epoch 98/100
8500/8500 [==============================] - 0s - loss: 0.4014 - acc: 0.8346     
Epoch 99/100
8500/8500 [==============================] - 0s - loss: 0.4014 - acc: 0.8372     
Epoch 100/100
8500/8500 [==============================] - 1s - loss: 0.4011 - acc: 0.8346     
Out[16]:
<keras.callbacks.History at 0xdbcd940>

In [17]:
# Predicted churn probabilities (sigmoid outputs in [0, 1]) for the test set.
y_pred = classifier.predict(x_test)

In [18]:
# Inspect the raw probabilities before thresholding.
y_pred


Out[18]:
array([[ 0.23284076],
       [ 0.51396227],
       [ 0.15699457],
       ..., 
       [ 0.32842132],
       [ 0.03585028],
       [ 0.1930901 ]], dtype=float32)

In [19]:
# Convert probabilities to hard class labels. The cutoff is a named constant so
# it can be tuned (e.g. lowered to trade precision for recall on churners)
# instead of living as a magic number in the expression.
THRESHOLD = 0.5
y_predstat = y_pred > THRESHOLD

In [20]:
from sklearn.metrics import confusion_matrix,accuracy_score

In [21]:
# Rows = actual class, columns = predicted class (sklearn convention);
# class 1 corresponds to Exited = 1 (churned).
confusion_matrix(y_test,y_predstat)


Out[21]:
array([[1189,   41],
       [ 189,   81]])

In [23]:
# Overall fraction of correct predictions on the held-out test set.
accuracy_score(y_test,y_predstat)


Out[23]:
0.84666666666666668