In [8]:
%matplotlib notebook
import sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt 
import scipy as sp
import IPython
from IPython.display import display
import sklearn
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import Normalizer
from sklearn import cross_validation
from sklearn.model_selection import cross_val_score
from sklearn.metrics import classification_report
import keras
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.utils import to_categorical
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
from keras.utils import np_utils


c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\sklearn\cross_validation.py:41: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.
  "This module will be removed in 0.20.", DeprecationWarning)

Part2

In this part other models will be used in order to see if greater accuracy is achieved.

Importing Dataset:


In [9]:
raw_df_white = pd.read_csv("winequality-white.csv", sep =';')

Encoding target variable

Creating dummy variables and separate each category into one column


In [10]:
X = raw_df_white.iloc[:,:-1]
y = raw_df_white['quality']
encoder = LabelEncoder()
y = encoder.fit_transform(y)
y = y.reshape(-1, 1)
onehotencoder = OneHotEncoder()
y = onehotencoder.fit_transform(y).toarray()
y = pd.DataFrame(y)
y.columns = ['3', '4', '5', '6', '7', '8', '9']

In [11]:
X_train_white, X_test_white, y_train_white, y_test_white = cross_validation.train_test_split(X, y, test_size = 0.2, random_state = 0)

In [12]:
y_train_white.describe()


Out[12]:
3 4 5 6 7 8 9
count 3918.000000 3918.000000 3918.000000 3918.000000 3918.000000 3918.000000 3918.000000
mean 0.002808 0.028586 0.296580 0.456611 0.177897 0.036243 0.001276
std 0.052919 0.166661 0.456808 0.498177 0.382475 0.186918 0.035705
min 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
25% 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
50% 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
75% 0.000000 0.000000 1.000000 1.000000 0.000000 0.000000 0.000000
max 1.000000 1.000000 1.000000 1.000000 1.000000 1.000000 1.000000

Performing feature scaling on the dataset


In [13]:
scaler = StandardScaler()
#scaler =  MinMaxScaler()
#scaler = Normalizer()
X_train_white = scaler.fit(X_train_white).transform(X_train_white)
X_test_white = scaler.fit(X_test_white).transform(X_test_white)

ANN

Performing Artificial Neural Networks. Instanciating the model adding layers and training the model:


In [14]:
#classifier = Sequential()
#classifier.add(Dense(output_dim = 6, init = 'uniform', activation = 'relu', input_dim = 11))
# input_dim: number of independent var
#classifier.add(Dense(output_dim = 6,init = 'uniform', activation = 'relu'))

#adding layers with keras 2.0
model = Sequential()
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=11))
model.add(Activation('relu'))
model.add(Dense(units=7))
model.add(Activation('softmax'))

# 
model.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
model.fit(X_train_white, y_train_white, batch_size = 5, epochs = 100)


Epoch 1/100
3918/3918 [==============================] - 2s 458us/step - loss: 1.3815 - acc: 0.4505
Epoch 2/100
3918/3918 [==============================] - 1s 304us/step - loss: 1.2477 - acc: 0.4592
Epoch 3/100
3918/3918 [==============================] - 1s 277us/step - loss: 1.1895 - acc: 0.4806
Epoch 4/100
3918/3918 [==============================] - 1s 282us/step - loss: 1.1281 - acc: 0.5110
Epoch 5/100
3918/3918 [==============================] - 1s 286us/step - loss: 1.0932 - acc: 0.5120
Epoch 6/100
3918/3918 [==============================] - 1s 354us/step - loss: 1.0794 - acc: 0.5253
Epoch 7/100
3918/3918 [==============================] - 1s 347us/step - loss: 1.0659 - acc: 0.5401
Epoch 8/100
3918/3918 [==============================] - 1s 361us/step - loss: 1.0545 - acc: 0.5426
Epoch 9/100
3918/3918 [==============================] - 1s 346us/step - loss: 1.0474 - acc: 0.5465
Epoch 10/100
3918/3918 [==============================] - 1s 342us/step - loss: 1.0395 - acc: 0.5480
Epoch 11/100
3918/3918 [==============================] - 2s 388us/step - loss: 1.0330 - acc: 0.5510
Epoch 12/100
3918/3918 [==============================] - 1s 356us/step - loss: 1.0249 - acc: 0.5551
Epoch 13/100
3918/3918 [==============================] - 2s 407us/step - loss: 1.0257 - acc: 0.5666
Epoch 14/100
3918/3918 [==============================] - 1s 365us/step - loss: 1.0261 - acc: 0.5628
Epoch 15/100
3918/3918 [==============================] - 1s 337us/step - loss: 1.0217 - acc: 0.5641
Epoch 16/100
3918/3918 [==============================] - 1s 341us/step - loss: 1.0209 - acc: 0.5610
Epoch 17/100
3918/3918 [==============================] - 1s 353us/step - loss: 1.0154 - acc: 0.5630
Epoch 18/100
3918/3918 [==============================] - 1s 344us/step - loss: 1.0138 - acc: 0.5625
Epoch 19/100
3918/3918 [==============================] - 1s 352us/step - loss: 1.0134 - acc: 0.5676
Epoch 20/100
3918/3918 [==============================] - 1s 351us/step - loss: 1.0139 - acc: 0.5661
Epoch 21/100
3918/3918 [==============================] - 1s 352us/step - loss: 1.0108 - acc: 0.5661
Epoch 22/100
3918/3918 [==============================] - 1s 360us/step - loss: 1.0071 - acc: 0.5694
Epoch 23/100
3918/3918 [==============================] - 1s 345us/step - loss: 1.0014 - acc: 0.5661
Epoch 24/100
3918/3918 [==============================] - 1s 350us/step - loss: 1.0034 - acc: 0.5758
Epoch 25/100
3918/3918 [==============================] - 1s 352us/step - loss: 0.9990 - acc: 0.5725
Epoch 26/100
3918/3918 [==============================] - 2s 397us/step - loss: 0.9977 - acc: 0.5817 1s -
Epoch 27/100
3918/3918 [==============================] - 1s 353us/step - loss: 0.9974 - acc: 0.5781
Epoch 28/100
3918/3918 [==============================] - 1s 346us/step - loss: 0.9975 - acc: 0.5763
Epoch 29/100
3918/3918 [==============================] - 1s 346us/step - loss: 0.9909 - acc: 0.5830
Epoch 30/100
3918/3918 [==============================] - 1s 351us/step - loss: 0.9923 - acc: 0.5814
Epoch 31/100
3918/3918 [==============================] - 1s 347us/step - loss: 0.9913 - acc: 0.5789
Epoch 32/100
3918/3918 [==============================] - 1s 343us/step - loss: 0.9927 - acc: 0.5786
Epoch 33/100
3918/3918 [==============================] - 1s 347us/step - loss: 0.9847 - acc: 0.5827
Epoch 34/100
3918/3918 [==============================] - 1s 349us/step - loss: 0.9866 - acc: 0.5786
Epoch 35/100
3918/3918 [==============================] - 1s 352us/step - loss: 0.9869 - acc: 0.5776
Epoch 36/100
3918/3918 [==============================] - 1s 353us/step - loss: 0.9845 - acc: 0.5847
Epoch 37/100
3918/3918 [==============================] - 1s 364us/step - loss: 0.9862 - acc: 0.5763
Epoch 38/100
3918/3918 [==============================] - 1s 372us/step - loss: 0.9822 - acc: 0.5799
Epoch 39/100
3918/3918 [==============================] - 2s 527us/step - loss: 0.9788 - acc: 0.5842
Epoch 40/100
3918/3918 [==============================] - 2s 439us/step - loss: 0.9775 - acc: 0.5827
Epoch 41/100
3918/3918 [==============================] - 1s 381us/step - loss: 0.9839 - acc: 0.5837
Epoch 42/100
3918/3918 [==============================] - 2s 395us/step - loss: 0.9799 - acc: 0.5832
Epoch 43/100
3918/3918 [==============================] - 2s 392us/step - loss: 0.9781 - acc: 0.5906
Epoch 44/100
3918/3918 [==============================] - 1s 366us/step - loss: 0.9744 - acc: 0.5949
Epoch 45/100
3918/3918 [==============================] - 1s 378us/step - loss: 0.9709 - acc: 0.5947
Epoch 46/100
3918/3918 [==============================] - 1s 374us/step - loss: 0.9734 - acc: 0.5870
Epoch 47/100
3918/3918 [==============================] - 1s 374us/step - loss: 0.9712 - acc: 0.5891
Epoch 48/100
3918/3918 [==============================] - 2s 399us/step - loss: 0.9672 - acc: 0.5898
Epoch 49/100
3918/3918 [==============================] - 1s 359us/step - loss: 0.9643 - acc: 0.5926
Epoch 50/100
3918/3918 [==============================] - 1s 365us/step - loss: 0.9653 - acc: 0.5924
Epoch 51/100
3918/3918 [==============================] - 1s 364us/step - loss: 0.9611 - acc: 0.5947
Epoch 52/100
3918/3918 [==============================] - 1s 355us/step - loss: 0.9600 - acc: 0.5942
Epoch 53/100
3918/3918 [==============================] - 1s 366us/step - loss: 0.9583 - acc: 0.5972
Epoch 54/100
3918/3918 [==============================] - 2s 405us/step - loss: 0.9604 - acc: 0.5965
Epoch 55/100
3918/3918 [==============================] - 2s 448us/step - loss: 0.9549 - acc: 0.5965
Epoch 56/100
3918/3918 [==============================] - 2s 400us/step - loss: 0.9570 - acc: 0.6018
Epoch 57/100
3918/3918 [==============================] - 2s 400us/step - loss: 0.9556 - acc: 0.5998
Epoch 58/100
3918/3918 [==============================] - 2s 389us/step - loss: 0.9509 - acc: 0.5947
Epoch 59/100
3918/3918 [==============================] - 2s 387us/step - loss: 0.9527 - acc: 0.5983
Epoch 60/100
3918/3918 [==============================] - 2s 422us/step - loss: 0.9516 - acc: 0.6039
Epoch 61/100
3918/3918 [==============================] - 2s 413us/step - loss: 0.9476 - acc: 0.6016
Epoch 62/100
3918/3918 [==============================] - 1s 358us/step - loss: 0.9474 - acc: 0.6041
Epoch 63/100
3918/3918 [==============================] - 1s 382us/step - loss: 0.9495 - acc: 0.6036
Epoch 64/100
3918/3918 [==============================] - 1s 363us/step - loss: 0.9500 - acc: 0.6036
Epoch 65/100
3918/3918 [==============================] - 1s 367us/step - loss: 0.9422 - acc: 0.6123
Epoch 66/100
3918/3918 [==============================] - 1s 371us/step - loss: 0.9428 - acc: 0.6067
Epoch 67/100
3918/3918 [==============================] - 1s 369us/step - loss: 0.9448 - acc: 0.6108
Epoch 68/100
3918/3918 [==============================] - 1s 369us/step - loss: 0.9468 - acc: 0.6041
Epoch 69/100
3918/3918 [==============================] - 1s 368us/step - loss: 0.9478 - acc: 0.6075
Epoch 70/100
3918/3918 [==============================] - 1s 373us/step - loss: 0.9379 - acc: 0.6113
Epoch 71/100
3918/3918 [==============================] - 1s 371us/step - loss: 0.9405 - acc: 0.6131
Epoch 72/100
3918/3918 [==============================] - 1s 380us/step - loss: 0.9317 - acc: 0.6166
Epoch 73/100
3918/3918 [==============================] - 1s 379us/step - loss: 0.9354 - acc: 0.6149
Epoch 74/100
3918/3918 [==============================] - 1s 361us/step - loss: 0.9316 - acc: 0.6128
Epoch 75/100
3918/3918 [==============================] - 2s 394us/step - loss: 0.9339 - acc: 0.6095
Epoch 76/100
3918/3918 [==============================] - 2s 399us/step - loss: 0.9333 - acc: 0.6169
Epoch 77/100
3918/3918 [==============================] - 2s 441us/step - loss: 0.9326 - acc: 0.6164
Epoch 78/100
3918/3918 [==============================] - 2s 413us/step - loss: 0.9334 - acc: 0.6062
Epoch 79/100
3918/3918 [==============================] - 2s 549us/step - loss: 0.9345 - acc: 0.6136
Epoch 80/100
3918/3918 [==============================] - 2s 402us/step - loss: 0.9279 - acc: 0.6141
Epoch 81/100
3918/3918 [==============================] - 2s 412us/step - loss: 0.9264 - acc: 0.6238
Epoch 82/100
3918/3918 [==============================] - 2s 408us/step - loss: 0.9279 - acc: 0.6133
Epoch 83/100
3918/3918 [==============================] - 2s 394us/step - loss: 0.9315 - acc: 0.6276
Epoch 84/100
3918/3918 [==============================] - 2s 438us/step - loss: 0.9274 - acc: 0.6220
Epoch 85/100
3918/3918 [==============================] - 2s 386us/step - loss: 0.9278 - acc: 0.6246
Epoch 86/100
3918/3918 [==============================] - 2s 395us/step - loss: 0.9250 - acc: 0.6246
Epoch 87/100
3918/3918 [==============================] - 2s 388us/step - loss: 0.9199 - acc: 0.6192
Epoch 88/100
3918/3918 [==============================] - 1s 382us/step - loss: 0.9221 - acc: 0.6217
Epoch 89/100
3918/3918 [==============================] - 2s 385us/step - loss: 0.9207 - acc: 0.6235
Epoch 90/100
3918/3918 [==============================] - 2s 400us/step - loss: 0.9202 - acc: 0.6177
Epoch 91/100
3918/3918 [==============================] - 2s 417us/step - loss: 0.9247 - acc: 0.6263
Epoch 92/100
3918/3918 [==============================] - 1s 354us/step - loss: 0.9156 - acc: 0.6230
Epoch 93/100
3918/3918 [==============================] - 2s 387us/step - loss: 0.9211 - acc: 0.6248
Epoch 94/100
3918/3918 [==============================] - 2s 425us/step - loss: 0.9215 - acc: 0.6261
Epoch 95/100
3918/3918 [==============================] - 2s 412us/step - loss: 0.9220 - acc: 0.6149
Epoch 96/100
3918/3918 [==============================] - 2s 412us/step - loss: 0.9189 - acc: 0.6266
Epoch 97/100
3918/3918 [==============================] - 2s 426us/step - loss: 0.9190 - acc: 0.6197
Epoch 98/100
3918/3918 [==============================] - 2s 426us/step - loss: 0.9218 - acc: 0.6205
Epoch 99/100
3918/3918 [==============================] - 1s 358us/step - loss: 0.9168 - acc: 0.6248
Epoch 100/100
3918/3918 [==============================] - 1s 378us/step - loss: 0.9126 - acc: 0.6228
Out[14]:
<keras.callbacks.History at 0x238914627f0>

Performing predictions. if y_predict > 0.7 == True


In [15]:
y_predict = model.predict(X_test_white)
y_test_white = y_test_white.astype(float)
y_predict = y_predict
y_predict = y_predict > 0.7

Creating confusion matrix:


In [16]:
from sklearn.metrics import confusion_matrix
from numpy import argmax
cf = confusion_matrix(y_test_white.values.argmax(axis=1), y_predict.argmax(axis=1))
from sklearn.metrics import accuracy_score
#accuracy = accuracy_score(y_test_white, y_predict)
cf


Out[16]:
array([[  5,   0,   3,   1,   0,   0],
       [ 23,   0,  24,   4,   0,   0],
       [201,   0,  64,  30,   0,   0],
       [328,   0,  19,  62,   0,   0],
       [161,   0,   0,  22,   0,   0],
       [ 29,   0,   0,   4,   0,   0]], dtype=int64)

The results are not accurate. Improving the model:

Improving the model


In [19]:
from keras.layers import Dropout
odel = Sequential()
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=7))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=11, input_dim=11))
model.add(Activation('relu'))
model.add(Dropout(p = 0.2))
model.add(Dense(units=7))
model.add(Activation('softmax'))
model.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
model.fit(X_train_white, y_train_white, batch_size = 5, epochs = 100)


c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:5: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
  """
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:8: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
  
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:11: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
  # This is added back by InteractiveShellApp.init_path()
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:14: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
  
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:17: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:21: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:24: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:27: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:30: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
c:\users\franc\appdata\local\programs\python\python36\lib\site-packages\ipykernel_launcher.py:33: UserWarning: Update your `Dropout` call to the Keras 2 API: `Dropout(rate=0.2)`
Epoch 1/100
3918/3918 [==============================] - 7s 2ms/step - loss: 1.4759 - acc: 0.4372
Epoch 2/100
3918/3918 [==============================] - 4s 985us/step - loss: 1.3068 - acc: 0.4413
Epoch 3/100
3918/3918 [==============================] - 4s 994us/step - loss: 1.2980 - acc: 0.4461
Epoch 4/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2917 - acc: 0.4538
Epoch 5/100
3918/3918 [==============================] - 5s 1ms/step - loss: 1.2883 - acc: 0.4558
Epoch 6/100
3918/3918 [==============================] - 4s 988us/step - loss: 1.2856 - acc: 0.4574
Epoch 7/100
3918/3918 [==============================] - 4s 984us/step - loss: 1.2873 - acc: 0.4558
Epoch 8/100
3918/3918 [==============================] - 4s 992us/step - loss: 1.2836 - acc: 0.4561
Epoch 9/100
3918/3918 [==============================] - 4s 999us/step - loss: 1.2843 - acc: 0.4553
Epoch 10/100
3918/3918 [==============================] - 4s 991us/step - loss: 1.2796 - acc: 0.4569
Epoch 11/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2808 - acc: 0.4566
Epoch 12/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2812 - acc: 0.4566
Epoch 13/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2777 - acc: 0.4566
Epoch 14/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2812 - acc: 0.4566
Epoch 15/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2795 - acc: 0.4566
Epoch 16/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2796 - acc: 0.4566
Epoch 17/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2770 - acc: 0.4566
Epoch 18/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2788 - acc: 0.4566
Epoch 19/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2791 - acc: 0.4566
Epoch 20/100
3918/3918 [==============================] - 5s 1ms/step - loss: 1.2778 - acc: 0.4566
Epoch 21/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2746 - acc: 0.4566
Epoch 22/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2756 - acc: 0.4566
Epoch 23/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2773 - acc: 0.4566
Epoch 24/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2737 - acc: 0.4566
Epoch 25/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2774 - acc: 0.4566
Epoch 26/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2773 - acc: 0.4566
Epoch 27/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2764 - acc: 0.4566
Epoch 28/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2758 - acc: 0.4566
Epoch 29/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2755 - acc: 0.4566
Epoch 30/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2739 - acc: 0.4566
Epoch 31/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2763 - acc: 0.4566
Epoch 32/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2746 - acc: 0.4566
Epoch 33/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2773 - acc: 0.4566
Epoch 34/100
3918/3918 [==============================] - 5s 1ms/step - loss: 1.2744 - acc: 0.4566
Epoch 35/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2755 - acc: 0.4566
Epoch 36/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2743 - acc: 0.4566
Epoch 37/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2759 - acc: 0.4566
Epoch 38/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2764 - acc: 0.4566
Epoch 39/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2752 - acc: 0.4566
Epoch 40/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2761 - acc: 0.4566
Epoch 41/100
3918/3918 [==============================] - 4s 968us/step - loss: 1.2765 - acc: 0.4566
Epoch 42/100
3918/3918 [==============================] - 4s 974us/step - loss: 1.2764 - acc: 0.4566
Epoch 43/100
3918/3918 [==============================] - 4s 975us/step - loss: 1.2752 - acc: 0.4566
Epoch 44/100
3918/3918 [==============================] - 4s 975us/step - loss: 1.2743 - acc: 0.4566
Epoch 45/100
3918/3918 [==============================] - 4s 978us/step - loss: 1.2756 - acc: 0.4566
Epoch 46/100
3918/3918 [==============================] - 4s 976us/step - loss: 1.2728 - acc: 0.4566
Epoch 47/100
3918/3918 [==============================] - 4s 981us/step - loss: 1.2745 - acc: 0.4566
Epoch 48/100
3918/3918 [==============================] - 4s 980us/step - loss: 1.2742 - acc: 0.4566
Epoch 49/100
3918/3918 [==============================] - 5s 1ms/step - loss: 1.2758 - acc: 0.4566
Epoch 50/100
3918/3918 [==============================] - 4s 971us/step - loss: 1.2740 - acc: 0.4566
Epoch 51/100
3918/3918 [==============================] - 4s 973us/step - loss: 1.2751 - acc: 0.4566
Epoch 52/100
3918/3918 [==============================] - 4s 981us/step - loss: 1.2740 - acc: 0.4566
Epoch 53/100
3918/3918 [==============================] - 4s 985us/step - loss: 1.2759 - acc: 0.4566
Epoch 54/100
3918/3918 [==============================] - 4s 989us/step - loss: 1.2752 - acc: 0.4566
Epoch 55/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2737 - acc: 0.4566
Epoch 56/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2736 - acc: 0.4566
Epoch 57/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2748 - acc: 0.4566
Epoch 58/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2748 - acc: 0.4566
Epoch 59/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2749 - acc: 0.4566
Epoch 60/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2742 - acc: 0.4566
Epoch 61/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2751 - acc: 0.4566
Epoch 62/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2733 - acc: 0.4566
Epoch 63/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2743 - acc: 0.4566
Epoch 64/100
3918/3918 [==============================] - 5s 1ms/step - loss: 1.2743 - acc: 0.4566
Epoch 65/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2737 - acc: 0.4566
Epoch 66/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2738 - acc: 0.4566
Epoch 67/100
3918/3918 [==============================] - 4s 984us/step - loss: 1.2735 - acc: 0.4566
Epoch 68/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2736 - acc: 0.4566
Epoch 69/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2749 - acc: 0.4566
Epoch 70/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2746 - acc: 0.4566
Epoch 71/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2745 - acc: 0.4566
Epoch 72/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2741 - acc: 0.4566
Epoch 73/100
3918/3918 [==============================] - 4s 998us/step - loss: 1.2729 - acc: 0.4566
Epoch 74/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2749 - acc: 0.4566
Epoch 75/100
3918/3918 [==============================] - 4s 969us/step - loss: 1.2737 - acc: 0.4566
Epoch 76/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2748 - acc: 0.4566
Epoch 77/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2743 - acc: 0.4566
Epoch 78/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2736 - acc: 0.4566
Epoch 79/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2747 - acc: 0.4566
Epoch 80/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2732 - acc: 0.4566
Epoch 81/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2744 - acc: 0.4566
Epoch 82/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2740 - acc: 0.4566
Epoch 83/100
3918/3918 [==============================] - 4s 967us/step - loss: 1.2737 - acc: 0.4566
Epoch 84/100
3918/3918 [==============================] - 4s 961us/step - loss: 1.2737 - acc: 0.4566
Epoch 85/100
3918/3918 [==============================] - 4s 955us/step - loss: 1.2733 - acc: 0.4566
Epoch 86/100
3918/3918 [==============================] - 4s 940us/step - loss: 1.2744 - acc: 0.4566
Epoch 87/100
3918/3918 [==============================] - 4s 1ms/step - loss: 1.2741 - acc: 0.4566
Epoch 88/100
3918/3918 [==============================] - 4s 897us/step - loss: 1.2745 - acc: 0.4566
Epoch 89/100
3918/3918 [==============================] - 3s 883us/step - loss: 1.2740 - acc: 0.4566
Epoch 90/100
3918/3918 [==============================] - 3s 872us/step - loss: 1.2741 - acc: 0.4566
Epoch 91/100
3918/3918 [==============================] - 3s 880us/step - loss: 1.2738 - acc: 0.4566
Epoch 92/100
3918/3918 [==============================] - 3s 856us/step - loss: 1.2731 - acc: 0.4566
Epoch 93/100
3918/3918 [==============================] - 3s 873us/step - loss: 1.2744 - acc: 0.4566
Epoch 94/100
3918/3918 [==============================] - 3s 858us/step - loss: 1.2733 - acc: 0.4566
Epoch 95/100
3918/3918 [==============================] - 4s 988us/step - loss: 1.2730 - acc: 0.4566
Epoch 96/100
3918/3918 [==============================] - 3s 867us/step - loss: 1.2743 - acc: 0.4566
Epoch 97/100
3918/3918 [==============================] - 3s 865us/step - loss: 1.2746 - acc: 0.4566
Epoch 98/100
3918/3918 [==============================] - 3s 882us/step - loss: 1.2741 - acc: 0.4566
Epoch 99/100
3918/3918 [==============================] - 3s 887us/step - loss: 1.2734 - acc: 0.4566
Epoch 100/100
3918/3918 [==============================] - 3s 883us/step - loss: 1.2739 - acc: 0.4566
Out[19]:
<keras.callbacks.History at 0x2389d4b5358>

In [20]:
y_predict = model.predict(X_test_white)
y_test_white = y_test_white.astype(float)
y_predict = y_predict
y_predict = y_predict > 0.7

In [21]:
cf = confusion_matrix(y_test_white.values.argmax(axis=1), y_predict.argmax(axis=1))
cf


Out[21]:
array([[  9,   0,   0,   0,   0,   0],
       [ 51,   0,   0,   0,   0,   0],
       [295,   0,   0,   0,   0,   0],
       [409,   0,   0,   0,   0,   0],
       [183,   0,   0,   0,   0,   0],
       [ 33,   0,   0,   0,   0,   0]], dtype=int64)

The model accuracy was reduced.

xgboost


In [ ]:
from xgboost.sklearn import XGBClassifier
from xgboost.sklearn import XGBRegressor

xclas = XGBClassifier()  # for classifier
xclas.fit(X_train_white, y_train_white)
y_pred = xclas.predict(X_test_white)

cross_val_score(xclas, X_train_white, y_train_white)

from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test_white, y_pred)
print (cm)

for Windows, xgboost it is avaliable only in anaconda:

Cross_val_score: array([0.59770992, 0.55708812, 0.58940906])

Conslusion

In this analysis, the two models that performed the best were the KNN and the KernelSVC. Those models achieved about 0.65 of accuracy on the test set. The accuracy for the training models was close to 1.0 which is an indicative of overfitting. Feature extraction did not improve the accuracy of the models. In the research paper. They manage to have an accuracy of 0.868 using the Fuzzy inductive Reasoning methodology, an absolute error tolerance of 1.0 and the discretization of the input variables. Their results vary greatly. For the SVM model, they vary from 0.503 to 0.868 (0.25 to 1.0 absolute error tolerance). In contrast to the research paper, the discretization of the input variables into two categories did not increased accuracy, on the contrary, it significantly reduced it. Apparently, ANN and Xgboost are not better that KNN and SVC models.

Questions:

What is Fuzzy inductive reasoning(FIR)?

What is the absolute error tolerance T that is used with FIR?


In [ ]: