In [1]:
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from read_dataset import read_ceps_with_train_test


Using TensorFlow backend.

In [2]:
# Load MFCC (cepstral) features already split into train/test partitions.
X_train, X_test, y_train, y_test = read_ceps_with_train_test()

# One-hot encode the integer class labels for the 10-way softmax output.
# NOTE(review): assumes labels are ints in [0, 9] — confirm in read_dataset.
y_train, y_test = (
    keras.utils.to_categorical(labels, num_classes=10)
    for labels in (y_train, y_test)
)

In [3]:
# MLP architecture adapted from
# https://keras.io/getting-started/sequential-model-guide/
# Two fully-connected hidden layers of 64 units (ReLU) with 30% dropout
# for regularization, then a 10-way softmax classifier.
# input_dim=20 matches the number of cepstral coefficients per sample.
model = Sequential([
    Dense(64, activation='relu', input_dim=20),
    Dropout(0.3),
    Dense(64, activation='relu'),
    Dropout(0.3),
    Dense(10, activation='softmax'),
])

# SGD with Nesterov momentum and a small learning-rate decay per update.
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)

In [4]:
# Compile with categorical cross-entropy (matches the one-hot labels and
# softmax output) and track accuracy during training.
model.compile(
    loss='categorical_crossentropy',
    optimizer=sgd,
    metrics=['accuracy'],
)

# Train for 20 epochs; batch_size=128 on ~600 samples gives ~5 updates/epoch.
model.fit(X_train, y_train, epochs=20, batch_size=128)


Epoch 1/20
600/600 [==============================] - 0s - loss: 13.8234 - acc: 0.0867     
Epoch 2/20
600/600 [==============================] - 0s - loss: 14.0663 - acc: 0.1033     
Epoch 3/20
600/600 [==============================] - 0s - loss: 14.1973 - acc: 0.1083     
Epoch 4/20
600/600 [==============================] - 0s - loss: 14.1704 - acc: 0.1067     
Epoch 5/20
600/600 [==============================] - 0s - loss: 14.1967 - acc: 0.1100     
Epoch 6/20
600/600 [==============================] - 0s - loss: 14.0987 - acc: 0.1250     
Epoch 7/20
600/600 [==============================] - 0s - loss: 14.0741 - acc: 0.1217     
Epoch 8/20
600/600 [==============================] - 0s - loss: 13.9858 - acc: 0.1233     
Epoch 9/20
600/600 [==============================] - 0s - loss: 14.4398 - acc: 0.1017     
Epoch 10/20
600/600 [==============================] - 0s - loss: 13.8712 - acc: 0.1350     
Epoch 11/20
600/600 [==============================] - 0s - loss: 14.1116 - acc: 0.1183     
Epoch 12/20
600/600 [==============================] - 0s - loss: 13.9227 - acc: 0.1250     
Epoch 13/20
600/600 [==============================] - 0s - loss: 14.0201 - acc: 0.1250     
Epoch 14/20
600/600 [==============================] - 0s - loss: 13.5318 - acc: 0.1483     
Epoch 15/20
600/600 [==============================] - 0s - loss: 13.7463 - acc: 0.1383     
Epoch 16/20
600/600 [==============================] - 0s - loss: 13.5902 - acc: 0.1433     
Epoch 17/20
600/600 [==============================] - 0s - loss: 13.7977 - acc: 0.1350     
Epoch 18/20
600/600 [==============================] - 0s - loss: 13.6058 - acc: 0.1500     
Epoch 19/20
600/600 [==============================] - 0s - loss: 13.5016 - acc: 0.1483     
Epoch 20/20
600/600 [==============================] - 0s - loss: 13.1480 - acc: 0.1617     
Out[4]:
<keras.callbacks.History at 0x7f25f0e77a90>

In [5]:
# Evaluate on the held-out test set; `score` is [loss, accuracy]
# in the order given by model.metrics_names.
score = model.evaluate(X_test, y_test, batch_size=100)
print("")
print(score)


100/400 [======>.......................] - ETA: 0s
[12.930396318435669, 0.17749999836087227]

In [6]:
# Names for the entries of `score` above: ['loss', 'acc'].
model.metrics_names


Out[6]:
['loss', 'acc']