Simple 2-layer network


In [6]:
import matplotlib.pyplot as plt
%matplotlib inline

In [2]:
from keras.models import Sequential
from keras.layers import Dense, Activation


Using TensorFlow backend.

In [3]:
model = Sequential()
model.add(Dense(32, activation='relu', input_dim=100))  # hidden layer: 32 ReLU units on 100-dimensional inputs
model.add(Dense(1, activation='sigmoid'))               # single sigmoid output for binary classification
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])
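
A quick sanity check (a sketch, not part of the original run): model.summary() should report 100 × 32 + 32 = 3,232 parameters for the hidden layer and 32 + 1 = 33 for the output, 3,265 in total.

model.summary()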

In [4]:
import numpy as np

# random 100-dimensional inputs in [0, 1); a label is 1 when more than half
# of an input's 100 features exceed 0.5
data = np.random.random((1000, 100))
labels = np.array([[int(np.sum(i > 0.5) > 50)] for i in data])

test_data = np.random.random((100, 100))
test_labels = np.array([[int(np.sum(i > 0.5) > 50)] for i in test_data])
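
Since each feature is uniform on [0, 1), the two classes come out roughly balanced. A quick check of the class balance (a sketch, not part of the original run):

print(labels.mean(), test_labels.mean())  # roughly 0.46 each, since a tie at exactly 50 features counts as class 0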

In [5]:
h = model.fit(data, labels, epochs=50, batch_size=32,
             validation_data=(test_data,test_labels))


Train on 1000 samples, validate on 100 samples
Epoch 1/50
1000/1000 [==============================] - 0s 122us/step - loss: 0.7061 - acc: 0.5080 - val_loss: 0.6980 - val_acc: 0.4900
Epoch 2/50
1000/1000 [==============================] - 0s 48us/step - loss: 0.6965 - acc: 0.5210 - val_loss: 0.6895 - val_acc: 0.5700
Epoch 3/50
1000/1000 [==============================] - 0s 39us/step - loss: 0.6923 - acc: 0.5190 - val_loss: 0.6898 - val_acc: 0.5400
Epoch 4/50
1000/1000 [==============================] - 0s 44us/step - loss: 0.6841 - acc: 0.5590 - val_loss: 0.6938 - val_acc: 0.4800
Epoch 5/50
1000/1000 [==============================] - 0s 40us/step - loss: 0.6833 - acc: 0.5600 - val_loss: 0.6891 - val_acc: 0.4900
Epoch 6/50
1000/1000 [==============================] - 0s 48us/step - loss: 0.6750 - acc: 0.5790 - val_loss: 0.6840 - val_acc: 0.4700
Epoch 7/50
1000/1000 [==============================] - 0s 35us/step - loss: 0.6701 - acc: 0.5890 - val_loss: 0.6896 - val_acc: 0.4900
Epoch 8/50
1000/1000 [==============================] - 0s 46us/step - loss: 0.6683 - acc: 0.5930 - val_loss: 0.6972 - val_acc: 0.4900
Epoch 9/50
1000/1000 [==============================] - 0s 43us/step - loss: 0.6666 - acc: 0.5970 - val_loss: 0.6855 - val_acc: 0.4700
Epoch 10/50
1000/1000 [==============================] - 0s 35us/step - loss: 0.6580 - acc: 0.6200 - val_loss: 0.7258 - val_acc: 0.5100
Epoch 11/50
1000/1000 [==============================] - 0s 38us/step - loss: 0.6602 - acc: 0.6230 - val_loss: 0.6802 - val_acc: 0.5100
Epoch 12/50
1000/1000 [==============================] - 0s 41us/step - loss: 0.6522 - acc: 0.6430 - val_loss: 0.6768 - val_acc: 0.5600
Epoch 13/50
1000/1000 [==============================] - 0s 35us/step - loss: 0.6486 - acc: 0.6370 - val_loss: 0.6769 - val_acc: 0.5600
Epoch 14/50
1000/1000 [==============================] - 0s 45us/step - loss: 0.6430 - acc: 0.6570 - val_loss: 0.6784 - val_acc: 0.5600
Epoch 15/50
1000/1000 [==============================] - 0s 38us/step - loss: 0.6396 - acc: 0.6690 - val_loss: 0.6776 - val_acc: 0.5400
Epoch 16/50
1000/1000 [==============================] - 0s 42us/step - loss: 0.6323 - acc: 0.6680 - val_loss: 0.6774 - val_acc: 0.5000
Epoch 17/50
1000/1000 [==============================] - 0s 48us/step - loss: 0.6280 - acc: 0.6830 - val_loss: 0.6773 - val_acc: 0.5400
Epoch 18/50
1000/1000 [==============================] - 0s 38us/step - loss: 0.6247 - acc: 0.6880 - val_loss: 0.6736 - val_acc: 0.5100
Epoch 19/50
1000/1000 [==============================] - 0s 47us/step - loss: 0.6162 - acc: 0.6880 - val_loss: 0.6887 - val_acc: 0.5700
Epoch 20/50
1000/1000 [==============================] - 0s 38us/step - loss: 0.6136 - acc: 0.6910 - val_loss: 0.6819 - val_acc: 0.5400
Epoch 21/50
1000/1000 [==============================] - 0s 48us/step - loss: 0.6070 - acc: 0.7050 - val_loss: 0.7121 - val_acc: 0.5500
Epoch 22/50
1000/1000 [==============================] - 0s 34us/step - loss: 0.6066 - acc: 0.7090 - val_loss: 0.6870 - val_acc: 0.5600
Epoch 23/50
1000/1000 [==============================] - 0s 38us/step - loss: 0.5987 - acc: 0.7280 - val_loss: 0.7187 - val_acc: 0.5000
Epoch 24/50
1000/1000 [==============================] - 0s 38us/step - loss: 0.6002 - acc: 0.7130 - val_loss: 0.6749 - val_acc: 0.4900
Epoch 25/50
1000/1000 [==============================] - 0s 47us/step - loss: 0.5918 - acc: 0.7250 - val_loss: 0.6728 - val_acc: 0.5800
Epoch 26/50
1000/1000 [==============================] - 0s 46us/step - loss: 0.5861 - acc: 0.7370 - val_loss: 0.6735 - val_acc: 0.5600
Epoch 27/50
1000/1000 [==============================] - 0s 39us/step - loss: 0.5768 - acc: 0.7480 - val_loss: 0.7061 - val_acc: 0.5500
Epoch 28/50
1000/1000 [==============================] - 0s 56us/step - loss: 0.5731 - acc: 0.7420 - val_loss: 0.6749 - val_acc: 0.5300
Epoch 29/50
1000/1000 [==============================] - 0s 37us/step - loss: 0.5718 - acc: 0.7510 - val_loss: 0.6774 - val_acc: 0.5200
Epoch 30/50
1000/1000 [==============================] - 0s 37us/step - loss: 0.5651 - acc: 0.7600 - val_loss: 0.6836 - val_acc: 0.5900
Epoch 31/50
1000/1000 [==============================] - 0s 46us/step - loss: 0.5591 - acc: 0.7560 - val_loss: 0.6702 - val_acc: 0.5700
Epoch 32/50
1000/1000 [==============================] - 0s 39us/step - loss: 0.5558 - acc: 0.7650 - val_loss: 0.6726 - val_acc: 0.5700
Epoch 33/50
1000/1000 [==============================] - 0s 46us/step - loss: 0.5482 - acc: 0.7670 - val_loss: 0.6926 - val_acc: 0.5500
Epoch 34/50
1000/1000 [==============================] - 0s 33us/step - loss: 0.5404 - acc: 0.7700 - val_loss: 0.7061 - val_acc: 0.5500
Epoch 35/50
1000/1000 [==============================] - 0s 52us/step - loss: 0.5415 - acc: 0.7830 - val_loss: 0.7399 - val_acc: 0.5700
Epoch 36/50
1000/1000 [==============================] - 0s 33us/step - loss: 0.5311 - acc: 0.7700 - val_loss: 0.6793 - val_acc: 0.5200
Epoch 37/50
1000/1000 [==============================] - 0s 39us/step - loss: 0.5251 - acc: 0.7860 - val_loss: 0.7007 - val_acc: 0.5600
Epoch 38/50
1000/1000 [==============================] - 0s 42us/step - loss: 0.5206 - acc: 0.7900 - val_loss: 0.6839 - val_acc: 0.5000
Epoch 39/50
1000/1000 [==============================] - 0s 41us/step - loss: 0.5194 - acc: 0.7780 - val_loss: 0.7208 - val_acc: 0.5400
Epoch 40/50
1000/1000 [==============================] - 0s 46us/step - loss: 0.5131 - acc: 0.7820 - val_loss: 0.7182 - val_acc: 0.5400
Epoch 41/50
1000/1000 [==============================] - 0s 39us/step - loss: 0.5060 - acc: 0.7980 - val_loss: 0.6738 - val_acc: 0.5700
Epoch 42/50
1000/1000 [==============================] - 0s 36us/step - loss: 0.5009 - acc: 0.8120 - val_loss: 0.7577 - val_acc: 0.5400
Epoch 43/50
1000/1000 [==============================] - 0s 44us/step - loss: 0.4968 - acc: 0.8070 - val_loss: 0.6942 - val_acc: 0.5900
Epoch 44/50
1000/1000 [==============================] - 0s 39us/step - loss: 0.4891 - acc: 0.8050 - val_loss: 0.6826 - val_acc: 0.5900
Epoch 45/50
1000/1000 [==============================] - 0s 43us/step - loss: 0.4817 - acc: 0.8140 - val_loss: 0.6792 - val_acc: 0.5600
Epoch 46/50
1000/1000 [==============================] - 0s 37us/step - loss: 0.4790 - acc: 0.8210 - val_loss: 0.6938 - val_acc: 0.6100
Epoch 47/50
1000/1000 [==============================] - 0s 35us/step - loss: 0.4718 - acc: 0.8290 - val_loss: 0.7351 - val_acc: 0.5700
Epoch 48/50
1000/1000 [==============================] - 0s 44us/step - loss: 0.4661 - acc: 0.8290 - val_loss: 0.7285 - val_acc: 0.5400
Epoch 49/50
1000/1000 [==============================] - 0s 35us/step - loss: 0.4589 - acc: 0.8310 - val_loss: 0.6861 - val_acc: 0.5700
Epoch 50/50
1000/1000 [==============================] - 0s 48us/step - loss: 0.4574 - acc: 0.8440 - val_loss: 0.6883 - val_acc: 0.6000
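
The History object returned by fit records these per-epoch metrics; with this Keras version the keys are 'loss', 'acc', 'val_loss' and 'val_acc', which is what the plot below reads. A quick check (a sketch, not part of the original run):

print(h.history.keys())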

In [6]:
plt.figure(figsize=[8,6])
plt.plot(h.history['loss'],'r',linewidth=3.0)
plt.plot(h.history['val_loss'],'b',linewidth=3.0)
plt.legend(['Training loss', 'Validation Loss'],fontsize=18)
plt.xlabel('Epochs ',fontsize=16)
plt.ylabel('Loss',fontsize=16)
plt.title('Loss Curves',fontsize=16)


Out[6]:
<matplotlib.text.Text at 0x7fc180546160>
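
The same history gives the accuracy curves; a sketch mirroring the loss plot above (the keys are 'acc' and 'val_acc' with this Keras version):

plt.figure(figsize=[8,6])
plt.plot(h.history['acc'],'r',linewidth=3.0)
plt.plot(h.history['val_acc'],'b',linewidth=3.0)
plt.legend(['Training Accuracy', 'Validation Accuracy'],fontsize=18)
plt.xlabel('Epochs',fontsize=16)
plt.ylabel('Accuracy',fontsize=16)
plt.title('Accuracy Curves',fontsize=16)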

In [7]:
labels


Out[7]:
array([[0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [1],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0],
       [1],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [1],
       [1],
       [1],
       [1],
       [0],
       [0],
       [1],
       [0],
       [0],
       [0],
       [0],
       [1],
       [0],
       [1],
       [0]])

CIFAR-10 CNN

Setup


In [1]:
import keras
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
import os

# needed for the image and training-curve plots below
import matplotlib.pyplot as plt
%matplotlib inline


Using TensorFlow backend.

In [2]:
batch_size = 32
num_classes = 10
epochs = 100
data_augmentation = True
num_predictions = 20
save_dir = os.path.join(os.getcwd(), 'saved_models')
model_name = 'keras_cifar10_trained_model.h5'

Data


In [3]:
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')


x_train shape: (50000, 32, 32, 3)
50000 train samples
10000 test samples

In [7]:
i = 69
plt.imshow(x_train[i])
print(y_train[i])  # raw class index; 8 is 'ship' in the standard CIFAR-10 label order


[8]

In [8]:
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
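
to_categorical turns the integer class indices into one-hot rows of length num_classes; for instance, the class index 8 printed above becomes a row with a single 1 at position 8. A minimal sketch (not part of the original run):

keras.utils.to_categorical([8], num_classes)  # -> one row of 10 values: 1.0 at index 8, zeros elsewhere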

Model


In [9]:
model = Sequential()

# 2 convolution layers: 'same' padding keeps the 32x32 spatial size, the
# second (unpadded) conv reduces it to 30x30, and 2x2 max pooling halves it to 15x15
model.add(Conv2D(32, (3, 3), padding='same',
                 input_shape=x_train.shape[1:]))
model.add(Activation('relu'))
model.add(Conv2D(32, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))

# 2 dense layers: Flatten turns the 15x15x32 feature maps into a 7200-vector
model.add(Flatten())
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes))
model.add(Activation('softmax'))

In [10]:
opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-6)

model.compile(loss='categorical_crossentropy',
              optimizer=opt,
              metrics=['accuracy'])

Data Preprocessing


In [ ]:
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255

Fit


In [16]:
if not data_augmentation:
    print('Not using data augmentation.')
    hist = model.fit(x_train, y_train,
                      batch_size=batch_size,
                      epochs=epochs,
                      validation_data=(x_test, y_test),
                      shuffle=True)
else:
    print('Using real-time data augmentation.')
    # This will do preprocessing and realtime data augmentation:
    datagen = ImageDataGenerator(
        featurewise_center=False,  # set input mean to 0 over the dataset
        samplewise_center=False,  # set each sample mean to 0
        featurewise_std_normalization=False,  # divide inputs by std of the dataset
        samplewise_std_normalization=False,  # divide each input by its std
        zca_whitening=False,  # apply ZCA whitening
        rotation_range=0,  # randomly rotate images in the range (degrees, 0 to 180)
        width_shift_range=0.1,  # randomly shift images horizontally (fraction of total width)
        height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
        horizontal_flip=True,  # randomly flip images
        vertical_flip=False)  # do not flip images vertically

    # Compute quantities required for feature-wise normalization
    # (std, mean, and principal components if ZCA whitening is applied).
    datagen.fit(x_train)

    # Fit the model on the batches generated by datagen.flow().
    hist = model.fit_generator(datagen.flow(x_train, y_train,
                                             batch_size=batch_size),
                                epochs=epochs,
                                validation_data=(x_test, y_test),
                                workers=4)


Using real-time data augmentation.
Epoch 1/100
1563/1563 [==============================] - 185s 118ms/step - loss: 1.7609 - acc: 0.3628 - val_loss: 1.4209 - val_acc: 0.5006
Epoch 2/100
1563/1563 [==============================] - 181s 116ms/step - loss: 1.4847 - acc: 0.4674 - val_loss: 1.2764 - val_acc: 0.5558
Epoch 3/100
1563/1563 [==============================] - 184s 118ms/step - loss: 1.3750 - acc: 0.5102 - val_loss: 1.1869 - val_acc: 0.5850
Epoch 4/100
1563/1563 [==============================] - 185s 118ms/step - loss: 1.3035 - acc: 0.5383 - val_loss: 1.1389 - val_acc: 0.5993
Epoch 5/100
1563/1563 [==============================] - 180s 115ms/step - loss: 1.2446 - acc: 0.5604 - val_loss: 1.0941 - val_acc: 0.6188
Epoch 6/100
1563/1563 [==============================] - 184s 118ms/step - loss: 1.2055 - acc: 0.5740 - val_loss: 1.0465 - val_acc: 0.6394
Epoch 7/100
1563/1563 [==============================] - 183s 117ms/step - loss: 1.1664 - acc: 0.5860 - val_loss: 1.0063 - val_acc: 0.6471
Epoch 8/100
1563/1563 [==============================] - 190s 122ms/step - loss: 1.1351 - acc: 0.6012 - val_loss: 0.9882 - val_acc: 0.6548
Epoch 9/100
1563/1563 [==============================] - 188s 121ms/step - loss: 1.1114 - acc: 0.6093 - val_loss: 0.9572 - val_acc: 0.6670
Epoch 10/100
1563/1563 [==============================] - 187s 120ms/step - loss: 1.0836 - acc: 0.6197 - val_loss: 0.9463 - val_acc: 0.6668
Epoch 11/100
1563/1563 [==============================] - 202s 129ms/step - loss: 1.0710 - acc: 0.6245 - val_loss: 0.9250 - val_acc: 0.6778
Epoch 12/100
1563/1563 [==============================] - 151s 97ms/step - loss: 1.0572 - acc: 0.6297 - val_loss: 0.9453 - val_acc: 0.6727
Epoch 13/100
1563/1563 [==============================] - 135s 86ms/step - loss: 1.0442 - acc: 0.6353 - val_loss: 0.9063 - val_acc: 0.6851
Epoch 14/100
1563/1563 [==============================] - 135s 87ms/step - loss: 1.0397 - acc: 0.6376 - val_loss: 0.9147 - val_acc: 0.6837
Epoch 15/100
1563/1563 [==============================] - 135s 86ms/step - loss: 1.0294 - acc: 0.6426 - val_loss: 0.9067 - val_acc: 0.6856
Epoch 16/100
1563/1563 [==============================] - 135s 87ms/step - loss: 1.0228 - acc: 0.6442 - val_loss: 0.8992 - val_acc: 0.6874
Epoch 17/100
1563/1563 [==============================] - 136s 87ms/step - loss: 1.0155 - acc: 0.6467 - val_loss: 0.8908 - val_acc: 0.6941
Epoch 18/100
1563/1563 [==============================] - 137s 87ms/step - loss: 1.0115 - acc: 0.6483 - val_loss: 0.8820 - val_acc: 0.6971
Epoch 19/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0129 - acc: 0.6485 - val_loss: 0.8554 - val_acc: 0.7081
Epoch 20/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0108 - acc: 0.6514 - val_loss: 0.8626 - val_acc: 0.6994
Epoch 21/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0191 - acc: 0.6507 - val_loss: 0.8651 - val_acc: 0.7026
Epoch 22/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0132 - acc: 0.6512 - val_loss: 0.8538 - val_acc: 0.7032
Epoch 23/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0096 - acc: 0.6506 - val_loss: 0.8484 - val_acc: 0.7114
Epoch 24/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0135 - acc: 0.6498 - val_loss: 0.8569 - val_acc: 0.7030
Epoch 25/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0148 - acc: 0.6495 - val_loss: 0.8587 - val_acc: 0.7055
Epoch 26/100
1563/1563 [==============================] - 125s 80ms/step - loss: 1.0155 - acc: 0.6507 - val_loss: 0.8516 - val_acc: 0.7086
Epoch 27/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0206 - acc: 0.6489 - val_loss: 0.8595 - val_acc: 0.7055
Epoch 28/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0181 - acc: 0.6491 - val_loss: 0.8490 - val_acc: 0.7073
Epoch 29/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0254 - acc: 0.6485 - val_loss: 0.8565 - val_acc: 0.7098
Epoch 30/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0247 - acc: 0.6484 - val_loss: 0.8396 - val_acc: 0.7109
Epoch 31/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0260 - acc: 0.6486 - val_loss: 0.8398 - val_acc: 0.7077
Epoch 32/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0223 - acc: 0.6518 - val_loss: 0.8829 - val_acc: 0.7087
Epoch 33/100
1563/1563 [==============================] - 126s 81ms/step - loss: 1.0265 - acc: 0.6474 - val_loss: 0.8620 - val_acc: 0.6997
Epoch 34/100
1563/1563 [==============================] - 126s 81ms/step - loss: 1.0265 - acc: 0.6472 - val_loss: 0.8606 - val_acc: 0.7076
Epoch 35/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0282 - acc: 0.6497 - val_loss: 0.8444 - val_acc: 0.7094
Epoch 36/100
1563/1563 [==============================] - 126s 81ms/step - loss: 1.0300 - acc: 0.6464 - val_loss: 0.8506 - val_acc: 0.7142
Epoch 37/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0366 - acc: 0.6486 - val_loss: 0.8503 - val_acc: 0.7064
Epoch 38/100
1563/1563 [==============================] - 127s 81ms/step - loss: 1.0349 - acc: 0.6478 - val_loss: 0.8521 - val_acc: 0.7086
Epoch 39/100
1563/1563 [==============================] - 129s 83ms/step - loss: 1.0295 - acc: 0.6472 - val_loss: 0.8707 - val_acc: 0.7081
Epoch 40/100
1563/1563 [==============================] - 129s 83ms/step - loss: 1.0318 - acc: 0.6464 - val_loss: 0.8915 - val_acc: 0.6924
Epoch 41/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0290 - acc: 0.6464 - val_loss: 0.9117 - val_acc: 0.6899
Epoch 42/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0334 - acc: 0.6454 - val_loss: 0.8696 - val_acc: 0.6979
Epoch 43/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0429 - acc: 0.6437 - val_loss: 0.9158 - val_acc: 0.6875
Epoch 44/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0404 - acc: 0.6442 - val_loss: 0.8597 - val_acc: 0.7056
Epoch 45/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0368 - acc: 0.6485 - val_loss: 0.8673 - val_acc: 0.7065
Epoch 46/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0373 - acc: 0.6465 - val_loss: 0.9091 - val_acc: 0.6998
Epoch 47/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0408 - acc: 0.6460 - val_loss: 0.8645 - val_acc: 0.7059
Epoch 48/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0462 - acc: 0.6427 - val_loss: 0.8912 - val_acc: 0.7040
Epoch 49/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0397 - acc: 0.6450 - val_loss: 0.8849 - val_acc: 0.6975
Epoch 50/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0434 - acc: 0.6434 - val_loss: 0.8469 - val_acc: 0.7092
Epoch 51/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0357 - acc: 0.6474 - val_loss: 0.8506 - val_acc: 0.7069
Epoch 52/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0421 - acc: 0.6430 - val_loss: 0.8448 - val_acc: 0.7024
Epoch 53/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0402 - acc: 0.6454 - val_loss: 0.8914 - val_acc: 0.7042
Epoch 54/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0413 - acc: 0.6456 - val_loss: 0.9674 - val_acc: 0.6853
Epoch 55/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0417 - acc: 0.6462 - val_loss: 0.9172 - val_acc: 0.6893
Epoch 56/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0481 - acc: 0.6428 - val_loss: 0.8679 - val_acc: 0.7026
Epoch 57/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0460 - acc: 0.6452 - val_loss: 0.8879 - val_acc: 0.7030
Epoch 58/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0421 - acc: 0.6442 - val_loss: 0.8750 - val_acc: 0.6996
Epoch 59/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0461 - acc: 0.6433 - val_loss: 0.8866 - val_acc: 0.6939
Epoch 60/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0431 - acc: 0.6418 - val_loss: 0.8906 - val_acc: 0.7032
Epoch 61/100
1563/1563 [==============================] - 127s 82ms/step - loss: 1.0468 - acc: 0.6428 - val_loss: 0.8366 - val_acc: 0.7143
Epoch 62/100
1563/1563 [==============================] - 129s 82ms/step - loss: 1.0451 - acc: 0.6426 - val_loss: 0.8992 - val_acc: 0.6955
Epoch 63/100
1563/1563 [==============================] - 128s 82ms/step - loss: 1.0520 - acc: 0.6424 - val_loss: 0.8622 - val_acc: 0.7072
Epoch 64/100
1563/1563 [==============================] - 171s 109ms/step - loss: 1.0533 - acc: 0.6441 - val_loss: 0.9122 - val_acc: 0.6894
Epoch 65/100
1563/1563 [==============================] - 159s 102ms/step - loss: 1.0553 - acc: 0.6419 - val_loss: 0.8719 - val_acc: 0.7039
Epoch 66/100
1563/1563 [==============================] - 160s 102ms/step - loss: 1.0560 - acc: 0.6421 - val_loss: 0.8615 - val_acc: 0.7006
Epoch 67/100
1563/1563 [==============================] - 161s 103ms/step - loss: 1.0550 - acc: 0.6409 - val_loss: 0.9111 - val_acc: 0.6976
Epoch 68/100
1563/1563 [==============================] - 159s 101ms/step - loss: 1.0510 - acc: 0.6425 - val_loss: 0.8690 - val_acc: 0.7025
Epoch 69/100
1563/1563 [==============================] - 183s 117ms/step - loss: 1.0565 - acc: 0.6413 - val_loss: 0.8621 - val_acc: 0.7023
Epoch 70/100
1563/1563 [==============================] - 209s 134ms/step - loss: 1.0547 - acc: 0.6382 - val_loss: 0.9080 - val_acc: 0.7004
Epoch 71/100
1563/1563 [==============================] - 191s 122ms/step - loss: 1.0684 - acc: 0.6362 - val_loss: 0.8866 - val_acc: 0.6963
Epoch 72/100
1563/1563 [==============================] - 169s 108ms/step - loss: 1.0637 - acc: 0.6363 - val_loss: 0.8966 - val_acc: 0.6946
Epoch 73/100
1563/1563 [==============================] - 180s 115ms/step - loss: 1.0644 - acc: 0.6372 - val_loss: 0.9193 - val_acc: 0.7063
Epoch 74/100
1563/1563 [==============================] - 152s 98ms/step - loss: 1.0666 - acc: 0.6386 - val_loss: 0.9666 - val_acc: 0.6939
Epoch 75/100
1563/1563 [==============================] - 166s 106ms/step - loss: 1.0674 - acc: 0.6386 - val_loss: 0.9115 - val_acc: 0.6834
Epoch 76/100
1563/1563 [==============================] - 181s 116ms/step - loss: 1.0721 - acc: 0.6342 - val_loss: 0.8830 - val_acc: 0.6965
Epoch 77/100
1563/1563 [==============================] - 192s 123ms/step - loss: 1.0695 - acc: 0.6375 - val_loss: 0.8649 - val_acc: 0.7025
Epoch 78/100
1563/1563 [==============================] - 182s 116ms/step - loss: 1.0703 - acc: 0.6363 - val_loss: 0.8912 - val_acc: 0.6927
Epoch 79/100
1563/1563 [==============================] - 192s 123ms/step - loss: 1.0637 - acc: 0.6375 - val_loss: 0.8914 - val_acc: 0.7013
Epoch 80/100
1563/1563 [==============================] - 213s 136ms/step - loss: 1.0651 - acc: 0.6370 - val_loss: 0.8856 - val_acc: 0.6977
Epoch 81/100
1563/1563 [==============================] - 145s 93ms/step - loss: 1.0744 - acc: 0.6379 - val_loss: 1.0387 - val_acc: 0.6790
Epoch 82/100
1563/1563 [==============================] - 159s 102ms/step - loss: 1.0816 - acc: 0.6326 - val_loss: 0.8994 - val_acc: 0.6941
Epoch 83/100
1563/1563 [==============================] - 173s 110ms/step - loss: 1.0847 - acc: 0.6344 - val_loss: 0.8680 - val_acc: 0.7006
Epoch 84/100
1563/1563 [==============================] - 232s 149ms/step - loss: 1.0866 - acc: 0.6336 - val_loss: 1.0001 - val_acc: 0.6852
Epoch 85/100
1563/1563 [==============================] - 222s 142ms/step - loss: 1.0952 - acc: 0.6301 - val_loss: 0.9658 - val_acc: 0.6655
Epoch 86/100
1563/1563 [==============================] - 207s 132ms/step - loss: 1.0955 - acc: 0.6281 - val_loss: 0.8832 - val_acc: 0.6984
Epoch 87/100
1563/1563 [==============================] - 204s 130ms/step - loss: 1.0978 - acc: 0.6285 - val_loss: 0.8741 - val_acc: 0.7007
Epoch 88/100
1563/1563 [==============================] - 199s 127ms/step - loss: 1.0995 - acc: 0.6279 - val_loss: 0.9101 - val_acc: 0.6971
Epoch 89/100
1563/1563 [==============================] - 200s 128ms/step - loss: 1.1072 - acc: 0.6284 - val_loss: 0.9447 - val_acc: 0.6809
Epoch 90/100
1563/1563 [==============================] - 202s 129ms/step - loss: 1.1132 - acc: 0.6274 - val_loss: 0.9483 - val_acc: 0.6732
Epoch 91/100
1563/1563 [==============================] - 204s 130ms/step - loss: 1.1140 - acc: 0.6249 - val_loss: 0.9896 - val_acc: 0.6675
Epoch 92/100
1563/1563 [==============================] - 204s 131ms/step - loss: 1.1199 - acc: 0.6249 - val_loss: 0.9144 - val_acc: 0.6929
Epoch 93/100
1563/1563 [==============================] - 201s 129ms/step - loss: 1.1168 - acc: 0.6239 - val_loss: 1.0208 - val_acc: 0.6703
Epoch 94/100
1563/1563 [==============================] - 202s 129ms/step - loss: 1.1275 - acc: 0.6205 - val_loss: 1.0500 - val_acc: 0.6766
Epoch 95/100
1563/1563 [==============================] - 224s 143ms/step - loss: 1.1319 - acc: 0.6195 - val_loss: 0.9334 - val_acc: 0.6915
Epoch 96/100
1563/1563 [==============================] - 202s 129ms/step - loss: 1.1234 - acc: 0.6225 - val_loss: 0.9371 - val_acc: 0.6866
Epoch 97/100
1563/1563 [==============================] - 209s 134ms/step - loss: 1.1375 - acc: 0.6170 - val_loss: 0.9449 - val_acc: 0.6791
Epoch 98/100
1563/1563 [==============================] - 224s 143ms/step - loss: 1.1348 - acc: 0.6185 - val_loss: 0.9264 - val_acc: 0.6811
Epoch 99/100
1563/1563 [==============================] - 225s 144ms/step - loss: 1.1378 - acc: 0.6180 - val_loss: 0.9841 - val_acc: 0.6657
Epoch 100/100
1563/1563 [==============================] - 213s 136ms/step - loss: 1.1503 - acc: 0.6153 - val_loss: 0.9649 - val_acc: 0.6785
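
Each epoch above runs 1563 steps because fit_generator takes the number of steps from the generator's length, i.e. it draws batches until the 50,000 training images are covered once: ceil(50000 / 32) = 1563.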

Plot


In [23]:
f = plt.figure(figsize=[8,6])
plt.plot(hist.history['loss'],'r',linewidth=3.0)
plt.plot(hist.history['val_loss'],'b',linewidth=3.0)
plt.legend(['Training loss', 'Validation Loss'],fontsize=18)
plt.xlabel('Epochs ',fontsize=16)
plt.ylabel('Loss',fontsize=16)
plt.title('Loss Curves',fontsize=16)
f.savefig('test_loss.pdf')

f = plt.figure(figsize=[8,6])
plt.plot(hist.history['acc'],'r',linewidth=3.0)
plt.plot(hist.history['val_acc'],'b',linewidth=3.0)
plt.legend(['Training Accuracy', 'Validation Accuracy'],fontsize=18)
plt.xlabel('Epochs ',fontsize=16)
plt.ylabel('Accuracy',fontsize=16)
plt.title('Accuracy Curves',fontsize=16)

f.savefig('test_acc.pdf')


Save


In [11]:
if not os.path.isdir(save_dir):
    os.makedirs(save_dir)
model_path = os.path.join(save_dir, model_name)
model.save(model_path)
print('Saved trained model at %s ' % model_path)


Saved trained model at /home/annam/Desktop/git/gal-ML/saved_models/keras_cifar10_trained_model.h5 
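
The saved HDF5 file holds the architecture, weights and optimizer state, so the model can be restored later without rebuilding it. A minimal sketch (assuming h5py is installed; not run in this notebook):

from keras.models import load_model
restored_model = load_model(model_path)
restored_model.evaluate(x_test, y_test, verbose=1)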

In [21]:
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])


10000/10000 [==============================] - 10s 1ms/step
Test loss: 0.9648862292289734
Test accuracy: 0.6785
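
To turn the evaluated model into actual class predictions, predict returns a row of 10 softmax probabilities per image and argmax picks the most likely class index. A sketch (not part of the original run):

import numpy as np
probs = model.predict(x_test[:5])
print(np.argmax(probs, axis=1))       # predicted class indices
print(np.argmax(y_test[:5], axis=1))  # true class indices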

In [26]:
model.save_weights('test_weights.json')


---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-26-4004b90aee1f> in <module>()
----> 1 model.save_weights('test_weights.json')

/usr/local/lib64/python3.6/site-packages/keras/models.py in save_weights(self, filepath, overwrite)
    741     def save_weights(self, filepath, overwrite=True):
    742         if h5py is None:
--> 743             raise ImportError('`save_weights` requires h5py.')
    744         # If file exists and should not be overwritten:
    745         if not overwrite and os.path.isfile(filepath):

ImportError: `save_weights` requires h5py.
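
The traceback says it directly: save_weights serializes to HDF5 and needs the h5py package. After installing h5py (e.g. pip install h5py) the call goes through; an .h5 extension also matches the format better than .json. A sketch:

model.save_weights('test_weights.h5')  # requires h5py; writes an HDF5 file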

In [11]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_1 (Conv2D)            (None, 32, 32, 32)        896       
_________________________________________________________________
activation_1 (Activation)    (None, 32, 32, 32)        0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 30, 30, 32)        9248      
_________________________________________________________________
activation_2 (Activation)    (None, 30, 30, 32)        0         
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 15, 15, 32)        0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 15, 15, 32)        0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 7200)              0         
_________________________________________________________________
dense_1 (Dense)              (None, 512)               3686912   
_________________________________________________________________
activation_3 (Activation)    (None, 512)               0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 512)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 10)                5130      
_________________________________________________________________
activation_4 (Activation)    (None, 10)                0         
=================================================================
Total params: 3,702,186
Trainable params: 3,702,186
Non-trainable params: 0
_________________________________________________________________
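
Almost all of the 3,702,186 parameters sit in dense_1: 7200 inputs × 512 units + 512 biases = 3,686,912. The convolution layers are comparatively tiny (3×3×3×32 + 32 = 896 and 3×3×32×32 + 32 = 9,248), and the output layer adds 512×10 + 10 = 5,130.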

In [ ]: