In [173]:
import numpy as np
import matplotlib.pyplot as plt

from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Dense, Activation, BatchNormalization, Conv2D, MaxPooling2D, Dropout, Flatten
from keras.optimizers import SGD, RMSprop
from keras.regularizers import l2
from keras.utils.np_utils import to_categorical

In [174]:
num_classes = 10

def get_data(train_size=49000, validation_size=1000, test_size=10000):
    # The data, shuffled and split between train and test sets:
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()

    mean_image = np.mean(x_train, axis=0)
    x_train = (x_train.astype('float64') - mean_image)
    x_test = (x_test.astype('float64') - mean_image)

    # Keep inputs as 32x32x3 images; models that need flat vectors flatten them internally.

    x_val = x_train[train_size:train_size+validation_size]
    y_val = y_train[train_size:train_size+validation_size]
    x_train = x_train[:train_size]
    y_train = y_train[:train_size]
    x_test = x_test[:test_size]
    y_test = y_test[:test_size]
    return x_train, y_train, x_val, y_val, x_test, y_test
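
A quick sanity check on the splits (the expected shapes below follow from CIFAR-10's standard layout: 50000 train / 10000 test images of 32x32 RGB pixels; variable names here are illustrative):

In [ ]:
# Illustrative check: confirm the split shapes before training.
x_tr, y_tr, x_v, y_v, x_te, y_te = get_data(49000, 1000, 10000)
print(x_tr.shape, y_tr.shape)  # (49000, 32, 32, 3) (49000, 1)
print(x_v.shape, x_te.shape)   # (1000, 32, 32, 3) (10000, 32, 32, 3)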

In [143]:
# Model: Flatten - Dense - ReLU - Dense - Softmax
x_train, y_train, x_val, y_val, x_test, y_test = get_data(49000, 1000, 10000)

results = {}
for hidden in [120]:
    for reg in [1.0]:
        for lr in [7e-4]:
            for decay in [8e-3]:
                model = Sequential([
                    # Flatten 32x32x3 images to 3072-d vectors for the dense layers.
                    Flatten(input_shape=x_train.shape[1:]),
                    Dense(hidden, kernel_regularizer=l2(reg)),
                    Activation('relu'),
                    Dense(10, kernel_regularizer=l2(reg)),
                    Activation('softmax')
                ])
                model.compile(
                    optimizer=SGD(lr=lr, decay=decay, momentum=0.9, nesterov=True),
                    loss='categorical_crossentropy',
                    metrics=['accuracy']
                )
                model.fit(
                    x_train, to_categorical(y_train, num_classes),
                    epochs=10, batch_size=200, shuffle=True, verbose=False
                )
                preds = np.argmax(model.predict(x_test), axis=1)
                accuracy = np.mean(y_test.reshape(-1) == preds)
                print('{} {} {} {}: {}'.format(hidden, reg, lr, decay, accuracy))
                results[(hidden, reg, lr, decay)] = accuracy
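
Once the grid finishes, the best configuration can be read straight off `results` (a minimal sketch; `results` maps hyperparameter tuples to test accuracy, as built above):

In [ ]:
# Pick the hyperparameter tuple with the highest accuracy from the grid search.
best = max(results, key=results.get)
print('best (hidden, reg, lr, decay) = {}: {:.4f}'.format(best, results[best]))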



In [175]:
x_train, y_train, x_val, y_val, x_test, y_test = get_data(10000, 1000, 10000)
# Evaluate on the 1000-image validation split while iterating on the architecture.
x_test, y_test = x_val, y_val

model = Sequential([
    Conv2D(input_shape=x_train.shape[1:], filters=32, kernel_size=(3,3), padding='same'),
    Activation('relu'),
    Conv2D(32, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    Conv2D(64, (3, 3), padding='same'),
    Activation('relu'),
    Conv2D(64, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),
    
    Flatten(),
    Dense(512),
    BatchNormalization(),
    Activation('relu'),
    Dropout(0.5),
    Dense(10),
    Activation('softmax')
])
model.compile(
    optimizer=RMSprop(lr=1e-4, decay=1e-6),
    loss='categorical_crossentropy',
    metrics=['accuracy']
)
model.fit(
    x_train, to_categorical(y_train, num_classes),
    epochs=100, batch_size=200, shuffle=True, verbose=True
)
preds = np.argmax(model.predict(x_test), axis=1)
accuracy = np.mean(y_test.reshape(-1) == preds)
print(accuracy)


Epoch 1/100
10000/10000 [==============================] - 42s - loss: 2.5623 - acc: 0.1858      
Epoch 2/100
10000/10000 [==============================] - 33s - loss: 2.1875 - acc: 0.2757     
Epoch 3/100
10000/10000 [==============================] - 33s - loss: 2.0353 - acc: 0.3163     
Epoch 4/100
10000/10000 [==============================] - 33s - loss: 1.9148 - acc: 0.3469     
Epoch 5/100
10000/10000 [==============================] - 33s - loss: 1.8267 - acc: 0.3778     
... (epochs 6-98 omitted: training loss falls steadily from 1.75 to 0.39; training accuracy climbs from 0.40 to 0.87) ...
Epoch 99/100
10000/10000 [==============================] - 36s - loss: 0.3705 - acc: 0.8753     
Epoch 100/100
10000/10000 [==============================] - 36s - loss: 0.3753 - acc: 0.8742     
0.667
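
The 0.667 printed above is accuracy on the 1000-image validation split (recall the `x_test, y_test = x_val, y_val` swap). A sketch of how the same signal could be monitored per epoch via Keras's built-in validation hook, rather than a single post-hoc check:

In [ ]:
# Illustrative: pass the validation split to fit() so Keras reports
# val_loss / val_acc after every epoch.
model.fit(
    x_train, to_categorical(y_train, num_classes),
    validation_data=(x_val, to_categorical(y_val, num_classes)),
    epochs=100, batch_size=200, shuffle=True, verbose=True
)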

In [ ]: