In [1]:
# Imports: Keras model/layer APIs, the loss, the optimizer, and CIFAR-100.
import keras
from keras.datasets import cifar100
from keras.layers import Conv2D, MaxPool2D, Dropout, Dense, Flatten, BatchNormalization
from keras.losses import categorical_crossentropy
from keras.optimizers import adam
from keras.utils import to_categorical

# CIFAR-100: 50,000 training and 10,000 test RGB images, shape (32, 32, 3),
# with integer labels in [0, 100).
(Xtrain, ytrain), (Xtest, ytest) = cifar100.load_data()

# One-hot encode the labels for use with categorical cross-entropy.
NUM_CLASSES = 100
ytrain = to_categorical(ytrain, NUM_CLASSES)
ytest = to_categorical(ytest, NUM_CLASSES)

# Scale pixel intensities from [0, 255] to [0, 1] as float32.
Xtrain = Xtrain.astype('float32') / 255
Xtest = Xtest.astype('float32') / 255


Using TensorFlow backend.

In [2]:
# Small CNN for CIFAR-100: two conv stages followed by a dense classifier head.
# Layer hyperparameters are unchanged from the original architecture.
model = keras.models.Sequential()

# Stage 1: a strided 5x5 conv halves the spatial size -> (14, 14, 30),
# then BatchNorm + Dropout regularize before 2x2 max pooling -> (7, 7, 30).
model.add(Conv2D(30, (5, 5), input_shape=(32, 32, 3), strides=(2, 2),
                 activation='elu', padding='valid'))
model.add(BatchNormalization())
model.add(Dropout(0.2))
model.add(MaxPool2D(pool_size=(2, 2)))

# Stage 2: a strided 3x3 'valid' conv -> (3, 3, 64), a 2x2 'same' conv
# deepens features to 128 channels, then BatchNorm + Dropout and a final
# 2x2 pool -> (1, 1, 128).
model.add(Conv2D(64, (3, 3), activation='elu', padding='valid', strides=(2, 2)))
model.add(Conv2D(128, (2, 2), activation='elu', padding='same'))
model.add(BatchNormalization())
model.add(Dropout(0.2))
model.add(MaxPool2D(pool_size=(2, 2)))

# Classifier head: flatten the 128 features, one hidden dense layer, then a
# 100-way softmax over the CIFAR-100 classes.
model.add(Flatten())
model.add(Dense(128, activation='sigmoid'))
model.add(Dense(100, activation='softmax'))

# Use the documented Adam optimizer class (the lowercase `adam` alias was a
# legacy Keras shorthand and is removed in later releases); same learning
# rate as before.
model.compile(loss=categorical_crossentropy,
              optimizer=keras.optimizers.Adam(lr=0.001),
              metrics=['accuracy'])
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_1 (Conv2D)            (None, 14, 14, 30)        2280      
_________________________________________________________________
batch_normalization_1 (Batch (None, 14, 14, 30)        120       
_________________________________________________________________
dropout_1 (Dropout)          (None, 14, 14, 30)        0         
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 7, 7, 30)          0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 3, 3, 64)          17344     
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 3, 3, 128)         32896     
_________________________________________________________________
batch_normalization_2 (Batch (None, 3, 3, 128)         512       
_________________________________________________________________
dropout_2 (Dropout)          (None, 3, 3, 128)         0         
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 1, 1, 128)         0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 128)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 128)               16512     
_________________________________________________________________
dense_2 (Dense)              (None, 100)               12900     
=================================================================
Total params: 82,564
Trainable params: 82,248
Non-trainable params: 316
_________________________________________________________________

In [3]:
# Train for 20 epochs on the full training set (Keras default batch size),
# then evaluate on the held-out test set; the returned [loss, accuracy]
# pair is the cell's displayed output.
history = model.fit(Xtrain, ytrain, epochs=20)
model.evaluate(Xtest, ytest)


Epoch 1/20
50000/50000 [==============================] - 30s - loss: 3.7731 - acc: 0.1270    
Epoch 2/20
50000/50000 [==============================] - 32s - loss: 3.1790 - acc: 0.2196    
Epoch 3/20
50000/50000 [==============================] - 30s - loss: 2.9192 - acc: 0.2684    
Epoch 4/20
50000/50000 [==============================] - 30s - loss: 2.7608 - acc: 0.3004    
Epoch 5/20
50000/50000 [==============================] - 31s - loss: 2.6520 - acc: 0.3208    
Epoch 6/20
50000/50000 [==============================] - 29s - loss: 2.5597 - acc: 0.3404    
Epoch 7/20
50000/50000 [==============================] - 27s - loss: 2.4875 - acc: 0.3565    
Epoch 8/20
50000/50000 [==============================] - 29s - loss: 2.4186 - acc: 0.3714    
Epoch 9/20
50000/50000 [==============================] - 26s - loss: 2.3703 - acc: 0.3814    
Epoch 10/20
50000/50000 [==============================] - 26s - loss: 2.3260 - acc: 0.3905    
Epoch 11/20
50000/50000 [==============================] - 26s - loss: 2.2867 - acc: 0.3972    
Epoch 12/20
50000/50000 [==============================] - 26s - loss: 2.2496 - acc: 0.4077    
Epoch 13/20
50000/50000 [==============================] - 26s - loss: 2.2076 - acc: 0.4147    
Epoch 14/20
50000/50000 [==============================] - 26s - loss: 2.1861 - acc: 0.4180    
Epoch 15/20
50000/50000 [==============================] - 27s - loss: 2.1581 - acc: 0.4264    
Epoch 16/20
50000/50000 [==============================] - 26s - loss: 2.1305 - acc: 0.4296    
Epoch 17/20
50000/50000 [==============================] - 26s - loss: 2.1160 - acc: 0.4349    
Epoch 18/20
50000/50000 [==============================] - 26s - loss: 2.0865 - acc: 0.4414    
Epoch 19/20
50000/50000 [==============================] - 26s - loss: 2.0649 - acc: 0.4426    
Epoch 20/20
50000/50000 [==============================] - 26s - loss: 2.0420 - acc: 0.4499    
 9824/10000 [============================>.] - ETA: 0s
Out[3]:
[2.3901797870635986, 0.38519999999999999]

In [ ]: