In [2]:
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import RMSprop
from keras.callbacks import EarlyStopping, CSVLogger
import matplotlib.pyplot as plt
In [2]:
batch_size = 128
num_classes = 10
epochs = 20
In [3]:
(x_train, y_train), (x_test, y_test) = mnist.load_data()
In [4]:
x_train = x_train.reshape(60000, 784).astype('float32')
x_test = x_test.reshape(10000, 784).astype('float32')
x_train /= 255
x_test /= 255
In [5]:
print(x_train.shape)
print(y_train.shape)
print(x_test.shape)
print(y_test.shape)
In [6]:
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
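keras.utils.to_categorical converts the integer labels 0-9 into one-hot vectors. A minimal sketch of the equivalent NumPy operation (the variable names here are illustrative, not part of the notebook):

import numpy as np

labels = np.array([3, 1, 4])           # example integer labels
one_hot = np.eye(num_classes)[labels]  # pick rows of the 10x10 identity matrix
# one_hot[0] is [0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]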
In [7]:
print(y_train.shape)
print(y_test.shape)
In [20]:
model = Sequential()
model.add(Dense(512, input_shape=(784, )))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
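The same two-hidden-layer MLP could also be written with the Keras functional API; this is only an equivalent sketch for comparison, not what the notebook runs:

from keras.layers import Input
from keras.models import Model

inputs = Input(shape=(784,))
x = Dense(512, activation='relu')(inputs)
x = Dropout(0.2)(x)
x = Dense(512, activation='relu')(x)
x = Dropout(0.2)(x)
outputs = Dense(10, activation='softmax')(x)
functional_model = Model(inputs=inputs, outputs=outputs)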
In [9]:
model.summary()
In [21]:
model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])
In [30]:
es = EarlyStopping(monitor='val_loss', patience=2)
csv_logger = CSVLogger('training.log')
hist = model.fit(x_train, y_train,
                 batch_size=batch_size,
                 epochs=epochs,
                 verbose=1,
                 validation_split=0.1,
                 callbacks=[es, csv_logger])
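EarlyStopping with patience=2 halts training once val_loss has failed to improve for two consecutive epochs. If you also want to keep the best weights seen during the run, a ModelCheckpoint callback can be added; a sketch (the filename is arbitrary):

from keras.callbacks import ModelCheckpoint

ckpt = ModelCheckpoint('best_model.h5', monitor='val_loss',
                       save_best_only=True, verbose=1)
# then pass callbacks=[es, csv_logger, ckpt] to model.fit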
In [31]:
%cat training.log
In [40]:
score = model.evaluate(x_test, y_test, verbose=0)
print('test loss:', score[0])
print('test acc:', score[1])
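To inspect individual predictions rather than aggregate metrics, model.predict returns one softmax distribution per sample; a small sketch:

import numpy as np

probs = model.predict(x_test[:5])      # shape (5, 10), one probability row per image
preds = np.argmax(probs, axis=1)       # predicted digit for each of the 5 images
truth = np.argmax(y_test[:5], axis=1)  # undo the one-hot encoding of the labels
print(preds, truth)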
In [39]:
loss = hist.history['loss']
val_loss = hist.history['val_loss']
num_epochs = len(loss)  # EarlyStopping may stop before the configured 20 epochs
plt.plot(range(num_epochs), loss, marker='.', label='loss')
plt.plot(range(num_epochs), val_loss, marker='.', label='val_loss')
plt.legend(loc='best')
plt.grid()
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
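The accuracy curves can be plotted the same way. Note that the history key is 'acc'/'val_acc' in older Keras releases and 'accuracy'/'val_accuracy' in newer ones; the sketch below assumes the older keys:

acc = hist.history['acc']
val_acc = hist.history['val_acc']
plt.plot(range(len(acc)), acc, marker='.', label='acc')
plt.plot(range(len(acc)), val_acc, marker='.', label='val_acc')
plt.legend(loc='best')
plt.grid()
plt.xlabel('epoch')
plt.ylabel('acc')
plt.show()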
In [42]:
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
import keras.utils
In [6]:
batch_size = 128
num_classes = 10
epochs = 12
In [7]:
img_rows, img_cols = 28, 28
In [8]:
(x_train, y_train), (x_test, y_test) = mnist.load_data()
In [9]:
print(x_train.shape)
In [10]:
# Assumes the channels-last layout: (samples, rows, cols, channels)
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
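The reshape above hard-codes the channels-last layout. The backend import (K) can be used to support both layouts, as the upstream Keras MNIST CNN example does; a sketch:

if K.image_data_format() == 'channels_first':
    x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
    x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
    x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)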
In [11]:
x_train.shape
Out[11]:
In [12]:
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
In [13]:
x_train /= 255
x_test /= 255
In [41]:
x_test.shape
Out[41]:
In [43]:
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
In [34]:
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
In [35]:
model.summary()
In [36]:
model.layers
Out[36]:
In [38]:
model.layers[0].name
Out[38]:
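Beyond names, each layer also exposes its output shape, which is handy for checking how the spatial dimensions shrink through the conv/pool stack; a sketch:

for layer in model.layers:
    print(layer.name, layer.output_shape)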
In [39]:
model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])
In [45]:
model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=1,
          verbose=1,
          validation_data=(x_test, y_test))
Out[45]:
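Once trained, the model can be saved and reloaded in one step (architecture, weights, and optimizer state together); the filename here is arbitrary:

from keras.models import load_model

model.save('mnist_cnn.h5')            # single HDF5 file
restored = load_model('mnist_cnn.h5')
restored.evaluate(x_test, y_test, verbose=0)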
In [ ]: