In [1]:
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import Adam 
from keras.utils import np_utils
 
from keras.utils.vis_utils import plot_model
import matplotlib.pyplot as plt


Using TensorFlow backend.

In [2]:
def load_mnist(path, kind='train'):
    """Load (Fashion-)MNIST data from `path`."""
    import os
    import gzip
    import numpy as np
    labels_path = os.path.join(path,
                               '%s-labels-idx1-ubyte.gz'
                               % kind)
    images_path = os.path.join(path,
                               '%s-images-idx3-ubyte.gz'
                               % kind)

    with gzip.open(labels_path, 'rb') as lbpath:
        labels = np.frombuffer(lbpath.read(), dtype=np.uint8,
                               offset=8)

    with gzip.open(images_path, 'rb') as imgpath:
        images = np.frombuffer(imgpath.read(), dtype=np.uint8,
                               offset=16).reshape(len(labels), 784)

    return images, labels
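
A quick sanity check (not part of the original notebook) confirms the loader returns the expected shapes; `./data/fashion` is assumed to hold the four gzipped Fashion-MNIST IDX files:

X, y = load_mnist('./data/fashion', kind='train')
print(X.shape)           # (60000, 784)
print(y.shape)           # (60000,)
print(y.min(), y.max())  # 0 9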

Building the model


In [3]:
def build_model():
    # Build the model
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
 
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
 
    model.add(Dense(10))
    model.add(Activation('softmax'))
 
    # Define the loss, optimizer, and metrics
    model.compile(
        loss='categorical_crossentropy',
        optimizer=Adam(),
        metrics=['accuracy'])
    
    return model
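
build_model() returns a compiled 784-512-512-10 multilayer perceptron with ReLU activations and 20% dropout after each hidden layer. As an optional check (not in the original notebook), model.summary() prints the layer shapes and parameter counts:

model = build_model()
model.summary()
# Dense(784 -> 512): 784*512 + 512 = 401,920 params
# Dense(512 -> 512): 512*512 + 512 = 262,656 params
# Dense(512 -> 10):  512*10  + 10  =   5,130 params  (669,706 total)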

Plotting helper


In [4]:
def plot_history(history):
    # Plot the accuracy history
    plt.plot(history.history['acc'],"o-",label="accuracy")
    plt.plot(history.history['val_acc'],"o-",label="val_acc")
    plt.title('model accuracy')
    plt.xlabel('epoch')
    plt.ylabel('accuracy')
    plt.legend(loc="lower right")
    plt.show()
 
    # Plot the loss history
    plt.plot(history.history['loss'],"o-",label="loss")
    plt.plot(history.history['val_loss'],"o-",label="val_loss")
    plt.title('model loss')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    plt.legend(loc='lower right')
    plt.show()

Loading the dataset


In [5]:
# Load the Fashion-MNIST data
# 60,000 training samples, 10,000 test samples
# 28 pixels x 28 pixels = 784 pixels per image
# Pixel values range from 0 to 255
X_train, y_train = load_mnist('./data/fashion', kind='train')
X_test, y_test = load_mnist('./data/fashion', kind='t10k')
X_train = X_train.reshape(60000, 784).astype('float32')
X_test  = X_test.reshape(10000, 784).astype('float32')
X_train /= 255
X_test  /= 255
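
After the reshape and the division by 255, the inputs are float32 values in [0, 1]; an optional assertion (not in the original notebook) makes this explicit:

# Optional check: normalized pixel values should lie in [0, 1]
assert X_train.shape == (60000, 784) and X_test.shape == (10000, 784)
assert X_train.dtype == 'float32'
assert 0.0 <= X_train.min() and X_train.max() <= 1.0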

In [6]:
# Convert labels to 10-dimensional one-hot vectors, e.g. class 5 becomes [0,0,0,0,0,1,0,0,0,0]
y_train = np_utils.to_categorical(y_train, 10)
y_test  = np_utils.to_categorical(y_test, 10)
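
A quick illustrative print (not in the original notebook) shows what the one-hot rows look like:

print(y_train.shape)  # (60000, 10)
print(y_train[0])     # a one-hot row, e.g. [0. 0. 0. 0. 0. 0. 0. 0. 0. 1.] if the first label is class 9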

In [7]:
nb_epoch = 50    # number of training epochs, i.e. full passes over the training set
batch_size = 128 # 128 randomly drawn images per gradient update; no particular theory behind this choice
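
With 60,000 training images and a batch size of 128, each epoch performs ceil(60000/128) = 469 gradient updates; a one-liner (illustrative only) confirms the count:

import math
print(math.ceil(60000 / batch_size))  # 469 weight updates per epoch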

In [8]:
# Train the model on the data
model = build_model()
history = model.fit(X_train, y_train,
    epochs=nb_epoch,
    batch_size=batch_size,
    validation_data=(X_test, y_test)
)

Train on 60000 samples, validate on 10000 samples
Epoch 1/50
60000/60000 [==============================] - 11s - loss: 0.5141 - acc: 0.8155 - val_loss: 0.4199 - val_acc: 0.8506
Epoch 2/50
60000/60000 [==============================] - 11s - loss: 0.3802 - acc: 0.8597 - val_loss: 0.3778 - val_acc: 0.8608
Epoch 3/50
60000/60000 [==============================] - 10s - loss: 0.3486 - acc: 0.8708 - val_loss: 0.3531 - val_acc: 0.8711
Epoch 4/50
60000/60000 [==============================] - 12s - loss: 0.3278 - acc: 0.8789 - val_loss: 0.3470 - val_acc: 0.8759
Epoch 5/50
60000/60000 [==============================] - 10s - loss: 0.3096 - acc: 0.8857 - val_loss: 0.3454 - val_acc: 0.8769
Epoch 6/50
60000/60000 [==============================] - 11s - loss: 0.2968 - acc: 0.8898 - val_loss: 0.3446 - val_acc: 0.8745
Epoch 7/50
60000/60000 [==============================] - 13s - loss: 0.2857 - acc: 0.8937 - val_loss: 0.3281 - val_acc: 0.8782
Epoch 8/50
60000/60000 [==============================] - 15s - loss: 0.2759 - acc: 0.8965 - val_loss: 0.3332 - val_acc: 0.8821
Epoch 9/50
60000/60000 [==============================] - 9s - loss: 0.2661 - acc: 0.8992 - val_loss: 0.3252 - val_acc: 0.8858
Epoch 10/50
60000/60000 [==============================] - 10s - loss: 0.2616 - acc: 0.9022 - val_loss: 0.3230 - val_acc: 0.8867
Epoch 11/50
60000/60000 [==============================] - 13s - loss: 0.2561 - acc: 0.9039 - val_loss: 0.3337 - val_acc: 0.8852
Epoch 12/50
60000/60000 [==============================] - 11s - loss: 0.2475 - acc: 0.9070 - val_loss: 0.3030 - val_acc: 0.8914
Epoch 13/50
60000/60000 [==============================] - 13s - loss: 0.2403 - acc: 0.9086 - val_loss: 0.3161 - val_acc: 0.8866
Epoch 14/50
60000/60000 [==============================] - 11s - loss: 0.2384 - acc: 0.9094 - val_loss: 0.3166 - val_acc: 0.8873
Epoch 15/50
60000/60000 [==============================] - 13s - loss: 0.2336 - acc: 0.9101 - val_loss: 0.3074 - val_acc: 0.8895
Epoch 16/50
60000/60000 [==============================] - 12s - loss: 0.2280 - acc: 0.9124 - val_loss: 0.3056 - val_acc: 0.8906
Epoch 17/50
60000/60000 [==============================] - 13s - loss: 0.2210 - acc: 0.9165 - val_loss: 0.3272 - val_acc: 0.8864
Epoch 18/50
60000/60000 [==============================] - 11s - loss: 0.2169 - acc: 0.9181 - val_loss: 0.3082 - val_acc: 0.8945
Epoch 19/50
60000/60000 [==============================] - 11s - loss: 0.2104 - acc: 0.9195 - val_loss: 0.3157 - val_acc: 0.8924
Epoch 20/50
60000/60000 [==============================] - 11s - loss: 0.2061 - acc: 0.9209 - val_loss: 0.3081 - val_acc: 0.8958
Epoch 21/50
60000/60000 [==============================] - 11s - loss: 0.2033 - acc: 0.9224 - val_loss: 0.3230 - val_acc: 0.8947
Epoch 22/50
60000/60000 [==============================] - 10s - loss: 0.2019 - acc: 0.9234 - val_loss: 0.3242 - val_acc: 0.8928
Epoch 23/50
60000/60000 [==============================] - 11s - loss: 0.1948 - acc: 0.9259 - val_loss: 0.3253 - val_acc: 0.8893
Epoch 24/50
60000/60000 [==============================] - 11s - loss: 0.1961 - acc: 0.9243 - val_loss: 0.3227 - val_acc: 0.8918
Epoch 25/50
60000/60000 [==============================] - 10s - loss: 0.1873 - acc: 0.9274 - val_loss: 0.3103 - val_acc: 0.8971
Epoch 26/50
60000/60000 [==============================] - 10s - loss: 0.1846 - acc: 0.9294 - val_loss: 0.3261 - val_acc: 0.8939
Epoch 27/50
60000/60000 [==============================] - 11s - loss: 0.1845 - acc: 0.9287 - val_loss: 0.3346 - val_acc: 0.8953
Epoch 28/50
60000/60000 [==============================] - 10s - loss: 0.1812 - acc: 0.9302 - val_loss: 0.3109 - val_acc: 0.8975
Epoch 29/50
60000/60000 [==============================] - 11s - loss: 0.1747 - acc: 0.9321 - val_loss: 0.3350 - val_acc: 0.8972
Epoch 30/50
60000/60000 [==============================] - 10s - loss: 0.1759 - acc: 0.9321 - val_loss: 0.3392 - val_acc: 0.8950
Epoch 31/50
60000/60000 [==============================] - 11s - loss: 0.1714 - acc: 0.9334 - val_loss: 0.3287 - val_acc: 0.8979
Epoch 32/50
60000/60000 [==============================] - 12s - loss: 0.1681 - acc: 0.9357 - val_loss: 0.3412 - val_acc: 0.8977
Epoch 33/50
60000/60000 [==============================] - 11s - loss: 0.1661 - acc: 0.9366 - val_loss: 0.3320 - val_acc: 0.8949
Epoch 34/50
60000/60000 [==============================] - 9s - loss: 0.1651 - acc: 0.9360 - val_loss: 0.3239 - val_acc: 0.9009
Epoch 35/50
60000/60000 [==============================] - 10s - loss: 0.1591 - acc: 0.9385 - val_loss: 0.3362 - val_acc: 0.9002
Epoch 36/50
60000/60000 [==============================] - 9s - loss: 0.1606 - acc: 0.9386 - val_loss: 0.3263 - val_acc: 0.8996
Epoch 37/50
60000/60000 [==============================] - 9s - loss: 0.1527 - acc: 0.9412 - val_loss: 0.3559 - val_acc: 0.8999
Epoch 38/50
60000/60000 [==============================] - 9s - loss: 0.1566 - acc: 0.9413 - val_loss: 0.3359 - val_acc: 0.8974
Epoch 39/50
60000/60000 [==============================] - 9s - loss: 0.1533 - acc: 0.9409 - val_loss: 0.3553 - val_acc: 0.8974
Epoch 40/50
60000/60000 [==============================] - 9s - loss: 0.1515 - acc: 0.9413 - val_loss: 0.3480 - val_acc: 0.8965
Epoch 41/50
60000/60000 [==============================] - 9s - loss: 0.1446 - acc: 0.9442 - val_loss: 0.3281 - val_acc: 0.9038
Epoch 42/50
60000/60000 [==============================] - 9s - loss: 0.1464 - acc: 0.9433 - val_loss: 0.3479 - val_acc: 0.9000
Epoch 43/50
60000/60000 [==============================] - 9s - loss: 0.1444 - acc: 0.9450 - val_loss: 0.3572 - val_acc: 0.9005
Epoch 44/50
60000/60000 [==============================] - 10s - loss: 0.1429 - acc: 0.9450 - val_loss: 0.3571 - val_acc: 0.8966
Epoch 45/50
60000/60000 [==============================] - 10s - loss: 0.1401 - acc: 0.9463 - val_loss: 0.3572 - val_acc: 0.9007
Epoch 46/50
60000/60000 [==============================] - 10s - loss: 0.1387 - acc: 0.9465 - val_loss: 0.3551 - val_acc: 0.8976
Epoch 47/50
60000/60000 [==============================] - 10s - loss: 0.1382 - acc: 0.9461 - val_loss: 0.3469 - val_acc: 0.8987
Epoch 48/50
60000/60000 [==============================] - 11s - loss: 0.1408 - acc: 0.9454 - val_loss: 0.3517 - val_acc: 0.9007
Epoch 49/50
60000/60000 [==============================] - 11s - loss: 0.1346 - acc: 0.9492 - val_loss: 0.3744 - val_acc: 0.9000
Epoch 50/50
60000/60000 [==============================] - 11s - loss: 0.1327 - acc: 0.9490 - val_loss: 0.3625 - val_acc: 0.9011
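
In the log above, val_loss bottoms out around epoch 12 (0.3030) and then drifts upward while the training loss keeps falling, a typical sign of overfitting. A minimal sketch using keras.callbacks.EarlyStopping (a standard Keras 2 callback, not part of the original run) would halt training once val_loss stops improving:

from keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=5)
model = build_model()
history = model.fit(X_train, y_train,
    epochs=nb_epoch,
    batch_size=batch_size,
    validation_data=(X_test, y_test),
    callbacks=[early_stop])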

In [9]:
# Save the trained model
json_string = model.to_json()

# Model architecture file (extension .json)
with open('mnist.json', 'w') as f:
    f.write(json_string)
# Save the weights (extension .hdf5)
model.save_weights('out/mnist.hdf5')
 
# Evaluate the model on the test set
score = model.evaluate(X_test, y_test, verbose=1)
 
print('loss=', score[0])
print('accuracy=', score[1])
    
# Plot how accuracy and loss evolved during training
plot_history(history)


 9824/10000 [============================>.] - ETA: 0s
loss= 0.362501607373
accuracy= 0.9011
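
To restore the model later, the saved JSON and weights can be loaded back with the standard Keras calls model_from_json and load_weights; a minimal sketch (recompiling as in build_model above):

from keras.models import model_from_json

with open('mnist.json') as f:
    restored = model_from_json(f.read())
restored.load_weights('out/mnist.hdf5')
restored.compile(loss='categorical_crossentropy',
                 optimizer=Adam(),
                 metrics=['accuracy'])
print(restored.evaluate(X_test, y_test, verbose=0))  # should reproduce the scores above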

In [ ]: