Create Dataset


In [1]:
import os
import numpy as np
from PIL import Image

In [2]:
dataset = []
labels = []

In [3]:
for filename in os.listdir("dataset"):
    if filename.endswith(".png"):
        img_path = os.path.join("dataset", filename)
        im = Image.open(img_path).convert('L')
        dataset.append(np.asarray(im, dtype=np.float32))
        labels.append(filename.replace(".png", ""))

In [5]:
Image.fromarray(np.array(dataset[1], dtype=np.uint8))


Out[5]:

In [6]:
labels[1]


Out[6]:
'ekmyz'

In [7]:
ch2index = {}
index2ch = {}
index = 0
for label in labels:
    for ch in label:
        if ch not in ch2index.keys():
            ch2index[ch] = index
            index2ch[index] = ch
            index += 1

In [8]:
ch2index


Out[8]:
{'4': 0,
 'q': 1,
 'p': 2,
 '5': 3,
 'b': 4,
 'e': 5,
 'k': 6,
 'm': 7,
 'y': 8,
 'z': 9,
 '3': 10,
 'f': 11,
 'u': 12,
 '2': 13,
 'c': 14,
 '7': 15,
 'd': 16,
 'x': 17,
 's': 18,
 'h': 19,
 'v': 20,
 'g': 21,
 '8': 22,
 'n': 23,
 'r': 24,
 'a': 25,
 'w': 26,
 '6': 27,
 'o': 28,
 't': 29,
 '1': 30,
 'l': 31,
 'j': 32,
 '9': 33,
 'i': 34,
 '0': 35}

In [9]:
print("total char type:", len(ch2index))


total char type: 36

In [10]:
y0 = []
y1 = []
y2 = []
y3 = []
y4 = []

for label in labels:
    y0.append(ch2index[label[0]])
    y1.append(ch2index[label[1]])
    y2.append(ch2index[label[2]])
    y3.append(ch2index[label[3]])
    y4.append(ch2index[label[4]])

In [11]:
len(y0)


Out[11]:
11303

In [12]:
X_train = np.array(dataset[:10000]).reshape(-1, 40, 100, 1)
y_train = [
    np.array(y0[:10000]),
    np.array(y1[:10000]),
    np.array(y2[:10000]),
    np.array(y3[:10000]),
    np.array(y4[:10000]),
]

In [13]:
X_train.shape


Out[13]:
(10000, 40, 100, 1)

In [14]:
X_test = np.array(dataset[10000:]).reshape(-1, 40, 100, 1)
y_test = [
    np.array(y0[10000:]),
    np.array(y1[10000:]),
    np.array(y2[10000:]),
    np.array(y3[10000:]),
    np.array(y4[10000:]),
]

In [15]:
X_test.shape


Out[15]:
(1303, 40, 100, 1)

Train the Network


In [16]:
from keras.layers import Activation, Input, Dense, Conv2D, MaxPool2D, Dropout, Flatten, BatchNormalization
from keras.models import Model
from keras.layers.merge import Concatenate


def build_model():
    input_ = Input(shape=(40, 100, 1))

    # conv layer 1
    model = BatchNormalization()(input_)
    model = Conv2D(64, (5, 5), activation ='relu', padding='same')(model)
    model = MaxPool2D(pool_size=(2, 2))(model)

    # conv layer 2
    model = BatchNormalization()(model)
    model = Conv2D(128, (5, 5), activation ='relu', padding='valid')(model)
    model = MaxPool2D(pool_size=(2, 2))(model)

    # conv layer 3
    model = BatchNormalization()(model)
    model = Conv2D(256, (5, 5), activation ='relu', padding='valid')(model)
    model = MaxPool2D(pool_size=(2, 2))(model)
    model = Dropout(0.5)(model)

    # fully connected layer
    model = Flatten()(model)
    model = Dense(1024, activation='relu')(model)
    model = Dropout(0.5)(model)
    model = Dense(512, activation='relu')(model)

    x0 = Dense(36, activation='softmax')(model)
    x1 = Dense(36, activation='softmax')(model)
    x2 = Dense(36, activation='softmax')(model)
    x3 = Dense(36, activation='softmax')(model)
    x4 = Dense(36, activation='softmax')(model)

    x = [x0, x1, x2, x3, x4]

    model = Model(inputs=input_, outputs=x)
    return model


Using TensorFlow backend.

In [17]:
model = build_model()

In [18]:
model.summary()


__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            (None, 40, 100, 1)   0                                            
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 40, 100, 1)   4           input_1[0][0]                    
__________________________________________________________________________________________________
conv2d_1 (Conv2D)               (None, 40, 100, 64)  1664        batch_normalization_1[0][0]      
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D)  (None, 20, 50, 64)   0           conv2d_1[0][0]                   
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 20, 50, 64)   256         max_pooling2d_1[0][0]            
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 16, 46, 128)  204928      batch_normalization_2[0][0]      
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D)  (None, 8, 23, 128)   0           conv2d_2[0][0]                   
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 8, 23, 128)   512         max_pooling2d_2[0][0]            
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 4, 19, 256)   819456      batch_normalization_3[0][0]      
__________________________________________________________________________________________________
max_pooling2d_3 (MaxPooling2D)  (None, 2, 9, 256)    0           conv2d_3[0][0]                   
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 2, 9, 256)    0           max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
flatten_1 (Flatten)             (None, 4608)         0           dropout_1[0][0]                  
__________________________________________________________________________________________________
dense_1 (Dense)                 (None, 1024)         4719616     flatten_1[0][0]                  
__________________________________________________________________________________________________
dropout_2 (Dropout)             (None, 1024)         0           dense_1[0][0]                    
__________________________________________________________________________________________________
dense_2 (Dense)                 (None, 512)          524800      dropout_2[0][0]                  
__________________________________________________________________________________________________
dense_3 (Dense)                 (None, 36)           18468       dense_2[0][0]                    
__________________________________________________________________________________________________
dense_4 (Dense)                 (None, 36)           18468       dense_2[0][0]                    
__________________________________________________________________________________________________
dense_5 (Dense)                 (None, 36)           18468       dense_2[0][0]                    
__________________________________________________________________________________________________
dense_6 (Dense)                 (None, 36)           18468       dense_2[0][0]                    
__________________________________________________________________________________________________
dense_7 (Dense)                 (None, 36)           18468       dense_2[0][0]                    
==================================================================================================
Total params: 6,363,576
Trainable params: 6,363,190
Non-trainable params: 386
__________________________________________________________________________________________________

In [19]:
model.compile(loss='sparse_categorical_crossentropy',
              optimizer="rmsprop",
              metrics=['accuracy'])

In [20]:
model.fit(X_train, y_train, batch_size=64, epochs=20)


Epoch 1/20
10000/10000 [==============================] - 130s 13ms/step - loss: 17.4039 - dense_3_loss: 3.4982 - dense_4_loss: 3.4715 - dense_5_loss: 3.4784 - dense_6_loss: 3.4839 - dense_7_loss: 3.4718 - dense_3_acc: 0.0537 - dense_4_acc: 0.0573 - dense_5_acc: 0.0566 - dense_6_acc: 0.0555 - dense_7_acc: 0.0532
Epoch 2/20
10000/10000 [==============================] - 128s 13ms/step - loss: 16.0153 - dense_3_loss: 3.1650 - dense_4_loss: 3.1789 - dense_5_loss: 3.2152 - dense_6_loss: 3.2110 - dense_7_loss: 3.2452 - dense_3_acc: 0.0993 - dense_4_acc: 0.0992 - dense_5_acc: 0.0889 - dense_6_acc: 0.0953 - dense_7_acc: 0.0823
Epoch 3/20
10000/10000 [==============================] - 129s 13ms/step - loss: 14.5875 - dense_3_loss: 2.6951 - dense_4_loss: 2.8213 - dense_5_loss: 2.9941 - dense_6_loss: 3.0402 - dense_7_loss: 3.0368 - dense_3_acc: 0.2005 - dense_4_acc: 0.1867 - dense_5_acc: 0.1458 - dense_6_acc: 0.1419 - dense_7_acc: 0.1405
Epoch 4/20
10000/10000 [==============================] - 128s 13ms/step - loss: 13.0451 - dense_3_loss: 2.2482 - dense_4_loss: 2.4619 - dense_5_loss: 2.7246 - dense_6_loss: 2.8144 - dense_7_loss: 2.7959 - dense_3_acc: 0.3113 - dense_4_acc: 0.2707 - dense_5_acc: 0.2128 - dense_6_acc: 0.1921 - dense_7_acc: 0.2005
Epoch 5/20
10000/10000 [==============================] - 133s 13ms/step - loss: 11.5126 - dense_3_loss: 1.8725 - dense_4_loss: 2.1362 - dense_5_loss: 2.4230 - dense_6_loss: 2.5449 - dense_7_loss: 2.5360 - dense_3_acc: 0.4062 - dense_4_acc: 0.3502 - dense_5_acc: 0.2851 - dense_6_acc: 0.2603 - dense_7_acc: 0.2593
Epoch 6/20
10000/10000 [==============================] - 131s 13ms/step - loss: 10.1630 - dense_3_loss: 1.5773 - dense_4_loss: 1.8326 - dense_5_loss: 2.1425 - dense_6_loss: 2.3023 - dense_7_loss: 2.3082 - dense_3_acc: 0.4889 - dense_4_acc: 0.4292 - dense_5_acc: 0.3620 - dense_6_acc: 0.3222 - dense_7_acc: 0.3176
Epoch 7/20
10000/10000 [==============================] - 131s 13ms/step - loss: 8.8986 - dense_3_loss: 1.3316 - dense_4_loss: 1.5623 - dense_5_loss: 1.8948 - dense_6_loss: 2.0439 - dense_7_loss: 2.0660 - dense_3_acc: 0.5668 - dense_4_acc: 0.5147 - dense_5_acc: 0.4245 - dense_6_acc: 0.3913 - dense_7_acc: 0.3799
Epoch 8/20
10000/10000 [==============================] - 131s 13ms/step - loss: 7.8921 - dense_3_loss: 1.1384 - dense_4_loss: 1.3268 - dense_5_loss: 1.6781 - dense_6_loss: 1.8549 - dense_7_loss: 1.8939 - dense_3_acc: 0.6269 - dense_4_acc: 0.5827 - dense_5_acc: 0.4915 - dense_6_acc: 0.4496 - dense_7_acc: 0.4375
Epoch 9/20
10000/10000 [==============================] - 131s 13ms/step - loss: 7.0649 - dense_3_loss: 1.0169 - dense_4_loss: 1.1828 - dense_5_loss: 1.4890 - dense_6_loss: 1.6214 - dense_7_loss: 1.7548 - dense_3_acc: 0.6714 - dense_4_acc: 0.6208 - dense_5_acc: 0.5445 - dense_6_acc: 0.5136 - dense_7_acc: 0.4768
Epoch 10/20
10000/10000 [==============================] - 130s 13ms/step - loss: 6.3689 - dense_3_loss: 0.9045 - dense_4_loss: 1.0439 - dense_5_loss: 1.3408 - dense_6_loss: 1.4610 - dense_7_loss: 1.6186 - dense_3_acc: 0.7054 - dense_4_acc: 0.6679 - dense_5_acc: 0.5845 - dense_6_acc: 0.5555 - dense_7_acc: 0.5167
Epoch 11/20
10000/10000 [==============================] - 126s 13ms/step - loss: 5.8352 - dense_3_loss: 0.8410 - dense_4_loss: 0.9421 - dense_5_loss: 1.1900 - dense_6_loss: 1.3581 - dense_7_loss: 1.5040 - dense_3_acc: 0.7303 - dense_4_acc: 0.7036 - dense_5_acc: 0.6323 - dense_6_acc: 0.5874 - dense_7_acc: 0.5525
Epoch 12/20
10000/10000 [==============================] - 129s 13ms/step - loss: 5.4001 - dense_3_loss: 0.7569 - dense_4_loss: 0.8706 - dense_5_loss: 1.1074 - dense_6_loss: 1.2604 - dense_7_loss: 1.4049 - dense_3_acc: 0.7567 - dense_4_acc: 0.7237 - dense_5_acc: 0.6613 - dense_6_acc: 0.6118 - dense_7_acc: 0.5793
Epoch 13/20
10000/10000 [==============================] - 127s 13ms/step - loss: 4.9765 - dense_3_loss: 0.7195 - dense_4_loss: 0.7928 - dense_5_loss: 1.0283 - dense_6_loss: 1.1389 - dense_7_loss: 1.2970 - dense_3_acc: 0.7769 - dense_4_acc: 0.7489 - dense_5_acc: 0.6882 - dense_6_acc: 0.6517 - dense_7_acc: 0.6080
Epoch 14/20
10000/10000 [==============================] - 127s 13ms/step - loss: 4.6713 - dense_3_loss: 0.6568 - dense_4_loss: 0.7508 - dense_5_loss: 0.9450 - dense_6_loss: 1.0637 - dense_7_loss: 1.2550 - dense_3_acc: 0.7946 - dense_4_acc: 0.7651 - dense_5_acc: 0.7113 - dense_6_acc: 0.6732 - dense_7_acc: 0.6259
Epoch 15/20
10000/10000 [==============================] - 129s 13ms/step - loss: 4.3328 - dense_3_loss: 0.6213 - dense_4_loss: 0.6635 - dense_5_loss: 0.8873 - dense_6_loss: 0.9883 - dense_7_loss: 1.1724 - dense_3_acc: 0.8055 - dense_4_acc: 0.7941 - dense_5_acc: 0.7327 - dense_6_acc: 0.7013 - dense_7_acc: 0.6459
Epoch 16/20
10000/10000 [==============================] - 129s 13ms/step - loss: 4.0697 - dense_3_loss: 0.5923 - dense_4_loss: 0.6314 - dense_5_loss: 0.8309 - dense_6_loss: 0.9157 - dense_7_loss: 1.0994 - dense_3_acc: 0.8165 - dense_4_acc: 0.8036 - dense_5_acc: 0.7447 - dense_6_acc: 0.7223 - dense_7_acc: 0.6722
Epoch 17/20
10000/10000 [==============================] - 126s 13ms/step - loss: 3.8859 - dense_3_loss: 0.5622 - dense_4_loss: 0.6152 - dense_5_loss: 0.7800 - dense_6_loss: 0.8736 - dense_7_loss: 1.0548 - dense_3_acc: 0.8262 - dense_4_acc: 0.8146 - dense_5_acc: 0.7622 - dense_6_acc: 0.7366 - dense_7_acc: 0.6857
Epoch 18/20
10000/10000 [==============================] - 129s 13ms/step - loss: 3.6298 - dense_3_loss: 0.5218 - dense_4_loss: 0.5728 - dense_5_loss: 0.7317 - dense_6_loss: 0.8029 - dense_7_loss: 1.0005 - dense_3_acc: 0.8402 - dense_4_acc: 0.8259 - dense_5_acc: 0.7793 - dense_6_acc: 0.7576 - dense_7_acc: 0.7023
Epoch 19/20
10000/10000 [==============================] - 128s 13ms/step - loss: 3.4865 - dense_3_loss: 0.5116 - dense_4_loss: 0.5346 - dense_5_loss: 0.7000 - dense_6_loss: 0.7723 - dense_7_loss: 0.9681 - dense_3_acc: 0.8512 - dense_4_acc: 0.8394 - dense_5_acc: 0.7876 - dense_6_acc: 0.7646 - dense_7_acc: 0.7108
Epoch 20/20
10000/10000 [==============================] - 129s 13ms/step - loss: 3.2949 - dense_3_loss: 0.4881 - dense_4_loss: 0.5199 - dense_5_loss: 0.6318 - dense_6_loss: 0.7477 - dense_7_loss: 0.9074 - dense_3_acc: 0.8540 - dense_4_acc: 0.8432 - dense_5_acc: 0.8089 - dense_6_acc: 0.7674 - dense_7_acc: 0.7261
Out[20]:
<keras.callbacks.History at 0x7fed686c2198>

In [21]:
model.fit(X_train, y_train, batch_size=64, epochs=20) # train another 20 epochs


Epoch 1/20
10000/10000 [==============================] - 134s 13ms/step - loss: 3.1707 - dense_3_loss: 0.4545 - dense_4_loss: 0.5032 - dense_5_loss: 0.6494 - dense_6_loss: 0.7134 - dense_7_loss: 0.8503 - dense_3_acc: 0.8600 - dense_4_acc: 0.8483 - dense_5_acc: 0.8130 - dense_6_acc: 0.7829 - dense_7_acc: 0.7419
Epoch 2/20
10000/10000 [==============================] - 132s 13ms/step - loss: 3.0447 - dense_3_loss: 0.4513 - dense_4_loss: 0.4767 - dense_5_loss: 0.5917 - dense_6_loss: 0.6707 - dense_7_loss: 0.8543 - dense_3_acc: 0.8629 - dense_4_acc: 0.8580 - dense_5_acc: 0.8216 - dense_6_acc: 0.7986 - dense_7_acc: 0.7455
Epoch 3/20
10000/10000 [==============================] - 131s 13ms/step - loss: 2.9093 - dense_3_loss: 0.4492 - dense_4_loss: 0.4482 - dense_5_loss: 0.5578 - dense_6_loss: 0.6419 - dense_7_loss: 0.8121 - dense_3_acc: 0.8690 - dense_4_acc: 0.8612 - dense_5_acc: 0.8293 - dense_6_acc: 0.8092 - dense_7_acc: 0.7608
Epoch 4/20
10000/10000 [==============================] - 131s 13ms/step - loss: 2.8250 - dense_3_loss: 0.3995 - dense_4_loss: 0.4319 - dense_5_loss: 0.5739 - dense_6_loss: 0.6393 - dense_7_loss: 0.7804 - dense_3_acc: 0.8794 - dense_4_acc: 0.8701 - dense_5_acc: 0.8270 - dense_6_acc: 0.8125 - dense_7_acc: 0.7678
Epoch 5/20
10000/10000 [==============================] - 131s 13ms/step - loss: 2.6917 - dense_3_loss: 0.4024 - dense_4_loss: 0.4284 - dense_5_loss: 0.5524 - dense_6_loss: 0.5792 - dense_7_loss: 0.7293 - dense_3_acc: 0.8806 - dense_4_acc: 0.8706 - dense_5_acc: 0.8340 - dense_6_acc: 0.8230 - dense_7_acc: 0.7787
Epoch 6/20
10000/10000 [==============================] - 129s 13ms/step - loss: 2.6624 - dense_3_loss: 0.3926 - dense_4_loss: 0.4176 - dense_5_loss: 0.5179 - dense_6_loss: 0.5949 - dense_7_loss: 0.7394 - dense_3_acc: 0.8878 - dense_4_acc: 0.8771 - dense_5_acc: 0.8453 - dense_6_acc: 0.8238 - dense_7_acc: 0.7814
Epoch 7/20
10000/10000 [==============================] - 130s 13ms/step - loss: 2.5288 - dense_3_loss: 0.3700 - dense_4_loss: 0.3969 - dense_5_loss: 0.4867 - dense_6_loss: 0.5616 - dense_7_loss: 0.7135 - dense_3_acc: 0.8889 - dense_4_acc: 0.8814 - dense_5_acc: 0.8544 - dense_6_acc: 0.8352 - dense_7_acc: 0.7869
Epoch 8/20
10000/10000 [==============================] - 133s 13ms/step - loss: 2.4492 - dense_3_loss: 0.3782 - dense_4_loss: 0.3733 - dense_5_loss: 0.4724 - dense_6_loss: 0.5532 - dense_7_loss: 0.6721 - dense_3_acc: 0.8922 - dense_4_acc: 0.8850 - dense_5_acc: 0.8596 - dense_6_acc: 0.8387 - dense_7_acc: 0.7966
Epoch 9/20
10000/10000 [==============================] - 128s 13ms/step - loss: 2.3338 - dense_3_loss: 0.3563 - dense_4_loss: 0.3776 - dense_5_loss: 0.4586 - dense_6_loss: 0.4986 - dense_7_loss: 0.6426 - dense_3_acc: 0.8975 - dense_4_acc: 0.8913 - dense_5_acc: 0.8694 - dense_6_acc: 0.8494 - dense_7_acc: 0.8065
Epoch 10/20
10000/10000 [==============================] - 130s 13ms/step - loss: 2.2384 - dense_3_loss: 0.3475 - dense_4_loss: 0.3458 - dense_5_loss: 0.4169 - dense_6_loss: 0.5026 - dense_7_loss: 0.6255 - dense_3_acc: 0.9020 - dense_4_acc: 0.8962 - dense_5_acc: 0.8769 - dense_6_acc: 0.8532 - dense_7_acc: 0.8140
Epoch 11/20
10000/10000 [==============================] - 130s 13ms/step - loss: 2.1987 - dense_3_loss: 0.3336 - dense_4_loss: 0.3457 - dense_5_loss: 0.4354 - dense_6_loss: 0.4792 - dense_7_loss: 0.6049 - dense_3_acc: 0.9091 - dense_4_acc: 0.8956 - dense_5_acc: 0.8712 - dense_6_acc: 0.8581 - dense_7_acc: 0.8189
Epoch 12/20
10000/10000 [==============================] - 129s 13ms/step - loss: 2.1599 - dense_3_loss: 0.3192 - dense_4_loss: 0.3414 - dense_5_loss: 0.4284 - dense_6_loss: 0.4850 - dense_7_loss: 0.5858 - dense_3_acc: 0.9067 - dense_4_acc: 0.9009 - dense_5_acc: 0.8727 - dense_6_acc: 0.8565 - dense_7_acc: 0.8291
Epoch 13/20
10000/10000 [==============================] - 128s 13ms/step - loss: 2.1034 - dense_3_loss: 0.3470 - dense_4_loss: 0.3131 - dense_5_loss: 0.4009 - dense_6_loss: 0.4606 - dense_7_loss: 0.5817 - dense_3_acc: 0.9057 - dense_4_acc: 0.9042 - dense_5_acc: 0.8819 - dense_6_acc: 0.8693 - dense_7_acc: 0.8278
Epoch 14/20
10000/10000 [==============================] - 130s 13ms/step - loss: 2.0443 - dense_3_loss: 0.3195 - dense_4_loss: 0.3142 - dense_5_loss: 0.3931 - dense_6_loss: 0.4562 - dense_7_loss: 0.5613 - dense_3_acc: 0.9119 - dense_4_acc: 0.9069 - dense_5_acc: 0.8826 - dense_6_acc: 0.8665 - dense_7_acc: 0.8315
Epoch 15/20
10000/10000 [==============================] - 128s 13ms/step - loss: 1.9769 - dense_3_loss: 0.3062 - dense_4_loss: 0.3095 - dense_5_loss: 0.3891 - dense_6_loss: 0.4337 - dense_7_loss: 0.5383 - dense_3_acc: 0.9148 - dense_4_acc: 0.9100 - dense_5_acc: 0.8837 - dense_6_acc: 0.8726 - dense_7_acc: 0.8340
Epoch 16/20
10000/10000 [==============================] - 128s 13ms/step - loss: 1.9283 - dense_3_loss: 0.3143 - dense_4_loss: 0.3077 - dense_5_loss: 0.3630 - dense_6_loss: 0.4295 - dense_7_loss: 0.5138 - dense_3_acc: 0.9145 - dense_4_acc: 0.9110 - dense_5_acc: 0.8912 - dense_6_acc: 0.8757 - dense_7_acc: 0.8462
Epoch 17/20
10000/10000 [==============================] - 127s 13ms/step - loss: 1.8305 - dense_3_loss: 0.2886 - dense_4_loss: 0.2887 - dense_5_loss: 0.3627 - dense_6_loss: 0.4015 - dense_7_loss: 0.4890 - dense_3_acc: 0.9176 - dense_4_acc: 0.9178 - dense_5_acc: 0.8934 - dense_6_acc: 0.8837 - dense_7_acc: 0.8530
Epoch 18/20
10000/10000 [==============================] - 127s 13ms/step - loss: 1.8004 - dense_3_loss: 0.2878 - dense_4_loss: 0.2850 - dense_5_loss: 0.3465 - dense_6_loss: 0.3780 - dense_7_loss: 0.5030 - dense_3_acc: 0.9199 - dense_4_acc: 0.9177 - dense_5_acc: 0.8972 - dense_6_acc: 0.8901 - dense_7_acc: 0.8513
Epoch 19/20
10000/10000 [==============================] - 130s 13ms/step - loss: 1.7803 - dense_3_loss: 0.2859 - dense_4_loss: 0.2894 - dense_5_loss: 0.3370 - dense_6_loss: 0.3793 - dense_7_loss: 0.4887 - dense_3_acc: 0.9228 - dense_4_acc: 0.9174 - dense_5_acc: 0.9002 - dense_6_acc: 0.8918 - dense_7_acc: 0.8539
Epoch 20/20
10000/10000 [==============================] - 130s 13ms/step - loss: 1.7381 - dense_3_loss: 0.2770 - dense_4_loss: 0.2710 - dense_5_loss: 0.3547 - dense_6_loss: 0.3668 - dense_7_loss: 0.4686 - dense_3_acc: 0.9259 - dense_4_acc: 0.9207 - dense_5_acc: 0.8952 - dense_6_acc: 0.8884 - dense_7_acc: 0.8605
Out[21]:
<keras.callbacks.History at 0x7fed686c2dd8>

In [24]:
model.fit(X_train, y_train, batch_size=64, epochs=20)  # train another 20 epochs


Epoch 1/20
10000/10000 [==============================] - 133s 13ms/step - loss: 1.7041 - dense_3_loss: 0.2622 - dense_4_loss: 0.2790 - dense_5_loss: 0.3335 - dense_6_loss: 0.3717 - dense_7_loss: 0.4577 - dense_3_acc: 0.9266 - dense_4_acc: 0.9215 - dense_5_acc: 0.9062 - dense_6_acc: 0.8905 - dense_7_acc: 0.8598
Epoch 2/20
10000/10000 [==============================] - 134s 13ms/step - loss: 1.6605 - dense_3_loss: 0.2667 - dense_4_loss: 0.2672 - dense_5_loss: 0.3222 - dense_6_loss: 0.3525 - dense_7_loss: 0.4519 - dense_3_acc: 0.9260 - dense_4_acc: 0.9246 - dense_5_acc: 0.9106 - dense_6_acc: 0.8982 - dense_7_acc: 0.8681
Epoch 3/20
10000/10000 [==============================] - 132s 13ms/step - loss: 1.5936 - dense_3_loss: 0.2637 - dense_4_loss: 0.2449 - dense_5_loss: 0.3026 - dense_6_loss: 0.3575 - dense_7_loss: 0.4249 - dense_3_acc: 0.9265 - dense_4_acc: 0.9312 - dense_5_acc: 0.9138 - dense_6_acc: 0.8973 - dense_7_acc: 0.8719
Epoch 4/20
10000/10000 [==============================] - 133s 13ms/step - loss: 1.5142 - dense_3_loss: 0.2496 - dense_4_loss: 0.2322 - dense_5_loss: 0.2959 - dense_6_loss: 0.3256 - dense_7_loss: 0.4109 - dense_3_acc: 0.9312 - dense_4_acc: 0.9342 - dense_5_acc: 0.9128 - dense_6_acc: 0.9065 - dense_7_acc: 0.8753
Epoch 5/20
10000/10000 [==============================] - 138s 14ms/step - loss: 1.5126 - dense_3_loss: 0.2514 - dense_4_loss: 0.2226 - dense_5_loss: 0.3032 - dense_6_loss: 0.3300 - dense_7_loss: 0.4054 - dense_3_acc: 0.9306 - dense_4_acc: 0.9340 - dense_5_acc: 0.9112 - dense_6_acc: 0.9037 - dense_7_acc: 0.8811
Epoch 6/20
10000/10000 [==============================] - 135s 14ms/step - loss: 1.4691 - dense_3_loss: 0.2376 - dense_4_loss: 0.2275 - dense_5_loss: 0.2946 - dense_6_loss: 0.3234 - dense_7_loss: 0.3860 - dense_3_acc: 0.9361 - dense_4_acc: 0.9347 - dense_5_acc: 0.9131 - dense_6_acc: 0.9071 - dense_7_acc: 0.8841
Epoch 7/20
10000/10000 [==============================] - 140s 14ms/step - loss: 1.4910 - dense_3_loss: 0.2498 - dense_4_loss: 0.2253 - dense_5_loss: 0.2930 - dense_6_loss: 0.3253 - dense_7_loss: 0.3976 - dense_3_acc: 0.9318 - dense_4_acc: 0.9334 - dense_5_acc: 0.9139 - dense_6_acc: 0.9043 - dense_7_acc: 0.8857
Epoch 8/20
10000/10000 [==============================] - 141s 14ms/step - loss: 1.3717 - dense_3_loss: 0.2260 - dense_4_loss: 0.2154 - dense_5_loss: 0.2529 - dense_6_loss: 0.2925 - dense_7_loss: 0.3850 - dense_3_acc: 0.9360 - dense_4_acc: 0.9380 - dense_5_acc: 0.9255 - dense_6_acc: 0.9133 - dense_7_acc: 0.8842
Epoch 9/20
10000/10000 [==============================] - 139s 14ms/step - loss: 1.3729 - dense_3_loss: 0.2235 - dense_4_loss: 0.2286 - dense_5_loss: 0.2676 - dense_6_loss: 0.2946 - dense_7_loss: 0.3586 - dense_3_acc: 0.9374 - dense_4_acc: 0.9331 - dense_5_acc: 0.9231 - dense_6_acc: 0.9138 - dense_7_acc: 0.8895
Epoch 10/20
10000/10000 [==============================] - 138s 14ms/step - loss: 1.3544 - dense_3_loss: 0.2196 - dense_4_loss: 0.2309 - dense_5_loss: 0.2574 - dense_6_loss: 0.2875 - dense_7_loss: 0.3590 - dense_3_acc: 0.9385 - dense_4_acc: 0.9367 - dense_5_acc: 0.9263 - dense_6_acc: 0.9145 - dense_7_acc: 0.8911
Epoch 11/20
10000/10000 [==============================] - 136s 14ms/step - loss: 1.3212 - dense_3_loss: 0.2145 - dense_4_loss: 0.2049 - dense_5_loss: 0.2587 - dense_6_loss: 0.2889 - dense_7_loss: 0.3542 - dense_3_acc: 0.9428 - dense_4_acc: 0.9376 - dense_5_acc: 0.9260 - dense_6_acc: 0.9157 - dense_7_acc: 0.8941
Epoch 12/20
10000/10000 [==============================] - 131s 13ms/step - loss: 1.2435 - dense_3_loss: 0.1949 - dense_4_loss: 0.2043 - dense_5_loss: 0.2491 - dense_6_loss: 0.2499 - dense_7_loss: 0.3452 - dense_3_acc: 0.9451 - dense_4_acc: 0.9416 - dense_5_acc: 0.9283 - dense_6_acc: 0.9226 - dense_7_acc: 0.8982
Epoch 13/20
10000/10000 [==============================] - 132s 13ms/step - loss: 1.2486 - dense_3_loss: 0.2117 - dense_4_loss: 0.1967 - dense_5_loss: 0.2392 - dense_6_loss: 0.2653 - dense_7_loss: 0.3356 - dense_3_acc: 0.9425 - dense_4_acc: 0.9434 - dense_5_acc: 0.9290 - dense_6_acc: 0.9237 - dense_7_acc: 0.8976
Epoch 14/20
10000/10000 [==============================] - 130s 13ms/step - loss: 1.2140 - dense_3_loss: 0.1953 - dense_4_loss: 0.1848 - dense_5_loss: 0.2439 - dense_6_loss: 0.2696 - dense_7_loss: 0.3204 - dense_3_acc: 0.9461 - dense_4_acc: 0.9491 - dense_5_acc: 0.9308 - dense_6_acc: 0.9200 - dense_7_acc: 0.9061
Epoch 15/20
10000/10000 [==============================] - 130s 13ms/step - loss: 1.1868 - dense_3_loss: 0.1989 - dense_4_loss: 0.1856 - dense_5_loss: 0.2215 - dense_6_loss: 0.2572 - dense_7_loss: 0.3235 - dense_3_acc: 0.9439 - dense_4_acc: 0.9447 - dense_5_acc: 0.9361 - dense_6_acc: 0.9256 - dense_7_acc: 0.9057
Epoch 16/20
10000/10000 [==============================] - 129s 13ms/step - loss: 1.1371 - dense_3_loss: 0.1906 - dense_4_loss: 0.1700 - dense_5_loss: 0.2244 - dense_6_loss: 0.2469 - dense_7_loss: 0.3051 - dense_3_acc: 0.9517 - dense_4_acc: 0.9504 - dense_5_acc: 0.9339 - dense_6_acc: 0.9269 - dense_7_acc: 0.9102
Epoch 17/20
10000/10000 [==============================] - 131s 13ms/step - loss: 1.1564 - dense_3_loss: 0.1838 - dense_4_loss: 0.1937 - dense_5_loss: 0.2246 - dense_6_loss: 0.2578 - dense_7_loss: 0.2964 - dense_3_acc: 0.9482 - dense_4_acc: 0.9460 - dense_5_acc: 0.9329 - dense_6_acc: 0.9253 - dense_7_acc: 0.9100
Epoch 18/20
10000/10000 [==============================] - 130s 13ms/step - loss: 1.0924 - dense_3_loss: 0.1729 - dense_4_loss: 0.1761 - dense_5_loss: 0.2261 - dense_6_loss: 0.2267 - dense_7_loss: 0.2906 - dense_3_acc: 0.9508 - dense_4_acc: 0.9497 - dense_5_acc: 0.9364 - dense_6_acc: 0.9349 - dense_7_acc: 0.9123
Epoch 19/20
10000/10000 [==============================] - 134s 13ms/step - loss: 1.0852 - dense_3_loss: 0.1775 - dense_4_loss: 0.1768 - dense_5_loss: 0.2042 - dense_6_loss: 0.2506 - dense_7_loss: 0.2761 - dense_3_acc: 0.9516 - dense_4_acc: 0.9515 - dense_5_acc: 0.9394 - dense_6_acc: 0.9280 - dense_7_acc: 0.9169
Epoch 20/20
10000/10000 [==============================] - 134s 13ms/step - loss: 1.0861 - dense_3_loss: 0.1765 - dense_4_loss: 0.1629 - dense_5_loss: 0.2100 - dense_6_loss: 0.2567 - dense_7_loss: 0.2800 - dense_3_acc: 0.9505 - dense_4_acc: 0.9535 - dense_5_acc: 0.9394 - dense_6_acc: 0.9295 - dense_7_acc: 0.9175
Out[24]:
<keras.callbacks.History at 0x7fed3033c9b0>

In [25]:
res = model.evaluate(X_test, y_test)


1303/1303 [==============================] - 4s 3ms/step

In [26]:
res[6:]


Out[26]:
[0.9470452804887468,
 0.9447429011806743,
 0.9109746733264381,
 0.8656945514020239,
 0.8096699927371004]

In [27]:
model.save("weibo.com.h5")

Use in product


In [28]:
Image.fromarray(np.array(X_test[0].reshape(40, 100), dtype=np.uint8))


Out[28]:

In [29]:
res = model.predict(np.array([X_test[0]]).reshape(1, 40, 100, 1))

In [30]:
print(index2ch[res[0].argmax(1)[0]], 
      index2ch[res[1].argmax(1)[0]], 
      index2ch[res[2].argmax(1)[0]], 
      index2ch[res[3].argmax(1)[0]], 
      index2ch[res[4].argmax(1)[0]])


z b b n k

In [49]:
Image.fromarray(np.array(X_test[1].reshape(40, 100), dtype=np.uint8))


Out[49]:

In [50]:
res = model.predict(np.array([X_test[1]]).reshape(1, 40, 100, 1))

In [51]:
print(index2ch[res[0].argmax(1)[0]], 
      index2ch[res[1].argmax(1)[0]], 
      index2ch[res[2].argmax(1)[0]], 
      index2ch[res[3].argmax(1)[0]], 
      index2ch[res[4].argmax(1)[0]])


x e f e d