In [1]:
import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.utils import np_utils


Using Theano backend.
Using gpu device 0: GeForce GTX 750 (CNMeM is disabled, cuDNN not available)

In [2]:
# fix random seed for reproducibility
numpy.random.seed(7)

In [3]:
# define the raw dataset
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
# create mapping of characters to integers (0-25) and the reverse
char_to_int = dict((c, i) for i, c in enumerate(alphabet))
int_to_char = dict((i, c) for i, c in enumerate(alphabet))

In [17]:
# prepare the dataset of input to output pairs encoded as integers
seq_length = 1
dataX = []
dataY = []
def create_XY(seq_length, alphabet, dataX, dataY):
    # slide a window of seq_length characters over the alphabet, appending
    # the integer-encoded window to dataX and the next character to dataY
    # (uses the global char_to_int mapping defined above)
    for i in range(0, len(alphabet) - seq_length, 1):
        seq_in = alphabet[i:i + seq_length]
        seq_out = alphabet[i + seq_length]
        dataX.append([char_to_int[char] for char in seq_in])
        dataY.append(char_to_int[seq_out])
        print(seq_in, '->', seq_out)

create_XY(seq_length, alphabet, dataX, dataY)


A -> B
B -> C
C -> D
D -> E
E -> F
F -> G
G -> H
H -> I
I -> J
J -> K
K -> L
L -> M
M -> N
N -> O
O -> P
P -> Q
Q -> R
R -> S
S -> T
T -> U
U -> V
V -> W
W -> X
X -> Y
Y -> Z

In [18]:
dataX, dataY
len(dataX)


Out[18]:
([[0],
  [1],
  [2],
  [3],
  [4],
  [5],
  [6],
  [7],
  [8],
  [9],
  [10],
  [11],
  [12],
  [13],
  [14],
  [15],
  [16],
  [17],
  [18],
  [19],
  [20],
  [21],
  [22],
  [23],
  [24]],
 [1,
  2,
  3,
  4,
  5,
  6,
  7,
  8,
  9,
  10,
  11,
  12,
  13,
  14,
  15,
  16,
  17,
  18,
  19,
  20,
  21,
  22,
  23,
  24,
  25])
Out[18]:
25

In [32]:
# reshape X to be [samples, time steps, features]
X = numpy.reshape(dataX, (len(dataX), seq_length, 1))
X.shape
X[0:3]


Out[32]:
(25, 1, 1)
Out[32]:
array([[[0]],

       [[1]],

       [[2]]])

In [7]:
# normalize
X = X / float(len(alphabet))

In [8]:
# one hot encode the output variable
y = np_utils.to_categorical(dataY)

Naive LSTM for Learning One-Char to One-Char Mapping

Let’s start off by designing a simple LSTM to learn how to predict the next character in the alphabet given the context of just one character.

We will frame the problem as a random collection of one-letter input to one-letter output pairs. As we will see, this is a difficult framing of the problem for the LSTM to learn.

Let’s define an LSTM network with 32 units and a fully connected output layer with one neuron per class and a softmax activation function for making predictions. Because this is a multi-class classification problem, we can use the log loss function (called “categorical_crossentropy” in Keras) and optimize the network using the Adam optimization algorithm.

The model is fit over 500 epochs with a batch size of 1.


In [10]:
# create and fit the model
model = Sequential()
model.add(LSTM(32, input_shape=(X.shape[1], X.shape[2])))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(X, y, nb_epoch=500, batch_size=1, verbose=2)


Epoch 1/500
0s - loss: 3.2674 - acc: 0.0000e+00
Epoch 2/500
0s - loss: 3.2564 - acc: 0.0400
Epoch 3/500
0s - loss: 3.2521 - acc: 0.0400
Epoch 4/500
0s - loss: 3.2476 - acc: 0.0400
Epoch 5/500
0s - loss: 3.2436 - acc: 0.0400
Epoch 6/500
0s - loss: 3.2389 - acc: 0.0400
Epoch 7/500
0s - loss: 3.2352 - acc: 0.0400
Epoch 8/500
0s - loss: 3.2307 - acc: 0.0400
Epoch 9/500
0s - loss: 3.2262 - acc: 0.0400
Epoch 10/500
0s - loss: 3.2220 - acc: 0.0400
Epoch 11/500
0s - loss: 3.2179 - acc: 0.0400
Epoch 12/500
0s - loss: 3.2135 - acc: 0.0400
Epoch 13/500
0s - loss: 3.2085 - acc: 0.0400
Epoch 14/500
0s - loss: 3.2041 - acc: 0.0400
Epoch 15/500
0s - loss: 3.1992 - acc: 0.0400
Epoch 16/500
0s - loss: 3.1943 - acc: 0.0800
Epoch 17/500
0s - loss: 3.1890 - acc: 0.0800
Epoch 18/500
0s - loss: 3.1841 - acc: 0.0800
Epoch 19/500
0s - loss: 3.1787 - acc: 0.0800
Epoch 20/500
0s - loss: 3.1729 - acc: 0.0800
Epoch 21/500
0s - loss: 3.1664 - acc: 0.0800
Epoch 22/500
0s - loss: 3.1603 - acc: 0.0400
Epoch 23/500
0s - loss: 3.1540 - acc: 0.0800
Epoch 24/500
0s - loss: 3.1472 - acc: 0.0800
Epoch 25/500
0s - loss: 3.1407 - acc: 0.0800
Epoch 26/500
0s - loss: 3.1336 - acc: 0.0800
Epoch 27/500
0s - loss: 3.1258 - acc: 0.0800
Epoch 28/500
0s - loss: 3.1181 - acc: 0.0800
Epoch 29/500
0s - loss: 3.1106 - acc: 0.0800
Epoch 30/500
0s - loss: 3.1015 - acc: 0.0800
Epoch 31/500
0s - loss: 3.0943 - acc: 0.0800
Epoch 32/500
0s - loss: 3.0844 - acc: 0.0800
Epoch 33/500
0s - loss: 3.0745 - acc: 0.0800
Epoch 34/500
0s - loss: 3.0652 - acc: 0.0400
Epoch 35/500
0s - loss: 3.0560 - acc: 0.0400
Epoch 36/500
0s - loss: 3.0454 - acc: 0.0400
Epoch 37/500
0s - loss: 3.0346 - acc: 0.0400
Epoch 38/500
0s - loss: 3.0236 - acc: 0.0800
Epoch 39/500
0s - loss: 3.0132 - acc: 0.0800
Epoch 40/500
0s - loss: 3.0017 - acc: 0.0800
Epoch 41/500
0s - loss: 2.9894 - acc: 0.0800
Epoch 42/500
0s - loss: 2.9777 - acc: 0.0400
Epoch 43/500
0s - loss: 2.9659 - acc: 0.0800
Epoch 44/500
0s - loss: 2.9533 - acc: 0.0800
Epoch 45/500
0s - loss: 2.9415 - acc: 0.0800
Epoch 46/500
0s - loss: 2.9288 - acc: 0.0800
Epoch 47/500
0s - loss: 2.9174 - acc: 0.0800
Epoch 48/500
0s - loss: 2.9071 - acc: 0.0800
Epoch 49/500
0s - loss: 2.8913 - acc: 0.1200
Epoch 50/500
0s - loss: 2.8798 - acc: 0.1200
Epoch 51/500
0s - loss: 2.8668 - acc: 0.1200
Epoch 52/500
0s - loss: 2.8549 - acc: 0.1200
Epoch 53/500
0s - loss: 2.8426 - acc: 0.0800
Epoch 54/500
0s - loss: 2.8314 - acc: 0.0800
Epoch 55/500
0s - loss: 2.8199 - acc: 0.0800
Epoch 56/500
0s - loss: 2.8079 - acc: 0.0800
Epoch 57/500
0s - loss: 2.7979 - acc: 0.0400
Epoch 58/500
0s - loss: 2.7871 - acc: 0.0800
Epoch 59/500
0s - loss: 2.7754 - acc: 0.0400
Epoch 60/500
0s - loss: 2.7651 - acc: 0.0800
Epoch 61/500
0s - loss: 2.7554 - acc: 0.0800
Epoch 62/500
0s - loss: 2.7455 - acc: 0.0400
Epoch 63/500
0s - loss: 2.7357 - acc: 0.0800
Epoch 64/500
0s - loss: 2.7261 - acc: 0.0800
Epoch 65/500
0s - loss: 2.7176 - acc: 0.0400
Epoch 66/500
0s - loss: 2.7078 - acc: 0.0400
Epoch 67/500
0s - loss: 2.6990 - acc: 0.0400
Epoch 68/500
0s - loss: 2.6907 - acc: 0.0800
Epoch 69/500
0s - loss: 2.6816 - acc: 0.0800
Epoch 70/500
0s - loss: 2.6746 - acc: 0.0800
Epoch 71/500
0s - loss: 2.6675 - acc: 0.0800
Epoch 72/500
0s - loss: 2.6586 - acc: 0.0800
Epoch 73/500
0s - loss: 2.6523 - acc: 0.0400
Epoch 74/500
0s - loss: 2.6441 - acc: 0.0400
Epoch 75/500
0s - loss: 2.6375 - acc: 0.0800
Epoch 76/500
0s - loss: 2.6304 - acc: 0.0400
Epoch 77/500
0s - loss: 2.6238 - acc: 0.0800
Epoch 78/500
0s - loss: 2.6163 - acc: 0.0800
Epoch 79/500
0s - loss: 2.6100 - acc: 0.0400
Epoch 80/500
0s - loss: 2.6037 - acc: 0.0400
Epoch 81/500
0s - loss: 2.5960 - acc: 0.0800
Epoch 82/500
0s - loss: 2.5898 - acc: 0.0400
Epoch 83/500
0s - loss: 2.5841 - acc: 0.0800
Epoch 84/500
0s - loss: 2.5778 - acc: 0.0800
Epoch 85/500
0s - loss: 2.5707 - acc: 0.0800
Epoch 86/500
0s - loss: 2.5650 - acc: 0.0800
Epoch 87/500
0s - loss: 2.5598 - acc: 0.0800
Epoch 88/500
0s - loss: 2.5549 - acc: 0.0800
Epoch 89/500
0s - loss: 2.5484 - acc: 0.0800
Epoch 90/500
0s - loss: 2.5423 - acc: 0.0800
Epoch 91/500
0s - loss: 2.5362 - acc: 0.0800
Epoch 92/500
0s - loss: 2.5293 - acc: 0.0800
Epoch 93/500
0s - loss: 2.5251 - acc: 0.0800
Epoch 94/500
0s - loss: 2.5190 - acc: 0.1200
Epoch 95/500
0s - loss: 2.5132 - acc: 0.1200
Epoch 96/500
0s - loss: 2.5063 - acc: 0.1200
Epoch 97/500
0s - loss: 2.5013 - acc: 0.2000
Epoch 98/500
0s - loss: 2.4956 - acc: 0.1600
Epoch 99/500
0s - loss: 2.4901 - acc: 0.2000
Epoch 100/500
0s - loss: 2.4857 - acc: 0.2000
Epoch 101/500
0s - loss: 2.4791 - acc: 0.2000
Epoch 102/500
0s - loss: 2.4729 - acc: 0.2000
Epoch 103/500
0s - loss: 2.4700 - acc: 0.1200
Epoch 104/500
0s - loss: 2.4628 - acc: 0.1600
Epoch 105/500
0s - loss: 2.4568 - acc: 0.1200
Epoch 106/500
0s - loss: 2.4514 - acc: 0.1200
Epoch 107/500
0s - loss: 2.4466 - acc: 0.1600
Epoch 108/500
0s - loss: 2.4404 - acc: 0.1600
Epoch 109/500
0s - loss: 2.4346 - acc: 0.1600
Epoch 110/500
0s - loss: 2.4300 - acc: 0.1600
Epoch 111/500
0s - loss: 2.4246 - acc: 0.1200
Epoch 112/500
0s - loss: 2.4187 - acc: 0.1200
Epoch 113/500
0s - loss: 2.4139 - acc: 0.1600
Epoch 114/500
0s - loss: 2.4092 - acc: 0.1200
Epoch 115/500
0s - loss: 2.4039 - acc: 0.1200
Epoch 116/500
0s - loss: 2.3993 - acc: 0.1200
Epoch 117/500
0s - loss: 2.3936 - acc: 0.1600
Epoch 118/500
0s - loss: 2.3890 - acc: 0.1600
Epoch 119/500
0s - loss: 2.3836 - acc: 0.1600
Epoch 120/500
0s - loss: 2.3791 - acc: 0.1600
Epoch 121/500
0s - loss: 2.3744 - acc: 0.1600
Epoch 122/500
0s - loss: 2.3693 - acc: 0.1600
Epoch 123/500
0s - loss: 2.3658 - acc: 0.1200
Epoch 124/500
0s - loss: 2.3596 - acc: 0.1600
Epoch 125/500
0s - loss: 2.3560 - acc: 0.1600
Epoch 126/500
0s - loss: 2.3505 - acc: 0.1600
Epoch 127/500
0s - loss: 2.3439 - acc: 0.1600
Epoch 128/500
0s - loss: 2.3414 - acc: 0.1600
Epoch 129/500
0s - loss: 2.3375 - acc: 0.1200
Epoch 130/500
0s - loss: 2.3314 - acc: 0.1200
Epoch 131/500
0s - loss: 2.3287 - acc: 0.1600
Epoch 132/500
0s - loss: 2.3250 - acc: 0.1600
Epoch 133/500
0s - loss: 2.3204 - acc: 0.1600
Epoch 134/500
0s - loss: 2.3145 - acc: 0.1600
Epoch 135/500
0s - loss: 2.3119 - acc: 0.2000
Epoch 136/500
0s - loss: 2.3075 - acc: 0.2000
Epoch 137/500
0s - loss: 2.3028 - acc: 0.1600
Epoch 138/500
0s - loss: 2.2992 - acc: 0.1600
Epoch 139/500
0s - loss: 2.2952 - acc: 0.2000
Epoch 140/500
0s - loss: 2.2904 - acc: 0.2000
Epoch 141/500
0s - loss: 2.2876 - acc: 0.1600
Epoch 142/500
0s - loss: 2.2840 - acc: 0.2000
Epoch 143/500
0s - loss: 2.2794 - acc: 0.1600
Epoch 144/500
0s - loss: 2.2771 - acc: 0.1200
Epoch 145/500
0s - loss: 2.2704 - acc: 0.1200
Epoch 146/500
0s - loss: 2.2667 - acc: 0.2000
Epoch 147/500
0s - loss: 2.2624 - acc: 0.2400
Epoch 148/500
0s - loss: 2.2600 - acc: 0.2400
Epoch 149/500
0s - loss: 2.2555 - acc: 0.2400
Epoch 150/500
0s - loss: 2.2530 - acc: 0.1600
Epoch 151/500
0s - loss: 2.2484 - acc: 0.1600
Epoch 152/500
0s - loss: 2.2460 - acc: 0.2800
Epoch 153/500
0s - loss: 2.2422 - acc: 0.2000
Epoch 154/500
0s - loss: 2.2381 - acc: 0.2400
Epoch 155/500
0s - loss: 2.2356 - acc: 0.2800
Epoch 156/500
0s - loss: 2.2324 - acc: 0.2800
Epoch 157/500
0s - loss: 2.2276 - acc: 0.2000
Epoch 158/500
0s - loss: 2.2260 - acc: 0.2400
Epoch 159/500
0s - loss: 2.2233 - acc: 0.1600
Epoch 160/500
0s - loss: 2.2190 - acc: 0.2800
Epoch 161/500
0s - loss: 2.2145 - acc: 0.2400
Epoch 162/500
0s - loss: 2.2119 - acc: 0.2800
Epoch 163/500
0s - loss: 2.2088 - acc: 0.3200
Epoch 164/500
0s - loss: 2.2052 - acc: 0.2400
Epoch 165/500
0s - loss: 2.2029 - acc: 0.2400
Epoch 166/500
0s - loss: 2.1995 - acc: 0.2400
Epoch 167/500
0s - loss: 2.1971 - acc: 0.2800
Epoch 168/500
0s - loss: 2.1940 - acc: 0.2800
Epoch 169/500
0s - loss: 2.1903 - acc: 0.2800
Epoch 170/500
0s - loss: 2.1878 - acc: 0.2800
Epoch 171/500
0s - loss: 2.1825 - acc: 0.3200
Epoch 172/500
0s - loss: 2.1824 - acc: 0.2400
Epoch 173/500
0s - loss: 2.1785 - acc: 0.2400
Epoch 174/500
0s - loss: 2.1749 - acc: 0.2800
Epoch 175/500
0s - loss: 2.1727 - acc: 0.3200
Epoch 176/500
0s - loss: 2.1678 - acc: 0.2400
Epoch 177/500
0s - loss: 2.1673 - acc: 0.3200
Epoch 178/500
0s - loss: 2.1637 - acc: 0.2800
Epoch 179/500
0s - loss: 2.1608 - acc: 0.4000
Epoch 180/500
0s - loss: 2.1573 - acc: 0.3200
Epoch 181/500
0s - loss: 2.1552 - acc: 0.4000
Epoch 182/500
0s - loss: 2.1510 - acc: 0.4000
Epoch 183/500
0s - loss: 2.1501 - acc: 0.3200
Epoch 184/500
0s - loss: 2.1472 - acc: 0.4000
Epoch 185/500
0s - loss: 2.1444 - acc: 0.3600
Epoch 186/500
0s - loss: 2.1399 - acc: 0.3200
Epoch 187/500
0s - loss: 2.1380 - acc: 0.3200
Epoch 188/500
0s - loss: 2.1361 - acc: 0.3200
Epoch 189/500
0s - loss: 2.1335 - acc: 0.4000
Epoch 190/500
0s - loss: 2.1304 - acc: 0.3600
Epoch 191/500
0s - loss: 2.1293 - acc: 0.3600
Epoch 192/500
0s - loss: 2.1261 - acc: 0.4400
Epoch 193/500
0s - loss: 2.1230 - acc: 0.2800
Epoch 194/500
0s - loss: 2.1205 - acc: 0.3600
Epoch 195/500
0s - loss: 2.1183 - acc: 0.3600
Epoch 196/500
0s - loss: 2.1147 - acc: 0.3600
Epoch 197/500
0s - loss: 2.1114 - acc: 0.4000
Epoch 198/500
0s - loss: 2.1096 - acc: 0.2800
Epoch 199/500
0s - loss: 2.1066 - acc: 0.4000
Epoch 200/500
0s - loss: 2.1045 - acc: 0.4800
Epoch 201/500
0s - loss: 2.1023 - acc: 0.2800
Epoch 202/500
0s - loss: 2.0990 - acc: 0.3200
Epoch 203/500
0s - loss: 2.0949 - acc: 0.3200
Epoch 204/500
0s - loss: 2.0939 - acc: 0.4800
Epoch 205/500
0s - loss: 2.0910 - acc: 0.4400
Epoch 206/500
0s - loss: 2.0890 - acc: 0.3600
Epoch 207/500
0s - loss: 2.0858 - acc: 0.4000
Epoch 208/500
0s - loss: 2.0853 - acc: 0.4400
Epoch 209/500
0s - loss: 2.0805 - acc: 0.4800
Epoch 210/500
0s - loss: 2.0803 - acc: 0.4400
Epoch 211/500
0s - loss: 2.0772 - acc: 0.4000
Epoch 212/500
0s - loss: 2.0737 - acc: 0.4800
Epoch 213/500
0s - loss: 2.0737 - acc: 0.4800
Epoch 214/500
0s - loss: 2.0698 - acc: 0.4400
Epoch 215/500
0s - loss: 2.0667 - acc: 0.3200
Epoch 216/500
0s - loss: 2.0643 - acc: 0.4800
Epoch 217/500
0s - loss: 2.0622 - acc: 0.3600
Epoch 218/500
0s - loss: 2.0597 - acc: 0.4000
Epoch 219/500
0s - loss: 2.0572 - acc: 0.3200
Epoch 220/500
0s - loss: 2.0537 - acc: 0.4400
Epoch 221/500
0s - loss: 2.0535 - acc: 0.4000
Epoch 222/500
0s - loss: 2.0504 - acc: 0.4800
Epoch 223/500
0s - loss: 2.0497 - acc: 0.4400
Epoch 224/500
0s - loss: 2.0457 - acc: 0.4800
Epoch 225/500
0s - loss: 2.0449 - acc: 0.4800
Epoch 226/500
0s - loss: 2.0425 - acc: 0.4800
Epoch 227/500
0s - loss: 2.0402 - acc: 0.5600
Epoch 228/500
0s - loss: 2.0384 - acc: 0.5200
Epoch 229/500
0s - loss: 2.0374 - acc: 0.4400
Epoch 230/500
0s - loss: 2.0340 - acc: 0.4800
Epoch 231/500
0s - loss: 2.0309 - acc: 0.5200
Epoch 232/500
0s - loss: 2.0287 - acc: 0.4800
Epoch 233/500
0s - loss: 2.0263 - acc: 0.4800
Epoch 234/500
0s - loss: 2.0237 - acc: 0.6000
Epoch 235/500
0s - loss: 2.0228 - acc: 0.4400
Epoch 236/500
0s - loss: 2.0223 - acc: 0.4400
Epoch 237/500
0s - loss: 2.0192 - acc: 0.5600
Epoch 238/500
0s - loss: 2.0159 - acc: 0.4400
Epoch 239/500
0s - loss: 2.0142 - acc: 0.4800
Epoch 240/500
0s - loss: 2.0125 - acc: 0.4400
Epoch 241/500
0s - loss: 2.0111 - acc: 0.4400
Epoch 242/500
0s - loss: 2.0075 - acc: 0.5600
Epoch 243/500
0s - loss: 2.0043 - acc: 0.4800
Epoch 244/500
0s - loss: 2.0028 - acc: 0.4800
Epoch 245/500
0s - loss: 1.9995 - acc: 0.5200
Epoch 246/500
0s - loss: 2.0012 - acc: 0.4000
Epoch 247/500
0s - loss: 1.9981 - acc: 0.5200
Epoch 248/500
0s - loss: 1.9976 - acc: 0.5200
Epoch 249/500
0s - loss: 1.9936 - acc: 0.4800
Epoch 250/500
0s - loss: 1.9927 - acc: 0.4800
Epoch 251/500
0s - loss: 1.9904 - acc: 0.4800
Epoch 252/500
0s - loss: 1.9868 - acc: 0.4400
Epoch 253/500
0s - loss: 1.9863 - acc: 0.4800
Epoch 254/500
0s - loss: 1.9828 - acc: 0.6800
Epoch 255/500
0s - loss: 1.9835 - acc: 0.5200
Epoch 256/500
0s - loss: 1.9816 - acc: 0.4800
Epoch 257/500
0s - loss: 1.9786 - acc: 0.5600
Epoch 258/500
0s - loss: 1.9765 - acc: 0.5600
Epoch 259/500
0s - loss: 1.9738 - acc: 0.5200
Epoch 260/500
0s - loss: 1.9710 - acc: 0.6000
Epoch 261/500
0s - loss: 1.9715 - acc: 0.5600
Epoch 262/500
0s - loss: 1.9698 - acc: 0.5200
Epoch 263/500
0s - loss: 1.9688 - acc: 0.4400
Epoch 264/500
0s - loss: 1.9651 - acc: 0.6000
Epoch 265/500
0s - loss: 1.9627 - acc: 0.6000
Epoch 266/500
0s - loss: 1.9611 - acc: 0.6000
Epoch 267/500
0s - loss: 1.9590 - acc: 0.5200
Epoch 268/500
0s - loss: 1.9586 - acc: 0.5600
Epoch 269/500
0s - loss: 1.9569 - acc: 0.6000
Epoch 270/500
0s - loss: 1.9544 - acc: 0.5200
Epoch 271/500
0s - loss: 1.9532 - acc: 0.5600
Epoch 272/500
0s - loss: 1.9513 - acc: 0.5600
Epoch 273/500
0s - loss: 1.9502 - acc: 0.4400
Epoch 274/500
0s - loss: 1.9476 - acc: 0.5200
Epoch 275/500
0s - loss: 1.9461 - acc: 0.5600
Epoch 276/500
0s - loss: 1.9453 - acc: 0.5200
Epoch 277/500
0s - loss: 1.9417 - acc: 0.4800
Epoch 278/500
0s - loss: 1.9427 - acc: 0.6400
Epoch 279/500
0s - loss: 1.9371 - acc: 0.6000
Epoch 280/500
0s - loss: 1.9383 - acc: 0.6000
Epoch 281/500
0s - loss: 1.9359 - acc: 0.5200
Epoch 282/500
0s - loss: 1.9336 - acc: 0.4800
Epoch 283/500
0s - loss: 1.9319 - acc: 0.4800
Epoch 284/500
0s - loss: 1.9303 - acc: 0.5600
Epoch 285/500
0s - loss: 1.9295 - acc: 0.6000
Epoch 286/500
0s - loss: 1.9279 - acc: 0.6000
Epoch 287/500
0s - loss: 1.9248 - acc: 0.6000
Epoch 288/500
0s - loss: 1.9252 - acc: 0.5200
Epoch 289/500
0s - loss: 1.9232 - acc: 0.5600
Epoch 290/500
0s - loss: 1.9221 - acc: 0.6000
Epoch 291/500
0s - loss: 1.9188 - acc: 0.5600
Epoch 292/500
0s - loss: 1.9185 - acc: 0.6400
Epoch 293/500
0s - loss: 1.9170 - acc: 0.5600
Epoch 294/500
0s - loss: 1.9158 - acc: 0.5600
Epoch 295/500
0s - loss: 1.9132 - acc: 0.6000
Epoch 296/500
0s - loss: 1.9131 - acc: 0.6400
Epoch 297/500
0s - loss: 1.9105 - acc: 0.4800
Epoch 298/500
0s - loss: 1.9102 - acc: 0.5200
Epoch 299/500
0s - loss: 1.9080 - acc: 0.4400
Epoch 300/500
0s - loss: 1.9063 - acc: 0.5600
Epoch 301/500
0s - loss: 1.9051 - acc: 0.6800
Epoch 302/500
0s - loss: 1.9030 - acc: 0.5600
Epoch 303/500
0s - loss: 1.9023 - acc: 0.6400
Epoch 304/500
0s - loss: 1.8996 - acc: 0.5600
Epoch 305/500
0s - loss: 1.8973 - acc: 0.6000
Epoch 306/500
0s - loss: 1.8954 - acc: 0.6800
Epoch 307/500
0s - loss: 1.8958 - acc: 0.5600
Epoch 308/500
0s - loss: 1.8947 - acc: 0.6000
Epoch 309/500
0s - loss: 1.8918 - acc: 0.6400
Epoch 310/500
0s - loss: 1.8915 - acc: 0.6400
Epoch 311/500
0s - loss: 1.8884 - acc: 0.6800
Epoch 312/500
0s - loss: 1.8883 - acc: 0.6400
Epoch 313/500
0s - loss: 1.8861 - acc: 0.6000
Epoch 314/500
0s - loss: 1.8872 - acc: 0.5600
Epoch 315/500
0s - loss: 1.8850 - acc: 0.6800
Epoch 316/500
0s - loss: 1.8835 - acc: 0.6800
Epoch 317/500
0s - loss: 1.8786 - acc: 0.6800
Epoch 318/500
0s - loss: 1.8801 - acc: 0.6000
Epoch 319/500
0s - loss: 1.8783 - acc: 0.5200
Epoch 320/500
0s - loss: 1.8757 - acc: 0.6400
Epoch 321/500
0s - loss: 1.8761 - acc: 0.6400
Epoch 322/500
0s - loss: 1.8731 - acc: 0.5600
Epoch 323/500
0s - loss: 1.8721 - acc: 0.5600
Epoch 324/500
0s - loss: 1.8727 - acc: 0.6000
Epoch 325/500
0s - loss: 1.8698 - acc: 0.6400
Epoch 326/500
0s - loss: 1.8693 - acc: 0.6400
Epoch 327/500
0s - loss: 1.8668 - acc: 0.6800
Epoch 328/500
0s - loss: 1.8656 - acc: 0.6800
Epoch 329/500
0s - loss: 1.8637 - acc: 0.6800
Epoch 330/500
0s - loss: 1.8642 - acc: 0.6400
Epoch 331/500
0s - loss: 1.8628 - acc: 0.6000
Epoch 332/500
0s - loss: 1.8610 - acc: 0.6400
Epoch 333/500
0s - loss: 1.8576 - acc: 0.6800
Epoch 334/500
0s - loss: 1.8581 - acc: 0.6400
Epoch 335/500
0s - loss: 1.8573 - acc: 0.5600
Epoch 336/500
0s - loss: 1.8573 - acc: 0.6400
Epoch 337/500
0s - loss: 1.8550 - acc: 0.6400
Epoch 338/500
0s - loss: 1.8526 - acc: 0.7200
Epoch 339/500
0s - loss: 1.8509 - acc: 0.6400
Epoch 340/500
0s - loss: 1.8515 - acc: 0.7200
Epoch 341/500
0s - loss: 1.8496 - acc: 0.7200
Epoch 342/500
0s - loss: 1.8469 - acc: 0.6400
Epoch 343/500
0s - loss: 1.8469 - acc: 0.7200
Epoch 344/500
0s - loss: 1.8452 - acc: 0.6400
Epoch 345/500
0s - loss: 1.8440 - acc: 0.6400
Epoch 346/500
0s - loss: 1.8428 - acc: 0.6800
Epoch 347/500
0s - loss: 1.8392 - acc: 0.6000
Epoch 348/500
0s - loss: 1.8409 - acc: 0.7200
Epoch 349/500
0s - loss: 1.8378 - acc: 0.6400
Epoch 350/500
0s - loss: 1.8366 - acc: 0.7600
Epoch 351/500
0s - loss: 1.8356 - acc: 0.7200
Epoch 352/500
0s - loss: 1.8351 - acc: 0.7600
Epoch 353/500
0s - loss: 1.8347 - acc: 0.6400
Epoch 354/500
0s - loss: 1.8332 - acc: 0.6400
Epoch 355/500
0s - loss: 1.8312 - acc: 0.7200
Epoch 356/500
0s - loss: 1.8305 - acc: 0.6400
Epoch 357/500
0s - loss: 1.8277 - acc: 0.6800
Epoch 358/500
0s - loss: 1.8272 - acc: 0.6800
Epoch 359/500
0s - loss: 1.8262 - acc: 0.6800
Epoch 360/500
0s - loss: 1.8257 - acc: 0.7200
Epoch 361/500
0s - loss: 1.8233 - acc: 0.6400
Epoch 362/500
0s - loss: 1.8232 - acc: 0.6400
Epoch 363/500
0s - loss: 1.8220 - acc: 0.7200
Epoch 364/500
0s - loss: 1.8197 - acc: 0.7200
Epoch 365/500
0s - loss: 1.8203 - acc: 0.8000
Epoch 366/500
0s - loss: 1.8177 - acc: 0.6800
Epoch 367/500
0s - loss: 1.8169 - acc: 0.7200
Epoch 368/500
0s - loss: 1.8157 - acc: 0.7600
Epoch 369/500
0s - loss: 1.8150 - acc: 0.7200
Epoch 370/500
0s - loss: 1.8159 - acc: 0.7200
Epoch 371/500
0s - loss: 1.8137 - acc: 0.7200
Epoch 372/500
0s - loss: 1.8134 - acc: 0.7200
Epoch 373/500
0s - loss: 1.8108 - acc: 0.7200
Epoch 374/500
0s - loss: 1.8101 - acc: 0.7600
Epoch 375/500
0s - loss: 1.8070 - acc: 0.8000
Epoch 376/500
0s - loss: 1.8078 - acc: 0.6800
Epoch 377/500
0s - loss: 1.8081 - acc: 0.7600
Epoch 378/500
0s - loss: 1.8059 - acc: 0.7200
Epoch 379/500
0s - loss: 1.8048 - acc: 0.7200
Epoch 380/500
0s - loss: 1.8024 - acc: 0.7200
Epoch 381/500
0s - loss: 1.8029 - acc: 0.6800
Epoch 382/500
0s - loss: 1.8007 - acc: 0.6800
Epoch 383/500
0s - loss: 1.7996 - acc: 0.6000
Epoch 384/500
0s - loss: 1.7972 - acc: 0.6800
Epoch 385/500
0s - loss: 1.7951 - acc: 0.7200
Epoch 386/500
0s - loss: 1.7953 - acc: 0.6800
Epoch 387/500
0s - loss: 1.7951 - acc: 0.7200
Epoch 388/500
0s - loss: 1.7932 - acc: 0.8000
Epoch 389/500
0s - loss: 1.7931 - acc: 0.6800
Epoch 390/500
0s - loss: 1.7922 - acc: 0.7200
Epoch 391/500
0s - loss: 1.7909 - acc: 0.6800
Epoch 392/500
0s - loss: 1.7887 - acc: 0.7200
Epoch 393/500
0s - loss: 1.7869 - acc: 0.6400
Epoch 394/500
0s - loss: 1.7875 - acc: 0.6400
Epoch 395/500
0s - loss: 1.7847 - acc: 0.7600
Epoch 396/500
0s - loss: 1.7853 - acc: 0.7600
Epoch 397/500
0s - loss: 1.7822 - acc: 0.8000
Epoch 398/500
0s - loss: 1.7822 - acc: 0.7200
Epoch 399/500
0s - loss: 1.7818 - acc: 0.7600
Epoch 400/500
0s - loss: 1.7773 - acc: 0.8000
Epoch 401/500
0s - loss: 1.7796 - acc: 0.6800
Epoch 402/500
0s - loss: 1.7764 - acc: 0.7600
Epoch 403/500
0s - loss: 1.7768 - acc: 0.7600
Epoch 404/500
0s - loss: 1.7769 - acc: 0.7600
Epoch 405/500
0s - loss: 1.7760 - acc: 0.8000
Epoch 406/500
0s - loss: 1.7746 - acc: 0.7200
Epoch 407/500
0s - loss: 1.7732 - acc: 0.7600
Epoch 408/500
0s - loss: 1.7714 - acc: 0.7600
Epoch 409/500
0s - loss: 1.7728 - acc: 0.7200
Epoch 410/500
0s - loss: 1.7707 - acc: 0.7600
Epoch 411/500
0s - loss: 1.7692 - acc: 0.7600
Epoch 412/500
0s - loss: 1.7676 - acc: 0.7600
Epoch 413/500
0s - loss: 1.7673 - acc: 0.7600
Epoch 414/500
0s - loss: 1.7656 - acc: 0.7200
Epoch 415/500
0s - loss: 1.7640 - acc: 0.7200
Epoch 416/500
0s - loss: 1.7628 - acc: 0.7200
Epoch 417/500
0s - loss: 1.7621 - acc: 0.7600
Epoch 418/500
0s - loss: 1.7591 - acc: 0.7200
Epoch 419/500
0s - loss: 1.7617 - acc: 0.7600
Epoch 420/500
0s - loss: 1.7612 - acc: 0.7600
Epoch 421/500
0s - loss: 1.7589 - acc: 0.7200
Epoch 422/500
0s - loss: 1.7583 - acc: 0.7600
Epoch 423/500
0s - loss: 1.7583 - acc: 0.7200
Epoch 424/500
0s - loss: 1.7543 - acc: 0.7600
Epoch 425/500
0s - loss: 1.7537 - acc: 0.7600
Epoch 426/500
0s - loss: 1.7516 - acc: 0.8400
Epoch 427/500
0s - loss: 1.7541 - acc: 0.6800
Epoch 428/500
0s - loss: 1.7530 - acc: 0.7600
Epoch 429/500
0s - loss: 1.7515 - acc: 0.8000
Epoch 430/500
0s - loss: 1.7488 - acc: 0.8000
Epoch 431/500
0s - loss: 1.7484 - acc: 0.6800
Epoch 432/500
0s - loss: 1.7480 - acc: 0.6400
Epoch 433/500
0s - loss: 1.7446 - acc: 0.7600
Epoch 434/500
0s - loss: 1.7462 - acc: 0.6800
Epoch 435/500
0s - loss: 1.7458 - acc: 0.7600
Epoch 436/500
0s - loss: 1.7441 - acc: 0.7200
Epoch 437/500
0s - loss: 1.7442 - acc: 0.8000
Epoch 438/500
0s - loss: 1.7432 - acc: 0.7200
Epoch 439/500
0s - loss: 1.7427 - acc: 0.6800
Epoch 440/500
0s - loss: 1.7399 - acc: 0.7200
Epoch 441/500
0s - loss: 1.7399 - acc: 0.7600
Epoch 442/500
0s - loss: 1.7396 - acc: 0.8000
Epoch 443/500
0s - loss: 1.7366 - acc: 0.8000
Epoch 444/500
0s - loss: 1.7381 - acc: 0.6800
Epoch 445/500
0s - loss: 1.7360 - acc: 0.8000
Epoch 446/500
0s - loss: 1.7342 - acc: 0.7200
Epoch 447/500
0s - loss: 1.7339 - acc: 0.7600
Epoch 448/500
0s - loss: 1.7340 - acc: 0.7200
Epoch 449/500
0s - loss: 1.7310 - acc: 0.7600
Epoch 450/500
0s - loss: 1.7309 - acc: 0.8400
Epoch 451/500
0s - loss: 1.7305 - acc: 0.7600
Epoch 452/500
0s - loss: 1.7305 - acc: 0.8000
Epoch 453/500
0s - loss: 1.7293 - acc: 0.6800
Epoch 454/500
0s - loss: 1.7274 - acc: 0.7200
Epoch 455/500
0s - loss: 1.7264 - acc: 0.7600
Epoch 456/500
0s - loss: 1.7242 - acc: 0.8000
Epoch 457/500
0s - loss: 1.7234 - acc: 0.8400
Epoch 458/500
0s - loss: 1.7238 - acc: 0.8000
Epoch 459/500
0s - loss: 1.7214 - acc: 0.8400
Epoch 460/500
0s - loss: 1.7218 - acc: 0.8400
Epoch 461/500
0s - loss: 1.7212 - acc: 0.7200
Epoch 462/500
0s - loss: 1.7219 - acc: 0.7600
Epoch 463/500
0s - loss: 1.7189 - acc: 0.8400
Epoch 464/500
0s - loss: 1.7191 - acc: 0.8000
Epoch 465/500
0s - loss: 1.7186 - acc: 0.8000
Epoch 466/500
0s - loss: 1.7171 - acc: 0.8000
Epoch 467/500
0s - loss: 1.7172 - acc: 0.7600
Epoch 468/500
0s - loss: 1.7136 - acc: 0.7600
Epoch 469/500
0s - loss: 1.7144 - acc: 0.8000
Epoch 470/500
0s - loss: 1.7122 - acc: 0.7600
Epoch 471/500
0s - loss: 1.7130 - acc: 0.7600
Epoch 472/500
0s - loss: 1.7114 - acc: 0.8400
Epoch 473/500
0s - loss: 1.7115 - acc: 0.7600
Epoch 474/500
0s - loss: 1.7100 - acc: 0.8000
Epoch 475/500
0s - loss: 1.7082 - acc: 0.7200
Epoch 476/500
0s - loss: 1.7088 - acc: 0.7600
Epoch 477/500
0s - loss: 1.7074 - acc: 0.8400
Epoch 478/500
0s - loss: 1.7044 - acc: 0.8000
Epoch 479/500
0s - loss: 1.7044 - acc: 0.8400
Epoch 480/500
0s - loss: 1.7030 - acc: 0.8800
Epoch 481/500
0s - loss: 1.7028 - acc: 0.8000
Epoch 482/500
0s - loss: 1.7026 - acc: 0.7600
Epoch 483/500
0s - loss: 1.7051 - acc: 0.7600
Epoch 484/500
0s - loss: 1.7011 - acc: 0.8000
Epoch 485/500
0s - loss: 1.7006 - acc: 0.8000
Epoch 486/500
0s - loss: 1.6988 - acc: 0.7600
Epoch 487/500
0s - loss: 1.6990 - acc: 0.8400
Epoch 488/500
0s - loss: 1.6960 - acc: 0.8400
Epoch 489/500
0s - loss: 1.6951 - acc: 0.8000
Epoch 490/500
0s - loss: 1.6950 - acc: 0.8000
Epoch 491/500
0s - loss: 1.6953 - acc: 0.7600
Epoch 492/500
0s - loss: 1.6951 - acc: 0.7600
Epoch 493/500
0s - loss: 1.6930 - acc: 0.7600
Epoch 494/500
0s - loss: 1.6903 - acc: 0.8400
Epoch 495/500
0s - loss: 1.6922 - acc: 0.8000
Epoch 496/500
0s - loss: 1.6893 - acc: 0.7600
Epoch 497/500
0s - loss: 1.6891 - acc: 0.8000
Epoch 498/500
0s - loss: 1.6889 - acc: 0.8400
Epoch 499/500
0s - loss: 1.6880 - acc: 0.8000
Epoch 500/500
0s - loss: 1.6861 - acc: 0.8400
Out[10]:
<keras.callbacks.History at 0x7f7b131ae898>

In [11]:
# summarize performance of the model
scores = model.evaluate(X, y, verbose=0)
print("Model Accuracy: %.2f%%" % (scores[1]*100))


Model Accuracy: 84.00%

In [31]:
# demonstrate some model predictions
def predict(dataX):
    for pattern in dataX:
        x = numpy.reshape(pattern, (1, len(pattern), 1))
        print(x.shape)
        x = x / float(len(alphabet))
        prediction = model.predict(x, verbose=0)
        index = numpy.argmax(prediction)
        result = int_to_char[index]
        seq_in = [int_to_char[value] for value in pattern]
        print(seq_in, "->", result)

predict(dataX)


(1, 3, 1)
['A', 'B', 'C'] -> D
(1, 3, 1)
['B', 'C', 'D'] -> E
(1, 3, 1)
['C', 'D', 'E'] -> F
(1, 3, 1)
['D', 'E', 'F'] -> G
(1, 3, 1)
['E', 'F', 'G'] -> H
(1, 3, 1)
['F', 'G', 'H'] -> I
(1, 3, 1)
['G', 'H', 'I'] -> J
(1, 3, 1)
['H', 'I', 'J'] -> K
(1, 3, 1)
['I', 'J', 'K'] -> L
(1, 3, 1)
['J', 'K', 'L'] -> M
(1, 3, 1)
['K', 'L', 'M'] -> N
(1, 3, 1)
['L', 'M', 'N'] -> O
(1, 3, 1)
['M', 'N', 'O'] -> P
(1, 3, 1)
['N', 'O', 'P'] -> Q
(1, 3, 1)
['O', 'P', 'Q'] -> R
(1, 3, 1)
['P', 'Q', 'R'] -> S
(1, 3, 1)
['Q', 'R', 'S'] -> T
(1, 3, 1)
['R', 'S', 'T'] -> U
(1, 3, 1)
['S', 'T', 'U'] -> V
(1, 3, 1)
['T', 'U', 'V'] -> W
(1, 3, 1)
['U', 'V', 'W'] -> X
(1, 3, 1)
['V', 'W', 'X'] -> Y
(1, 3, 1)
['W', 'X', 'Y'] -> Z

We can see that this problem is indeed difficult for the network to learn.

The reason is that the LSTM units have no context to work with. Each input-output pattern is shown to the network in a random order, and the state of the network is reset after each pattern (each batch, where each batch contains one pattern).

This is an abuse of the LSTM network architecture, treating it like a standard multilayer perceptron.
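
To make the comparison concrete, here is a minimal sketch of an equivalent multilayer perceptron on the same data. It is not part of the original notebook, and the names mlp and X_flat are illustrative; under this framing the LSTM has little to exploit beyond what this flat model sees.

# hypothetical MLP baseline: each pattern is an independent feature vector,
# which is effectively how the naive LSTM framing treats the data
X_flat = numpy.reshape(X, (X.shape[0], X.shape[1] * X.shape[2]))
mlp = Sequential()
mlp.add(Dense(32, input_dim=X_flat.shape[1], activation='relu'))
mlp.add(Dense(y.shape[1], activation='softmax'))
mlp.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
mlp.fit(X_flat, y, nb_epoch=500, batch_size=1, verbose=0)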

Naive LSTM for a Three-Char Time Step Window to One-Char Mapping

A popular approach to adding more context to data for multilayer perceptrons is to use the window method.

This is where previous steps in the sequence are provided as additional input features to the network. We can try the same trick to provide more context to the LSTM network. Note that in the cells below the three-character window is presented to the LSTM as three time steps of one feature each, rather than as three features in a single time step; both reshapes are sketched after this paragraph.
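
As a small aside, the two framings differ only in how the window is reshaped. This is a sketch, not a cell from the original notebook, and it assumes dataX has been rebuilt with seq_length = 3 as in the cells that follow:

# (a) feature window: one time step carrying three features
X_features = numpy.reshape(dataX, (len(dataX), 1, seq_length))
# (b) time step window: three time steps of one feature each (used below)
X_timesteps = numpy.reshape(dataX, (len(dataX), seq_length, 1))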


In [20]:
# prepare the dataset of input to output pairs encoded as integers
seq_length = 3

In [21]:
dataX, dataY = [], []
create_XY(seq_length, alphabet, dataX, dataY)


ABC -> D
BCD -> E
CDE -> F
DEF -> G
EFG -> H
FGH -> I
GHI -> J
HIJ -> K
IJK -> L
JKL -> M
KLM -> N
LMN -> O
MNO -> P
NOP -> Q
OPQ -> R
PQR -> S
QRS -> T
RST -> U
STU -> V
TUV -> W
UVW -> X
VWX -> Y
WXY -> Z

In [22]:
# reshape X to be [samples, time steps, features]
X = numpy.reshape(dataX, (len(dataX), seq_length, 1))
X.shape
X[0:3]


Out[22]:
(23, 3, 1)
Out[22]:
array([[[0],
        [1],
        [2]],

       [[1],
        [2],
        [3]],

       [[2],
        [3],
        [4]]])

In [23]:
# normalize
X = X / float(len(alphabet))

In [24]:
# one hot encode the output variable
y = np_utils.to_categorical(dataY)

In [26]:
# create and fit the model
model = Sequential()
model.add(LSTM(32, input_shape=(X.shape[1], X.shape[2])))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(X, y, nb_epoch=500, batch_size=1, verbose=2)


Epoch 1/500
0s - loss: 3.2739 - acc: 0.0000e+00
Epoch 2/500
0s - loss: 3.2530 - acc: 0.0870
Epoch 3/500
0s - loss: 3.2396 - acc: 0.0435
Epoch 4/500
0s - loss: 3.2285 - acc: 0.0435
Epoch 5/500
0s - loss: 3.2167 - acc: 0.0435
Epoch 6/500
0s - loss: 3.2046 - acc: 0.0435
Epoch 7/500
0s - loss: 3.1929 - acc: 0.0435
Epoch 8/500
0s - loss: 3.1794 - acc: 0.0435
Epoch 9/500
0s - loss: 3.1664 - acc: 0.0435
Epoch 10/500
0s - loss: 3.1537 - acc: 0.0435
Epoch 11/500
0s - loss: 3.1392 - acc: 0.0435
Epoch 12/500
0s - loss: 3.1245 - acc: 0.0435
Epoch 13/500
0s - loss: 3.1113 - acc: 0.0435
Epoch 14/500
0s - loss: 3.0961 - acc: 0.0435
Epoch 15/500
0s - loss: 3.0846 - acc: 0.0435
Epoch 16/500
0s - loss: 3.0686 - acc: 0.0435
Epoch 17/500
0s - loss: 3.0540 - acc: 0.0435
Epoch 18/500
0s - loss: 3.0411 - acc: 0.0435
Epoch 19/500
0s - loss: 3.0268 - acc: 0.0435
Epoch 20/500
0s - loss: 3.0129 - acc: 0.0435
Epoch 21/500
0s - loss: 2.9961 - acc: 0.0870
Epoch 22/500
0s - loss: 2.9814 - acc: 0.0870
Epoch 23/500
0s - loss: 2.9628 - acc: 0.1304
Epoch 24/500
0s - loss: 2.9447 - acc: 0.1304
Epoch 25/500
0s - loss: 2.9226 - acc: 0.1304
Epoch 26/500
0s - loss: 2.9027 - acc: 0.0870
Epoch 27/500
0s - loss: 2.8827 - acc: 0.0870
Epoch 28/500
0s - loss: 2.8583 - acc: 0.0870
Epoch 29/500
0s - loss: 2.8314 - acc: 0.0870
Epoch 30/500
0s - loss: 2.8049 - acc: 0.1304
Epoch 31/500
0s - loss: 2.7770 - acc: 0.0870
Epoch 32/500
0s - loss: 2.7516 - acc: 0.0870
Epoch 33/500
0s - loss: 2.7201 - acc: 0.0870
Epoch 34/500
0s - loss: 2.6892 - acc: 0.0870
Epoch 35/500
0s - loss: 2.6654 - acc: 0.0870
Epoch 36/500
0s - loss: 2.6346 - acc: 0.0870
Epoch 37/500
0s - loss: 2.6074 - acc: 0.0870
Epoch 38/500
0s - loss: 2.5783 - acc: 0.0870
Epoch 39/500
0s - loss: 2.5567 - acc: 0.0870
Epoch 40/500
0s - loss: 2.5316 - acc: 0.0870
Epoch 41/500
0s - loss: 2.5024 - acc: 0.0870
Epoch 42/500
0s - loss: 2.4870 - acc: 0.0870
Epoch 43/500
0s - loss: 2.4668 - acc: 0.0870
Epoch 44/500
0s - loss: 2.4417 - acc: 0.0435
Epoch 45/500
0s - loss: 2.4192 - acc: 0.1304
Epoch 46/500
0s - loss: 2.3922 - acc: 0.0435
Epoch 47/500
0s - loss: 2.3684 - acc: 0.0870
Epoch 48/500
0s - loss: 2.3494 - acc: 0.1304
Epoch 49/500
0s - loss: 2.3185 - acc: 0.1304
Epoch 50/500
0s - loss: 2.2951 - acc: 0.1739
Epoch 51/500
0s - loss: 2.2701 - acc: 0.1304
Epoch 52/500
0s - loss: 2.2495 - acc: 0.2174
Epoch 53/500
0s - loss: 2.2247 - acc: 0.1739
Epoch 54/500
0s - loss: 2.2015 - acc: 0.2174
Epoch 55/500
0s - loss: 2.1720 - acc: 0.1739
Epoch 56/500
0s - loss: 2.1487 - acc: 0.2174
Epoch 57/500
0s - loss: 2.1279 - acc: 0.2609
Epoch 58/500
0s - loss: 2.1133 - acc: 0.2609
Epoch 59/500
0s - loss: 2.0880 - acc: 0.2174
Epoch 60/500
0s - loss: 2.0701 - acc: 0.1739
Epoch 61/500
0s - loss: 2.0483 - acc: 0.2609
Epoch 62/500
0s - loss: 2.0330 - acc: 0.1739
Epoch 63/500
0s - loss: 2.0162 - acc: 0.2609
Epoch 64/500
0s - loss: 1.9993 - acc: 0.2174
Epoch 65/500
0s - loss: 1.9795 - acc: 0.3043
Epoch 66/500
0s - loss: 1.9561 - acc: 0.3043
Epoch 67/500
0s - loss: 1.9430 - acc: 0.3478
Epoch 68/500
0s - loss: 1.9216 - acc: 0.3043
Epoch 69/500
0s - loss: 1.9086 - acc: 0.2609
Epoch 70/500
0s - loss: 1.8974 - acc: 0.4783
Epoch 71/500
0s - loss: 1.8814 - acc: 0.3043
Epoch 72/500
0s - loss: 1.8623 - acc: 0.4783
Epoch 73/500
0s - loss: 1.8550 - acc: 0.4348
Epoch 74/500
0s - loss: 1.8422 - acc: 0.3913
Epoch 75/500
0s - loss: 1.8211 - acc: 0.3478
Epoch 76/500
0s - loss: 1.8128 - acc: 0.4348
Epoch 77/500
0s - loss: 1.8096 - acc: 0.5217
Epoch 78/500
0s - loss: 1.7944 - acc: 0.4783
Epoch 79/500
0s - loss: 1.7859 - acc: 0.5652
Epoch 80/500
0s - loss: 1.7642 - acc: 0.5652
Epoch 81/500
0s - loss: 1.7560 - acc: 0.5652
Epoch 82/500
0s - loss: 1.7445 - acc: 0.4783
Epoch 83/500
0s - loss: 1.7357 - acc: 0.4348
Epoch 84/500
0s - loss: 1.7273 - acc: 0.3913
Epoch 85/500
0s - loss: 1.7154 - acc: 0.5217
Epoch 86/500
0s - loss: 1.6995 - acc: 0.5217
Epoch 87/500
0s - loss: 1.6952 - acc: 0.5652
Epoch 88/500
0s - loss: 1.6889 - acc: 0.5217
Epoch 89/500
0s - loss: 1.6744 - acc: 0.5217
Epoch 90/500
0s - loss: 1.6683 - acc: 0.5217
Epoch 91/500
0s - loss: 1.6507 - acc: 0.6087
Epoch 92/500
0s - loss: 1.6477 - acc: 0.6087
Epoch 93/500
0s - loss: 1.6368 - acc: 0.6522
Epoch 94/500
0s - loss: 1.6232 - acc: 0.5652
Epoch 95/500
0s - loss: 1.6183 - acc: 0.6087
Epoch 96/500
0s - loss: 1.6101 - acc: 0.6087
Epoch 97/500
0s - loss: 1.6011 - acc: 0.5652
Epoch 98/500
0s - loss: 1.5924 - acc: 0.5652
Epoch 99/500
0s - loss: 1.5823 - acc: 0.5652
Epoch 100/500
0s - loss: 1.5805 - acc: 0.5652
Epoch 101/500
0s - loss: 1.5638 - acc: 0.5652
Epoch 102/500
0s - loss: 1.5551 - acc: 0.6087
Epoch 103/500
0s - loss: 1.5487 - acc: 0.6087
Epoch 104/500
0s - loss: 1.5451 - acc: 0.6957
Epoch 105/500
0s - loss: 1.5308 - acc: 0.6522
Epoch 106/500
0s - loss: 1.5305 - acc: 0.6522
Epoch 107/500
0s - loss: 1.5228 - acc: 0.7826
Epoch 108/500
0s - loss: 1.5121 - acc: 0.6522
Epoch 109/500
0s - loss: 1.4985 - acc: 0.6522
Epoch 110/500
0s - loss: 1.5073 - acc: 0.6087
Epoch 111/500
0s - loss: 1.4845 - acc: 0.7391
Epoch 112/500
0s - loss: 1.4838 - acc: 0.6522
Epoch 113/500
0s - loss: 1.4659 - acc: 0.6957
Epoch 114/500
0s - loss: 1.4603 - acc: 0.7391
Epoch 115/500
0s - loss: 1.4581 - acc: 0.6522
Epoch 116/500
0s - loss: 1.4494 - acc: 0.6957
Epoch 117/500
0s - loss: 1.4476 - acc: 0.6522
Epoch 118/500
0s - loss: 1.4302 - acc: 0.7391
Epoch 119/500
0s - loss: 1.4310 - acc: 0.7826
Epoch 120/500
0s - loss: 1.4193 - acc: 0.7391
Epoch 121/500
0s - loss: 1.4144 - acc: 0.7391
Epoch 122/500
0s - loss: 1.3997 - acc: 0.6957
Epoch 123/500
0s - loss: 1.4017 - acc: 0.6957
Epoch 124/500
0s - loss: 1.3864 - acc: 0.7391
Epoch 125/500
0s - loss: 1.3838 - acc: 0.7391
Epoch 126/500
0s - loss: 1.3814 - acc: 0.8261
Epoch 127/500
0s - loss: 1.3779 - acc: 0.7826
Epoch 128/500
0s - loss: 1.3688 - acc: 0.7826
Epoch 129/500
0s - loss: 1.3582 - acc: 0.8261
Epoch 130/500
0s - loss: 1.3550 - acc: 0.7391
Epoch 131/500
0s - loss: 1.3399 - acc: 0.7826
Epoch 132/500
0s - loss: 1.3436 - acc: 0.6522
Epoch 133/500
0s - loss: 1.3337 - acc: 0.7391
Epoch 134/500
0s - loss: 1.3243 - acc: 0.8261
Epoch 135/500
0s - loss: 1.3163 - acc: 0.8261
Epoch 136/500
0s - loss: 1.3098 - acc: 0.8261
Epoch 137/500
0s - loss: 1.3022 - acc: 0.8261
Epoch 138/500
0s - loss: 1.3000 - acc: 0.7826
Epoch 139/500
0s - loss: 1.2911 - acc: 0.8261
Epoch 140/500
0s - loss: 1.2901 - acc: 0.8261
Epoch 141/500
0s - loss: 1.2738 - acc: 0.8261
Epoch 142/500
0s - loss: 1.2675 - acc: 0.8261
Epoch 143/500
0s - loss: 1.2671 - acc: 0.8261
Epoch 144/500
0s - loss: 1.2609 - acc: 0.8261
Epoch 145/500
0s - loss: 1.2502 - acc: 0.8696
Epoch 146/500
0s - loss: 1.2485 - acc: 0.7826
Epoch 147/500
0s - loss: 1.2431 - acc: 0.7826
Epoch 148/500
0s - loss: 1.2312 - acc: 0.8696
Epoch 149/500
0s - loss: 1.2341 - acc: 0.7826
Epoch 150/500
0s - loss: 1.2221 - acc: 0.8261
Epoch 151/500
0s - loss: 1.2176 - acc: 0.8261
Epoch 152/500
0s - loss: 1.2071 - acc: 0.8261
Epoch 153/500
0s - loss: 1.1953 - acc: 0.8696
Epoch 154/500
0s - loss: 1.1968 - acc: 0.8261
Epoch 155/500
0s - loss: 1.1877 - acc: 0.8696
Epoch 156/500
0s - loss: 1.1869 - acc: 0.8261
Epoch 157/500
0s - loss: 1.1817 - acc: 0.8261
Epoch 158/500
0s - loss: 1.1715 - acc: 0.8261
Epoch 159/500
0s - loss: 1.1654 - acc: 0.7826
Epoch 160/500
0s - loss: 1.1573 - acc: 0.8696
Epoch 161/500
0s - loss: 1.1584 - acc: 0.8696
Epoch 162/500
0s - loss: 1.1478 - acc: 0.8261
Epoch 163/500
0s - loss: 1.1457 - acc: 0.8261
Epoch 164/500
0s - loss: 1.1359 - acc: 0.8696
Epoch 165/500
0s - loss: 1.1379 - acc: 0.7826
Epoch 166/500
0s - loss: 1.1268 - acc: 0.8696
Epoch 167/500
0s - loss: 1.1178 - acc: 0.8696
Epoch 168/500
0s - loss: 1.1122 - acc: 0.8696
Epoch 169/500
0s - loss: 1.1096 - acc: 0.8696
Epoch 170/500
0s - loss: 1.1090 - acc: 0.9130
Epoch 171/500
0s - loss: 1.1040 - acc: 0.9130
Epoch 172/500
0s - loss: 1.0949 - acc: 0.9130
Epoch 173/500
0s - loss: 1.0844 - acc: 0.8696
Epoch 174/500
0s - loss: 1.0788 - acc: 0.8696
Epoch 175/500
0s - loss: 1.0773 - acc: 0.8696
Epoch 176/500
0s - loss: 1.0695 - acc: 0.8696
Epoch 177/500
0s - loss: 1.0729 - acc: 0.8696
Epoch 178/500
0s - loss: 1.0682 - acc: 0.8261
Epoch 179/500
0s - loss: 1.0594 - acc: 0.8696
Epoch 180/500
0s - loss: 1.0497 - acc: 0.8696
Epoch 181/500
0s - loss: 1.0429 - acc: 0.9130
Epoch 182/500
0s - loss: 1.0377 - acc: 0.8696
Epoch 183/500
0s - loss: 1.0383 - acc: 0.8696
Epoch 184/500
0s - loss: 1.0253 - acc: 0.9130
Epoch 185/500
0s - loss: 1.0192 - acc: 0.8696
Epoch 186/500
0s - loss: 1.0239 - acc: 0.9130
Epoch 187/500
0s - loss: 1.0167 - acc: 0.9130
Epoch 188/500
0s - loss: 1.0075 - acc: 0.8696
Epoch 189/500
0s - loss: 1.0084 - acc: 0.9565
Epoch 190/500
0s - loss: 0.9993 - acc: 0.9130
Epoch 191/500
0s - loss: 0.9990 - acc: 0.9565
Epoch 192/500
0s - loss: 0.9912 - acc: 0.9565
Epoch 193/500
0s - loss: 0.9903 - acc: 0.9130
Epoch 194/500
0s - loss: 0.9798 - acc: 0.8696
Epoch 195/500
0s - loss: 0.9747 - acc: 0.8696
Epoch 196/500
0s - loss: 0.9750 - acc: 0.9130
Epoch 197/500
0s - loss: 0.9790 - acc: 0.8696
Epoch 198/500
0s - loss: 0.9696 - acc: 0.9565
Epoch 199/500
0s - loss: 0.9597 - acc: 0.9130
Epoch 200/500
0s - loss: 0.9531 - acc: 0.8696
Epoch 201/500
0s - loss: 0.9510 - acc: 0.9565
Epoch 202/500
0s - loss: 0.9465 - acc: 0.9130
Epoch 203/500
0s - loss: 0.9366 - acc: 0.9565
Epoch 204/500
0s - loss: 0.9375 - acc: 0.9130
Epoch 205/500
0s - loss: 0.9336 - acc: 0.9565
Epoch 206/500
0s - loss: 0.9227 - acc: 0.9130
Epoch 207/500
0s - loss: 0.9243 - acc: 0.9565
Epoch 208/500
0s - loss: 0.9204 - acc: 0.9565
Epoch 209/500
0s - loss: 0.9199 - acc: 0.9130
Epoch 210/500
0s - loss: 0.9093 - acc: 0.9130
Epoch 211/500
0s - loss: 0.9118 - acc: 0.9565
Epoch 212/500
0s - loss: 0.9008 - acc: 0.9565
Epoch 213/500
0s - loss: 0.8985 - acc: 0.9130
Epoch 214/500
0s - loss: 0.9006 - acc: 0.9130
Epoch 215/500
0s - loss: 0.8886 - acc: 0.9565
Epoch 216/500
0s - loss: 0.8743 - acc: 0.9565
Epoch 217/500
0s - loss: 0.8729 - acc: 0.9565
Epoch 218/500
0s - loss: 0.8750 - acc: 0.9130
Epoch 219/500
0s - loss: 0.8694 - acc: 0.9130
Epoch 220/500
0s - loss: 0.8647 - acc: 0.9565
Epoch 221/500
0s - loss: 0.8614 - acc: 0.9130
Epoch 222/500
0s - loss: 0.8542 - acc: 0.9565
Epoch 223/500
0s - loss: 0.8555 - acc: 0.9565
Epoch 224/500
0s - loss: 0.8499 - acc: 0.9130
Epoch 225/500
0s - loss: 0.8386 - acc: 0.9130
Epoch 226/500
0s - loss: 0.8464 - acc: 0.9130
Epoch 227/500
0s - loss: 0.8385 - acc: 0.9565
Epoch 228/500
0s - loss: 0.8345 - acc: 0.9565
Epoch 229/500
0s - loss: 0.8272 - acc: 0.9130
Epoch 230/500
0s - loss: 0.8263 - acc: 0.9130
Epoch 231/500
0s - loss: 0.8200 - acc: 0.9565
Epoch 232/500
0s - loss: 0.8170 - acc: 0.9565
Epoch 233/500
0s - loss: 0.8124 - acc: 0.9565
Epoch 234/500
0s - loss: 0.8094 - acc: 0.9565
Epoch 235/500
0s - loss: 0.8018 - acc: 0.9565
Epoch 236/500
0s - loss: 0.8014 - acc: 0.9565
Epoch 237/500
0s - loss: 0.7914 - acc: 0.9565
Epoch 238/500
0s - loss: 0.7874 - acc: 0.9565
Epoch 239/500
0s - loss: 0.7882 - acc: 0.9565
Epoch 240/500
0s - loss: 0.7889 - acc: 0.9565
Epoch 241/500
0s - loss: 0.7790 - acc: 0.9130
Epoch 242/500
0s - loss: 0.7750 - acc: 0.9565
Epoch 243/500
0s - loss: 0.7662 - acc: 0.9565
Epoch 244/500
0s - loss: 0.7647 - acc: 0.9565
Epoch 245/500
0s - loss: 0.7830 - acc: 0.8696
Epoch 246/500
0s - loss: 0.7678 - acc: 0.9565
Epoch 247/500
0s - loss: 0.7537 - acc: 0.9565
Epoch 248/500
0s - loss: 0.7526 - acc: 0.9565
Epoch 249/500
0s - loss: 0.7521 - acc: 0.9565
Epoch 250/500
0s - loss: 0.7452 - acc: 0.9565
Epoch 251/500
0s - loss: 0.7376 - acc: 0.9565
Epoch 252/500
0s - loss: 0.7324 - acc: 0.9565
Epoch 253/500
0s - loss: 0.7367 - acc: 0.9565
Epoch 254/500
0s - loss: 0.7312 - acc: 1.0000
Epoch 255/500
0s - loss: 0.7304 - acc: 0.9565
Epoch 256/500
0s - loss: 0.7191 - acc: 0.9565
Epoch 257/500
0s - loss: 0.7169 - acc: 0.9565
Epoch 258/500
0s - loss: 0.7191 - acc: 0.9565
Epoch 259/500
0s - loss: 0.7153 - acc: 0.9565
Epoch 260/500
0s - loss: 0.7039 - acc: 1.0000
Epoch 261/500
0s - loss: 0.7023 - acc: 0.9565
Epoch 262/500
0s - loss: 0.7020 - acc: 0.9565
Epoch 263/500
0s - loss: 0.6964 - acc: 0.9565
Epoch 264/500
0s - loss: 0.6912 - acc: 1.0000
Epoch 265/500
0s - loss: 0.6958 - acc: 0.9565
Epoch 266/500
0s - loss: 0.6872 - acc: 0.9565
Epoch 267/500
0s - loss: 0.6845 - acc: 1.0000
Epoch 268/500
0s - loss: 0.6866 - acc: 1.0000
Epoch 269/500
0s - loss: 0.6764 - acc: 0.9565
Epoch 270/500
0s - loss: 0.6666 - acc: 0.9565
Epoch 271/500
0s - loss: 0.6718 - acc: 0.9565
Epoch 272/500
0s - loss: 0.6697 - acc: 0.9565
Epoch 273/500
0s - loss: 0.6646 - acc: 0.9565
Epoch 274/500
0s - loss: 0.6606 - acc: 1.0000
Epoch 275/500
0s - loss: 0.6605 - acc: 0.9565
Epoch 276/500
0s - loss: 0.6516 - acc: 0.9565
Epoch 277/500
0s - loss: 0.6490 - acc: 1.0000
Epoch 278/500
0s - loss: 0.6477 - acc: 0.9565
Epoch 279/500
0s - loss: 0.6394 - acc: 0.9565
Epoch 280/500
0s - loss: 0.6474 - acc: 0.9565
Epoch 281/500
0s - loss: 0.6472 - acc: 0.9130
Epoch 282/500
0s - loss: 0.6343 - acc: 1.0000
Epoch 283/500
0s - loss: 0.6359 - acc: 0.9565
Epoch 284/500
0s - loss: 0.6348 - acc: 0.9565
Epoch 285/500
0s - loss: 0.6266 - acc: 0.9565
Epoch 286/500
0s - loss: 0.6216 - acc: 1.0000
Epoch 287/500
0s - loss: 0.6179 - acc: 0.9565
Epoch 288/500
0s - loss: 0.6123 - acc: 0.9565
Epoch 289/500
0s - loss: 0.6105 - acc: 0.9565
Epoch 290/500
0s - loss: 0.6078 - acc: 1.0000
Epoch 291/500
0s - loss: 0.6046 - acc: 0.9565
Epoch 292/500
0s - loss: 0.6029 - acc: 1.0000
Epoch 293/500
0s - loss: 0.5970 - acc: 0.9565
Epoch 294/500
0s - loss: 0.5984 - acc: 1.0000
Epoch 295/500
0s - loss: 0.5939 - acc: 0.9565
Epoch 296/500
0s - loss: 0.5839 - acc: 0.9565
Epoch 297/500
0s - loss: 0.5902 - acc: 0.9565
Epoch 298/500
0s - loss: 0.5818 - acc: 1.0000
Epoch 299/500
0s - loss: 0.5833 - acc: 1.0000
Epoch 300/500
0s - loss: 0.5733 - acc: 1.0000
Epoch 301/500
0s - loss: 0.5718 - acc: 0.9565
Epoch 302/500
0s - loss: 0.5678 - acc: 1.0000
Epoch 303/500
0s - loss: 0.5684 - acc: 0.9565
Epoch 304/500
0s - loss: 0.5670 - acc: 1.0000
Epoch 305/500
0s - loss: 0.5716 - acc: 1.0000
Epoch 306/500
0s - loss: 0.5563 - acc: 1.0000
Epoch 307/500
0s - loss: 0.5558 - acc: 0.9565
Epoch 308/500
0s - loss: 0.5551 - acc: 0.9565
Epoch 309/500
0s - loss: 0.5442 - acc: 0.9565
Epoch 310/500
0s - loss: 0.5442 - acc: 0.9565
Epoch 311/500
0s - loss: 0.5452 - acc: 1.0000
Epoch 312/500
0s - loss: 0.5365 - acc: 1.0000
Epoch 313/500
0s - loss: 0.5358 - acc: 1.0000
Epoch 314/500
0s - loss: 0.5349 - acc: 1.0000
Epoch 315/500
0s - loss: 0.5340 - acc: 1.0000
Epoch 316/500
0s - loss: 0.5245 - acc: 0.9565
Epoch 317/500
0s - loss: 0.5249 - acc: 0.9565
Epoch 318/500
0s - loss: 0.5226 - acc: 1.0000
Epoch 319/500
0s - loss: 0.5211 - acc: 1.0000
Epoch 320/500
0s - loss: 0.5180 - acc: 0.9565
Epoch 321/500
0s - loss: 0.5170 - acc: 1.0000
Epoch 322/500
0s - loss: 0.5092 - acc: 1.0000
Epoch 323/500
0s - loss: 0.5102 - acc: 0.9565
Epoch 324/500
0s - loss: 0.5069 - acc: 0.9565
Epoch 325/500
0s - loss: 0.5020 - acc: 0.9565
Epoch 326/500
0s - loss: 0.4999 - acc: 0.9565
Epoch 327/500
0s - loss: 0.4985 - acc: 0.9565
Epoch 328/500
0s - loss: 0.4988 - acc: 1.0000
Epoch 329/500
0s - loss: 0.4993 - acc: 1.0000
Epoch 330/500
0s - loss: 0.4935 - acc: 0.9565
Epoch 331/500
0s - loss: 0.4886 - acc: 0.9565
Epoch 332/500
0s - loss: 0.4891 - acc: 1.0000
Epoch 333/500
0s - loss: 0.4878 - acc: 1.0000
Epoch 334/500
0s - loss: 0.4906 - acc: 0.9565
Epoch 335/500
0s - loss: 0.4796 - acc: 0.9565
Epoch 336/500
0s - loss: 0.4783 - acc: 0.9565
Epoch 337/500
0s - loss: 0.4669 - acc: 1.0000
Epoch 338/500
0s - loss: 0.4697 - acc: 1.0000
Epoch 339/500
0s - loss: 0.4640 - acc: 0.9565
Epoch 340/500
0s - loss: 0.4617 - acc: 1.0000
Epoch 341/500
0s - loss: 0.4566 - acc: 1.0000
Epoch 342/500
0s - loss: 0.4564 - acc: 1.0000
Epoch 343/500
0s - loss: 0.4528 - acc: 1.0000
Epoch 344/500
0s - loss: 0.4561 - acc: 1.0000
Epoch 345/500
0s - loss: 0.4562 - acc: 0.9565
Epoch 346/500
0s - loss: 0.4501 - acc: 0.9565
Epoch 347/500
0s - loss: 0.4464 - acc: 1.0000
Epoch 348/500
0s - loss: 0.4451 - acc: 0.9565
Epoch 349/500
0s - loss: 0.4456 - acc: 1.0000
Epoch 350/500
0s - loss: 0.4393 - acc: 1.0000
Epoch 351/500
0s - loss: 0.4361 - acc: 0.9565
Epoch 352/500
0s - loss: 0.4386 - acc: 1.0000
Epoch 353/500
0s - loss: 0.4268 - acc: 1.0000
Epoch 354/500
0s - loss: 0.4341 - acc: 0.9565
Epoch 355/500
0s - loss: 0.4358 - acc: 1.0000
Epoch 356/500
0s - loss: 0.4337 - acc: 1.0000
Epoch 357/500
0s - loss: 0.4274 - acc: 0.9565
Epoch 358/500
0s - loss: 0.4235 - acc: 0.9565
Epoch 359/500
0s - loss: 0.4208 - acc: 1.0000
Epoch 360/500
0s - loss: 0.4161 - acc: 0.9565
Epoch 361/500
0s - loss: 0.4166 - acc: 0.9565
Epoch 362/500
0s - loss: 0.4118 - acc: 1.0000
Epoch 363/500
0s - loss: 0.4089 - acc: 1.0000
Epoch 364/500
0s - loss: 0.4079 - acc: 1.0000
Epoch 365/500
0s - loss: 0.4138 - acc: 1.0000
Epoch 366/500
0s - loss: 0.3999 - acc: 1.0000
Epoch 367/500
0s - loss: 0.3986 - acc: 0.9565
Epoch 368/500
0s - loss: 0.3994 - acc: 1.0000
Epoch 369/500
0s - loss: 0.3936 - acc: 1.0000
Epoch 370/500
0s - loss: 0.3919 - acc: 0.9565
Epoch 371/500
0s - loss: 0.3924 - acc: 1.0000
Epoch 372/500
0s - loss: 0.3885 - acc: 1.0000
Epoch 373/500
0s - loss: 0.3829 - acc: 1.0000
Epoch 374/500
0s - loss: 0.3850 - acc: 1.0000
Epoch 375/500
0s - loss: 0.3860 - acc: 1.0000
Epoch 376/500
0s - loss: 0.3836 - acc: 1.0000
Epoch 377/500
0s - loss: 0.3815 - acc: 1.0000
Epoch 378/500
0s - loss: 0.3775 - acc: 1.0000
Epoch 379/500
0s - loss: 0.3754 - acc: 1.0000
Epoch 380/500
0s - loss: 0.3746 - acc: 1.0000
Epoch 381/500
0s - loss: 0.3769 - acc: 1.0000
Epoch 382/500
0s - loss: 0.3651 - acc: 1.0000
Epoch 383/500
0s - loss: 0.3691 - acc: 1.0000
Epoch 384/500
0s - loss: 0.3855 - acc: 0.9565
Epoch 385/500
0s - loss: 0.3689 - acc: 1.0000
Epoch 386/500
0s - loss: 0.3638 - acc: 1.0000
Epoch 387/500
0s - loss: 0.3605 - acc: 1.0000
Epoch 388/500
0s - loss: 0.3611 - acc: 0.9565
Epoch 389/500
0s - loss: 0.3588 - acc: 1.0000
Epoch 390/500
0s - loss: 0.3535 - acc: 1.0000
Epoch 391/500
0s - loss: 0.3572 - acc: 1.0000
Epoch 392/500
0s - loss: 0.3585 - acc: 1.0000
Epoch 393/500
0s - loss: 0.3630 - acc: 1.0000
Epoch 394/500
0s - loss: 0.3503 - acc: 1.0000
Epoch 395/500
0s - loss: 0.3418 - acc: 1.0000
Epoch 396/500
0s - loss: 0.3416 - acc: 1.0000
Epoch 397/500
0s - loss: 0.3398 - acc: 1.0000
Epoch 398/500
0s - loss: 0.3350 - acc: 1.0000
Epoch 399/500
0s - loss: 0.3328 - acc: 1.0000
Epoch 400/500
0s - loss: 0.3347 - acc: 1.0000
Epoch 401/500
0s - loss: 0.3335 - acc: 1.0000
Epoch 402/500
0s - loss: 0.3444 - acc: 0.9565
Epoch 403/500
0s - loss: 0.3283 - acc: 1.0000
Epoch 404/500
0s - loss: 0.3276 - acc: 1.0000
Epoch 405/500
0s - loss: 0.3263 - acc: 1.0000
Epoch 406/500
0s - loss: 0.3276 - acc: 0.9565
Epoch 407/500
0s - loss: 0.3185 - acc: 1.0000
Epoch 408/500
0s - loss: 0.3217 - acc: 1.0000
Epoch 409/500
0s - loss: 0.3202 - acc: 1.0000
Epoch 410/500
0s - loss: 0.3157 - acc: 1.0000
Epoch 411/500
0s - loss: 0.3182 - acc: 1.0000
Epoch 412/500
0s - loss: 0.3145 - acc: 1.0000
Epoch 413/500
0s - loss: 0.3121 - acc: 1.0000
Epoch 414/500
0s - loss: 0.3111 - acc: 1.0000
Epoch 415/500
0s - loss: 0.3121 - acc: 0.9565
Epoch 416/500
0s - loss: 0.3102 - acc: 1.0000
Epoch 417/500
0s - loss: 0.3041 - acc: 1.0000
Epoch 418/500
0s - loss: 0.3029 - acc: 1.0000
Epoch 419/500
0s - loss: 0.3019 - acc: 1.0000
Epoch 420/500
0s - loss: 0.3037 - acc: 1.0000
Epoch 421/500
0s - loss: 0.3055 - acc: 1.0000
Epoch 422/500
0s - loss: 0.2979 - acc: 1.0000
Epoch 423/500
0s - loss: 0.3016 - acc: 1.0000
Epoch 424/500
0s - loss: 0.3050 - acc: 1.0000
Epoch 425/500
0s - loss: 0.2956 - acc: 1.0000
Epoch 426/500
0s - loss: 0.2899 - acc: 1.0000
Epoch 427/500
0s - loss: 0.2900 - acc: 1.0000
Epoch 428/500
0s - loss: 0.2863 - acc: 1.0000
Epoch 429/500
0s - loss: 0.2865 - acc: 1.0000
Epoch 430/500
0s - loss: 0.2845 - acc: 1.0000
Epoch 431/500
0s - loss: 0.2833 - acc: 1.0000
Epoch 432/500
0s - loss: 0.2867 - acc: 1.0000
Epoch 433/500
0s - loss: 0.2788 - acc: 1.0000
Epoch 434/500
0s - loss: 0.2791 - acc: 1.0000
Epoch 435/500
0s - loss: 0.2782 - acc: 1.0000
Epoch 436/500
0s - loss: 0.2756 - acc: 1.0000
Epoch 437/500
0s - loss: 0.2746 - acc: 1.0000
Epoch 438/500
0s - loss: 0.2721 - acc: 1.0000
Epoch 439/500
0s - loss: 0.2727 - acc: 1.0000
Epoch 440/500
0s - loss: 0.2652 - acc: 1.0000
Epoch 441/500
0s - loss: 0.2686 - acc: 1.0000
Epoch 442/500
0s - loss: 0.2666 - acc: 1.0000
Epoch 443/500
0s - loss: 0.2700 - acc: 1.0000
Epoch 444/500
0s - loss: 0.2669 - acc: 1.0000
Epoch 445/500
0s - loss: 0.2654 - acc: 1.0000
Epoch 446/500
0s - loss: 0.2778 - acc: 1.0000
Epoch 447/500
0s - loss: 0.2784 - acc: 1.0000
Epoch 448/500
0s - loss: 0.2708 - acc: 1.0000
Epoch 449/500
0s - loss: 0.2608 - acc: 1.0000
Epoch 450/500
0s - loss: 0.2545 - acc: 1.0000
Epoch 451/500
0s - loss: 0.2578 - acc: 1.0000
Epoch 452/500
0s - loss: 0.2658 - acc: 1.0000
Epoch 453/500
0s - loss: 0.2528 - acc: 1.0000
Epoch 454/500
0s - loss: 0.2636 - acc: 1.0000
Epoch 455/500
0s - loss: 0.2564 - acc: 0.9565
Epoch 456/500
0s - loss: 0.2480 - acc: 1.0000
Epoch 457/500
0s - loss: 0.2470 - acc: 1.0000
Epoch 458/500
0s - loss: 0.2483 - acc: 1.0000
Epoch 459/500
0s - loss: 0.2393 - acc: 1.0000
Epoch 460/500
0s - loss: 0.2449 - acc: 1.0000
Epoch 461/500
0s - loss: 0.2398 - acc: 1.0000
Epoch 462/500
0s - loss: 0.2406 - acc: 1.0000
Epoch 463/500
0s - loss: 0.2350 - acc: 1.0000
Epoch 464/500
0s - loss: 0.2473 - acc: 1.0000
Epoch 465/500
0s - loss: 0.2373 - acc: 1.0000
Epoch 466/500
0s - loss: 0.2318 - acc: 1.0000
Epoch 467/500
0s - loss: 0.2412 - acc: 1.0000
Epoch 468/500
0s - loss: 0.2382 - acc: 1.0000
Epoch 469/500
0s - loss: 0.2350 - acc: 1.0000
Epoch 470/500
0s - loss: 0.2263 - acc: 1.0000
Epoch 471/500
0s - loss: 0.2293 - acc: 1.0000
Epoch 472/500
0s - loss: 0.2247 - acc: 1.0000
Epoch 473/500
0s - loss: 0.2218 - acc: 1.0000
Epoch 474/500
0s - loss: 0.2210 - acc: 1.0000
Epoch 475/500
0s - loss: 0.2209 - acc: 1.0000
Epoch 476/500
0s - loss: 0.2232 - acc: 1.0000
Epoch 477/500
0s - loss: 0.2578 - acc: 0.9565
Epoch 478/500
0s - loss: 0.2278 - acc: 1.0000
Epoch 479/500
0s - loss: 0.2194 - acc: 1.0000
Epoch 480/500
0s - loss: 0.2283 - acc: 1.0000
Epoch 481/500
0s - loss: 0.2214 - acc: 1.0000
Epoch 482/500
0s - loss: 0.2158 - acc: 1.0000
Epoch 483/500
0s - loss: 0.2115 - acc: 1.0000
Epoch 484/500
0s - loss: 0.2121 - acc: 1.0000
Epoch 485/500
0s - loss: 0.2086 - acc: 1.0000
Epoch 486/500
0s - loss: 0.2172 - acc: 1.0000
Epoch 487/500
0s - loss: 0.2065 - acc: 1.0000
Epoch 488/500
0s - loss: 0.2086 - acc: 1.0000
Epoch 489/500
0s - loss: 0.2111 - acc: 1.0000
Epoch 490/500
0s - loss: 0.2055 - acc: 1.0000
Epoch 491/500
0s - loss: 0.2014 - acc: 1.0000
Epoch 492/500
0s - loss: 0.2038 - acc: 1.0000
Epoch 493/500
0s - loss: 0.2274 - acc: 1.0000
Epoch 494/500
0s - loss: 0.2144 - acc: 1.0000
Epoch 495/500
0s - loss: 0.2177 - acc: 0.9565
Epoch 496/500
0s - loss: 0.2069 - acc: 1.0000
Epoch 497/500
0s - loss: 0.2026 - acc: 1.0000
Epoch 498/500
0s - loss: 0.1956 - acc: 1.0000
Epoch 499/500
0s - loss: 0.1930 - acc: 1.0000
Epoch 500/500
0s - loss: 0.2007 - acc: 1.0000
Out[26]:
<keras.callbacks.History at 0x7f7b0335f4a8>

In [29]:
# summarize performance of the model
scores = model.evaluate(X, y, verbose=0)
print("Model Accuracy: %.2f%%" % (scores[1]*100))


Model Accuracy: 100.00%

In [30]:
predict(dataX)


['A', 'B', 'C'] -> D
['B', 'C', 'D'] -> E
['C', 'D', 'E'] -> F
['D', 'E', 'F'] -> G
['E', 'F', 'G'] -> H
['F', 'G', 'H'] -> I
['G', 'H', 'I'] -> J
['H', 'I', 'J'] -> K
['I', 'J', 'K'] -> L
['J', 'K', 'L'] -> M
['K', 'L', 'M'] -> N
['L', 'M', 'N'] -> O
['M', 'N', 'O'] -> P
['N', 'O', 'P'] -> Q
['O', 'P', 'Q'] -> R
['P', 'Q', 'R'] -> S
['Q', 'R', 'S'] -> T
['R', 'S', 'T'] -> U
['S', 'T', 'U'] -> V
['T', 'U', 'V'] -> W
['U', 'V', 'W'] -> X
['V', 'W', 'X'] -> Y
['W', 'X', 'Y'] -> Z

Stateful LSTM for a One-Char to One-Char Mapping

We have seen that we can break up our raw data into fixed-size sequences and that this representation can be learned by the LSTM, but only as a random mapping of 3 characters to 1 character.

We have also seen that we can pervert batch size to offer more sequence to the network, but only during training.

Ideally, we want to expose the network to the entire sequence and let it learn the inter-dependencies, rather than defining those dependencies explicitly in the framing of the problem.

We can do this in Keras by making the LSTM layers stateful and manually resetting the state of the network at the end of the epoch, which is also the end of the training sequence.

This is truly how LSTM networks are intended to be used. We find that, by allowing the network itself to learn the dependencies between the characters, we need a smaller network (half the number of units) and fewer training epochs (almost half).

We first need to define our LSTM layer as stateful. In so doing, we must explicitly specify the batch size as a dimension on the input shape. This also means that when we evaluate the network or make predictions, we must specify and adhere to this same batch size. This is not a problem now, as we are using a batch size of 1. It could introduce difficulties when the batch size is not one, as predictions would need to be made in batches and in sequence; a sketch of in-sequence prediction follows.
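
For illustration, here is a minimal sketch of how prediction with a stateful model can proceed. It is not a cell from this notebook, and the helper name predict_in_sequence is an assumption: the state is reset once, and the letters are then fed in alphabet order so that each prediction builds on the state left by the previous one.

def predict_in_sequence(model, dataX):
    # hypothetical helper; assumes the stateful model below has been trained
    model.reset_states()  # clear state once, at the start of the sequence
    for pattern in dataX:  # patterns must be presented in alphabet order
        x = numpy.reshape(pattern, (1, len(pattern), 1)) / float(len(alphabet))
        index = numpy.argmax(model.predict(x, batch_size=1, verbose=0))
        print([int_to_char[v] for v in pattern], '->', int_to_char[index])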


In [38]:
seq_length = 1
dataX = []
dataY = []
create_XY(seq_length, alphabet, dataX, dataY)


A -> B
B -> C
C -> D
D -> E
E -> F
F -> G
G -> H
H -> I
I -> J
J -> K
K -> L
L -> M
M -> N
N -> O
O -> P
P -> Q
Q -> R
R -> S
S -> T
T -> U
U -> V
V -> W
W -> X
X -> Y
Y -> Z

In [42]:
# reshape X to be [samples, time steps, features]
X = numpy.reshape(dataX, (len(dataX), seq_length, 1))
# normalize
X = X / float(len(alphabet))
# one hot encode the output variable
y = np_utils.to_categorical(dataY)

In [44]:
# create and fit the model
batch_size = 1
model = Sequential()
model.add(LSTM(16, batch_input_shape=(batch_size, X.shape[1], X.shape[2]), stateful=True))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

In [45]:
for i in range(300):
    model.fit(X, y, nb_epoch=1, batch_size=batch_size, verbose=2, shuffle=False)
    model.reset_states()


INFO (theano.gof.compilelock): Refreshing lock /home/tw/.theano/compiledir_Linux-4.8--generic-x86_64-with-debian-stretch-sid-x86_64-3.5.2-64/lock_dir/lock
Epoch 1/1
0s - loss: 3.2944 - acc: 0.0000e+00
Epoch 1/1
0s - loss: 3.2666 - acc: 0.0400
Epoch 1/1
0s - loss: 3.2508 - acc: 0.0800
Epoch 1/1
0s - loss: 3.2369 - acc: 0.0800
Epoch 1/1
0s - loss: 3.2241 - acc: 0.0800
...
Epoch 1/1
0s - loss: 0.4454 - acc: 1.0000
Epoch 1/1
0s - loss: 0.4423 - acc: 1.0000
Epoch 1/1
0s - loss: 0.4392 - acc: 1.0000
Out[45]:
<keras.callbacks.History at 0x7f7b008827b8>

In [46]:
# summarize performance of the model
scores = model.evaluate(X, y, batch_size=batch_size, verbose=0)
model.reset_states()
print("Model Accuracy: %.2f%%" % (scores[1]*100))


Model Accuracy: 100.00%

In [47]:
# demonstrate some model predictions
seed = [char_to_int[alphabet[0]]]
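# the stateful model carries its state across predict() calls,
# so each one-letter seed continues on from the previous prediction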
for i in range(0, len(alphabet)-1):
    x = numpy.reshape(seed, (1, len(seed), 1))
    x = x / float(len(alphabet))
    prediction = model.predict(x, verbose=0)
    index = numpy.argmax(prediction)
    print(int_to_char[seed[0]], "->", int_to_char[index])
    seed = [index]
model.reset_states()


A -> B
B -> C
C -> D
D -> E
E -> F
F -> G
G -> H
H -> I
I -> J
J -> K
K -> L
L -> M
M -> N
N -> O
O -> P
P -> Q
Q -> R
R -> S
S -> T
T -> U
U -> V
V -> W
W -> X
X -> Y
Y -> Z

In [49]:
# demonstrate a random starting point
letter = "K"
seed = [char_to_int[letter]]
print("New start: ", letter)
for i in range(0, 5):
    x = numpy.reshape(seed, (1, len(seed), 1))
    x = x / float(len(alphabet))
    prediction = model.predict(x, verbose=0)
    index = numpy.argmax(prediction)
    print(int_to_char[seed[0]], "->", int_to_char[index])
    seed = [index]
model.reset_states()


New start:  K
K -> B
B -> C
C -> D
D -> E
E -> F

LSTM with Variable-Length Input to One-Char Output

In the previous section, we discovered that the Keras “stateful” LSTM was really only a shortcut to replaying the first n sequences, but did not really help us learn a generic model of the alphabet.

In this section we explore a variation of the “stateless” LSTM that learns random subsequences of the alphabet, in an effort to build a model that can be given arbitrary letters or subsequences of letters and predict the next letter in the alphabet.

Firstly, we are changing the framing of the problem. To simplify it, we will define a maximum input sequence length and set it to a small value like 5 to speed up training. This defines the maximum length of the subsequences of the alphabet that will be drawn for training. In extensions, this could just as easily be set to the full alphabet (26) or longer, if we allowed looping back to the start of the sequence.

We also need to define the number of random sequences to create, in this case 1000. This too could be more or less. I expect fewer patterns are actually required.


In [51]:
# prepare the dataset of input to output pairs encoded as integers
num_inputs = 1000
max_len = 5
dataX = []
dataY = []
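# draw random subsequences of 1 to max_len letters; the target is always the next letter in the alphabet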
for i in range(num_inputs):
    start = numpy.random.randint(len(alphabet)-2)
    end = numpy.random.randint(start, min(start+max_len,len(alphabet)-1))
    sequence_in = alphabet[start:end+1]
    sequence_out = alphabet[end + 1]
    dataX.append([char_to_int[char] for char in sequence_in])
    dataY.append(char_to_int[sequence_out])
    print(sequence_in, '->', sequence_out)


CDE -> F
G -> H
VWX -> Y
UVWX -> Y
H -> I
JKLMN -> O
UVWX -> Y
ST -> U
B -> C
NOPQ -> R
TU -> V
J -> K
NOPQ -> R
V -> W
QRSTU -> V
CDE -> F
GHIJK -> L
OP -> Q
FG -> H
EFGHI -> J
G -> H
A -> B
GHI -> J
H -> I
KLMNO -> P
BC -> D
W -> X
EFG -> H
FGH -> I
F -> G
WXY -> Z
FGH -> I
FGH -> I
CDE -> F
G -> H
XY -> Z
QR -> S
UVW -> X
LMNOP -> Q
RS -> T
WX -> Y
IJKLM -> N
BCDE -> F
AB -> C
RSTU -> V
VW -> X
TUVW -> X
NOP -> Q
DEF -> G
QR -> S
AB -> C
VWXY -> Z
KLMNO -> P
EF -> G
XY -> Z
L -> M
KLMN -> O
IJKLM -> N
S -> T
XY -> Z
FG -> H
R -> S
CDEFG -> H
AB -> C
BCDEF -> G
N -> O
VWX -> Y
I -> J
Q -> R
I -> J
I -> J
TU -> V
LM -> N
QRSTU -> V
KLMNO -> P
FGHI -> J
O -> P
FGH -> I
MN -> O
KLMN -> O
PQR -> S
B -> C
X -> Y
JKLM -> N
IJKL -> M
BCDE -> F
E -> F
VWXY -> Z
ABCDE -> F
UVW -> X
TUV -> W
F -> G
E -> F
PQR -> S
S -> T
UVWX -> Y
VWX -> Y
CDEFG -> H
ABC -> D
TU -> V
CDEF -> G
TUVWX -> Y
BCDEF -> G
PQ -> R
KLM -> N
T -> U
N -> O
OPQR -> S
W -> X
CD -> E
NOPQR -> S
RSTU -> V
KLM -> N
WX -> Y
O -> P
O -> P
MNO -> P
EFGHI -> J
LMN -> O
N -> O
IJKL -> M
FGHI -> J
G -> H
FG -> H
ABCD -> E
N -> O
ABC -> D
EFGHI -> J
VW -> X
BC -> D
E -> F
D -> E
XY -> Z
HIJ -> K
LMNOP -> Q
KLMNO -> P
H -> I
HI -> J
TUV -> W
KLMNO -> P
JK -> L
LMN -> O
IJKLM -> N
QRSTU -> V
IJKLM -> N
GHIJ -> K
B -> C
EFGHI -> J
FGHIJ -> K
NOPQ -> R
U -> V
EFGH -> I
XY -> Z
PQRS -> T
S -> T
JKL -> M
F -> G
KLMN -> O
H -> I
OP -> Q
RSTU -> V
CDE -> F
B -> C
NOP -> Q
G -> H
KLMNO -> P
IJ -> K
FGHI -> J
A -> B
K -> L
STUV -> W
TU -> V
JKLM -> N
K -> L
T -> U
N -> O
QRST -> U
BC -> D
UVWX -> Y
HIJKL -> M
ABCDE -> F
VWX -> Y
RSTUV -> W
UVW -> X
LM -> N
O -> P
IJK -> L
QRSTU -> V
F -> G
ABC -> D
QR -> S
TUVW -> X
HI -> J
C -> D
GH -> I
DEFG -> H
H -> I
QR -> S
NO -> P
TUVW -> X
KLMN -> O
K -> L
CD -> E
W -> X
IJ -> K
QRSTU -> V
PQRS -> T
LMN -> O
EFGHI -> J
BC -> D
FGH -> I
FGHIJ -> K
AB -> C
HIJK -> L
IJKL -> M
STUVW -> X
K -> L
W -> X
BCDEF -> G
GHI -> J
TUVWX -> Y
JKL -> M
UVWX -> Y
RS -> T
UV -> W
BC -> D
FGHIJ -> K
RST -> U
K -> L
EFG -> H
T -> U
GHI -> J
VWXY -> Z
VW -> X
GHIJK -> L
STUV -> W
J -> K
PQRST -> U
C -> D
EF -> G
DEFG -> H
GHIJ -> K
IJ -> K
MN -> O
GHIJK -> L
CDEF -> G
KLMN -> O
XY -> Z
TU -> V
PQ -> R
X -> Y
ABCD -> E
RST -> U
VWXY -> Z
UVW -> X
BCD -> E
IJKL -> M
CD -> E
UV -> W
X -> Y
GH -> I
NOPQR -> S
LMN -> O
FGHIJ -> K
NOPQR -> S
GHI -> J
LMNOP -> Q
T -> U
P -> Q
BC -> D
XY -> Z
N -> O
GHI -> J
A -> B
VWX -> Y
FG -> H
LM -> N
VW -> X
OPQRS -> T
LM -> N
NOPQ -> R
MNOPQ -> R
STU -> V
FGH -> I
J -> K
MNO -> P
QR -> S
RST -> U
OPQRS -> T
STUVW -> X
QR -> S
EFGH -> I
WXY -> Z
Q -> R
D -> E
NOPQ -> R
JKLMN -> O
MNOP -> Q
X -> Y
W -> X
PQRST -> U
DEFGH -> I
UV -> W
TUVWX -> Y
KLMN -> O
FG -> H
KL -> M
AB -> C
STUV -> W
WX -> Y
KL -> M
NOPQR -> S
DEFG -> H
FGHIJ -> K
ABC -> D
X -> Y
IJK -> L
PQ -> R
WX -> Y
ABCD -> E
KL -> M
BCDEF -> G
T -> U
HIJK -> L
J -> K
C -> D
STUV -> W
OPQ -> R
KLMNO -> P
JKL -> M
R -> S
WXY -> Z
UVWXY -> Z
LMNO -> P
ABC -> D
STUVW -> X
BC -> D
JKL -> M
RS -> T
LMNOP -> Q
PQRS -> T
FGH -> I
V -> W
NO -> P
IJKL -> M
W -> X
FGH -> I
TUV -> W
GHIJK -> L
HIJKL -> M
QRS -> T
KLMNO -> P
MNOP -> Q
TUV -> W
UVWX -> Y
HIJK -> L
M -> N
UV -> W
NO -> P
DEFG -> H
PQR -> S
W -> X
LMNOP -> Q
PQRS -> T
J -> K
T -> U
FGHIJ -> K
PQ -> R
VWX -> Y
BCDE -> F
BCD -> E
MNOPQ -> R
NOPQ -> R
LM -> N
MN -> O
CDEFG -> H
QRSTU -> V
LMNOP -> Q
NOPQ -> R
EFG -> H
LMNO -> P
VWX -> Y
L -> M
K -> L
RSTU -> V
T -> U
E -> F
D -> E
J -> K
M -> N
GHI -> J
CDE -> F
OPQ -> R
IJ -> K
W -> X
X -> Y
RSTUV -> W
HI -> J
LMN -> O
KLMN -> O
IJKLM -> N
MNOPQ -> R
IJ -> K
L -> M
PQ -> R
CDE -> F
F -> G
M -> N
UVW -> X
EF -> G
K -> L
GHI -> J
FGHI -> J
QRST -> U
CDE -> F
STUV -> W
LM -> N
W -> X
LMNO -> P
ABCD -> E
OP -> Q
AB -> C
V -> W
C -> D
STUV -> W
STUV -> W
RST -> U
L -> M
EFG -> H
GH -> I
ST -> U
DEFGH -> I
DEF -> G
IJK -> L
XY -> Z
IJKL -> M
MNOP -> Q
U -> V
PQ -> R
PQ -> R
AB -> C
WX -> Y
M -> N
CD -> E
C -> D
OPQRS -> T
MNOPQ -> R
BC -> D
RS -> T
R -> S
KLMN -> O
KLM -> N
GHIJ -> K
JK -> L
MNOPQ -> R
G -> H
EFG -> H
XY -> Z
FGH -> I
JKL -> M
VWXY -> Z
IJKLM -> N
QRSTU -> V
FGHI -> J
BCD -> E
CDEF -> G
I -> J
O -> P
WXY -> Z
OP -> Q
FGH -> I
FGH -> I
HIJ -> K
KLM -> N
UV -> W
OPQ -> R
U -> V
X -> Y
EFG -> H
KLMNO -> P
BCDEF -> G
RSTUV -> W
WXY -> Z
I -> J
RSTUV -> W
STU -> V
Q -> R
ABCD -> E
N -> O
XY -> Z
V -> W
BCDEF -> G
MNOPQ -> R
ABCD -> E
A -> B
B -> C
STUV -> W
U -> V
STU -> V
JK -> L
UVW -> X
NO -> P
X -> Y
A -> B
FGH -> I
UVWX -> Y
VWX -> Y
KL -> M
WX -> Y
P -> Q
DE -> F
IJKL -> M
U -> V
IJKLM -> N
EFGH -> I
UV -> W
TUV -> W
NOP -> Q
OPQRS -> T
L -> M
FGHIJ -> K
P -> Q
O -> P
VWX -> Y
EFGH -> I
EF -> G
DEF -> G
JKLM -> N
DEFG -> H
VWXY -> Z
NOPQ -> R
Q -> R
WXY -> Z
NO -> P
LMNOP -> Q
W -> X
QRST -> U
TU -> V
IJ -> K
EFGH -> I
DEFGH -> I
V -> W
QRS -> T
S -> T
IJKLM -> N
ABCDE -> F
I -> J
HI -> J
ABCDE -> F
BC -> D
STU -> V
OPQR -> S
LMNOP -> Q
OPQ -> R
UVWX -> Y
RS -> T
W -> X
KLMN -> O
ABCD -> E
OPQRS -> T
OPQRS -> T
GH -> I
LMN -> O
XY -> Z
PQRS -> T
STUVW -> X
PQRST -> U
N -> O
NOP -> Q
XY -> Z
VWXY -> Z
GHI -> J
KLM -> N
PQRST -> U
PQRS -> T
PQRST -> U
IJKLM -> N
STUVW -> X
DE -> F
CDEF -> G
K -> L
GHIJK -> L
T -> U
H -> I
STUVW -> X
IJKL -> M
JKL -> M
TUVW -> X
ABCD -> E
BC -> D
GH -> I
QRS -> T
TUVWX -> Y
NOP -> Q
ABC -> D
VWXY -> Z
JKL -> M
OPQR -> S
RSTU -> V
X -> Y
PQR -> S
E -> F
OPQR -> S
IJ -> K
FGHI -> J
FGHIJ -> K
BCD -> E
IJK -> L
OP -> Q
LMNOP -> Q
ABCDE -> F
XY -> Z
KLMN -> O
P -> Q
UV -> W
I -> J
HIJKL -> M
AB -> C
GHI -> J
BCD -> E
N -> O
FGH -> I
BC -> D
CDEFG -> H
CDEF -> G
UV -> W
L -> M
LMNOP -> Q
LMN -> O
BC -> D
EFGHI -> J
PQRS -> T
RSTU -> V
Q -> R
XY -> Z
QRSTU -> V
HIJ -> K
A -> B
LMNOP -> Q
MN -> O
QR -> S
TU -> V
LMNOP -> Q
RSTU -> V
KL -> M
E -> F
TUV -> W
QR -> S
U -> V
IJKL -> M
O -> P
RSTUV -> W
Q -> R
GHIJK -> L
X -> Y
KLMN -> O
FGH -> I
J -> K
WX -> Y
PQR -> S
QR -> S
DEFGH -> I
D -> E
NO -> P
QR -> S
VW -> X
JKL -> M
JK -> L
FGHIJ -> K
M -> N
UVWXY -> Z
VW -> X
D -> E
WX -> Y
PQR -> S
X -> Y
STUV -> W
J -> K
CD -> E
XY -> Z
LMN -> O
HIJK -> L
HI -> J
EFGH -> I
UVWX -> Y
NOPQR -> S
MNOPQ -> R
DEFGH -> I
RS -> T
RST -> U
IJKLM -> N
L -> M
FGHI -> J
UVWXY -> Z
WX -> Y
KL -> M
WXY -> Z
U -> V
C -> D
EF -> G
E -> F
WXY -> Z
TUVWX -> Y
STUVW -> X
ABCD -> E
JK -> L
CDE -> F
T -> U
TUVW -> X
E -> F
CDEFG -> H
N -> O
RSTUV -> W
ABCDE -> F
A -> B
U -> V
UVWX -> Y
A -> B
FG -> H
G -> H
KLMNO -> P
PQRST -> U
MNOP -> Q
WX -> Y
VWXY -> Z
PQRS -> T
NO -> P
NOP -> Q
STUV -> W
JKLM -> N
R -> S
IJ -> K
TUVW -> X
KLMN -> O
UVWX -> Y
TUVW -> X
ABCD -> E
O -> P
QRST -> U
WX -> Y
LMNO -> P
HI -> J
HIJKL -> M
GHIJ -> K
HIJK -> L
DEFGH -> I
DE -> F
UV -> W
QR -> S
IJKL -> M
UVWX -> Y
PQRS -> T
IJKLM -> N
K -> L
NOPQR -> S
UVW -> X
O -> P
O -> P
ST -> U
QRSTU -> V
IJK -> L
BCD -> E
OPQRS -> T
MNO -> P
GHIJ -> K
L -> M
D -> E
B -> C
B -> C
HIJKL -> M
EFGH -> I
JKLMN -> O
HIJKL -> M
P -> Q
CDEF -> G
F -> G
KLM -> N
LMN -> O
LMN -> O
MNO -> P
QRSTU -> V
JKL -> M
WX -> Y
IJKLM -> N
NOP -> Q
LMNOP -> Q
IJKLM -> N
KLMN -> O
TUV -> W
EF -> G
CDE -> F
ABCDE -> F
AB -> C
ABCDE -> F
OPQRS -> T
FGHI -> J
PQRST -> U
FGH -> I
ST -> U
ST -> U
AB -> C
R -> S
K -> L
IJKLM -> N
BCDEF -> G
VWX -> Y
BCDE -> F
FGHI -> J
N -> O
D -> E
X -> Y
UVW -> X
TUVW -> X
FGHI -> J
OPQ -> R
H -> I
XY -> Z
L -> M
CDE -> F
MNO -> P
EFGH -> I
D -> E
PQ -> R
KLMN -> O
KLM -> N
QR -> S
J -> K
U -> V
WX -> Y
AB -> C
ABC -> D
XY -> Z
VWX -> Y
Q -> R
MNO -> P
V -> W
ST -> U
OPQRS -> T
MNO -> P
MNO -> P
CD -> E
P -> Q
JK -> L
OP -> Q
NOPQR -> S
LMNOP -> Q
EFG -> H
UVWXY -> Z
U -> V
KLMN -> O
BCDE -> F
F -> G
QRST -> U
TUVW -> X
GHI -> J
TU -> V
AB -> C
STUVW -> X
VW -> X
GH -> I
HI -> J
G -> H
KLMNO -> P
RS -> T
VWX -> Y
GHI -> J
I -> J
OP -> Q
EFGHI -> J
VW -> X
M -> N
TUV -> W
Q -> R
WX -> Y
FGHI -> J
JKLM -> N
B -> C
EFGH -> I
P -> Q
S -> T
BC -> D
OPQ -> R
X -> Y
QRST -> U
IJ -> K
A -> B
OPQRS -> T
O -> P
RSTUV -> W
JKL -> M
MN -> O
KL -> M
STU -> V
ABCD -> E
OPQR -> S
QRSTU -> V
N -> O
NOP -> Q
FGHIJ -> K
NOP -> Q
VWX -> Y
TUVWX -> Y
ABC -> D
IJ -> K
GHI -> J
STUV -> W
F -> G
JKLMN -> O
JK -> L
LMNOP -> Q
X -> Y
RS -> T
AB -> C
PQR -> S
LMNO -> P
G -> H
GHI -> J
E -> F
Q -> R
CD -> E
TUVWX -> Y
F -> G
QRSTU -> V
P -> Q
CDE -> F
P -> Q
PQRST -> U
P -> Q
QRST -> U
FGHIJ -> K
T -> U
H -> I
GH -> I
HIJKL -> M
STUV -> W
OPQR -> S
I -> J
MNOPQ -> R
I -> J
LMNOP -> Q
DEFGH -> I
JKLMN -> O
VW -> X
FGHIJ -> K
RSTUV -> W
BCD -> E
FGHIJ -> K
STUVW -> X
FGH -> I
F -> G
XY -> Z
CD -> E
EFGHI -> J
EF -> G
TUV -> W
ABC -> D
KLMN -> O
IJK -> L
DEFGH -> I
NOPQ -> R
OP -> Q
DEFG -> H
LMN -> O
KLMNO -> P
BCDE -> F
ABCD -> E
UVWX -> Y
STUVW -> X
STU -> V
MNOPQ -> R
JK -> L
WX -> Y
IJKLM -> N
S -> T
EFGH -> I
VW -> X
VW -> X
UVWXY -> Z
JKLMN -> O
H -> I
UVW -> X
TUVWX -> Y
STU -> V
OPQR -> S
IJKLM -> N
WXY -> Z
C -> D
MN -> O
GHI -> J
TU -> V
ABC -> D
O -> P
D -> E
KLMN -> O
EFG -> H
CD -> E
STU -> V
VWXY -> Z
FGHI -> J
EF -> G

In [55]:
from keras.preprocessing.sequence import pad_sequences
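# left-pad the variable-length integer sequences with zeros so every input has length max_len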
X = pad_sequences(dataX, max_len)
X.shape
X[:3]


Out[55]:
(1000, 5)
Out[55]:
array([[ 0,  0,  2,  3,  4],
       [ 0,  0,  0,  0,  6],
       [ 0,  0, 21, 22, 23]], dtype=int32)

In [56]:
# reshape X to be [samples, time steps, features]
X = numpy.reshape(X, (X.shape[0], max_len, 1))
# normalize
X = X / float(len(alphabet))
# one hot encode the output variable
y = np_utils.to_categorical(dataY)

In [57]:
# create and fit the model
batch_size = 1
model = Sequential()
model.add(LSTM(32, input_shape=(X.shape[1], 1)))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

In [59]:
model.fit(X, y, nb_epoch=50, batch_size=batch_size, verbose=2)


Epoch 1/50
6s - loss: 0.8105 - acc: 0.7280
Epoch 2/50
6s - loss: 0.7723 - acc: 0.7390
Epoch 3/50
6s - loss: 0.7692 - acc: 0.7560
Epoch 4/50
6s - loss: 0.7571 - acc: 0.7680
Epoch 5/50
6s - loss: 0.7461 - acc: 0.7610
Epoch 6/50
6s - loss: 0.7195 - acc: 0.7710
Epoch 7/50
6s - loss: 0.7321 - acc: 0.7670
Epoch 8/50
6s - loss: 0.6983 - acc: 0.7780
Epoch 9/50
6s - loss: 0.6872 - acc: 0.7740
Epoch 10/50
6s - loss: 0.7077 - acc: 0.7600
Epoch 11/50
6s - loss: 0.6713 - acc: 0.7850
Epoch 12/50
7s - loss: 0.6815 - acc: 0.7650
Epoch 13/50
6s - loss: 0.6459 - acc: 0.7910
Epoch 14/50
6s - loss: 0.6518 - acc: 0.7730
Epoch 15/50
6s - loss: 0.6791 - acc: 0.7730
Epoch 16/50
6s - loss: 0.6044 - acc: 0.8110
Epoch 17/50
6s - loss: 0.6428 - acc: 0.7990
Epoch 18/50
6s - loss: 0.6214 - acc: 0.8020
Epoch 19/50
6s - loss: 0.6191 - acc: 0.7970
Epoch 20/50
6s - loss: 0.6133 - acc: 0.7920
Epoch 21/50
6s - loss: 0.5888 - acc: 0.8130
Epoch 22/50
7s - loss: 0.6025 - acc: 0.8050
Epoch 23/50
6s - loss: 0.5919 - acc: 0.7990
Epoch 24/50
6s - loss: 0.5849 - acc: 0.7970
Epoch 25/50
6s - loss: 0.5699 - acc: 0.8240
Epoch 26/50
6s - loss: 0.5561 - acc: 0.8190
Epoch 27/50
6s - loss: 0.5703 - acc: 0.8090
Epoch 28/50
6s - loss: 0.6058 - acc: 0.8050
Epoch 29/50
6s - loss: 0.5318 - acc: 0.8360
Epoch 30/50
6s - loss: 0.5292 - acc: 0.8210
Epoch 31/50
7s - loss: 0.5469 - acc: 0.8070
Epoch 32/50
6s - loss: 0.5193 - acc: 0.8330
Epoch 33/50
6s - loss: 0.5507 - acc: 0.8140
Epoch 34/50
6s - loss: 0.5262 - acc: 0.8300
Epoch 35/50
6s - loss: 0.5302 - acc: 0.8240
Epoch 36/50
6s - loss: 0.5027 - acc: 0.8280
Epoch 37/50
6s - loss: 0.4922 - acc: 0.8330
Epoch 38/50
6s - loss: 0.5253 - acc: 0.8140
Epoch 39/50
6s - loss: 0.5092 - acc: 0.8280
Epoch 40/50
6s - loss: 0.5235 - acc: 0.8260
Epoch 41/50
6s - loss: 0.4802 - acc: 0.8520
Epoch 42/50
6s - loss: 0.5068 - acc: 0.8280
Epoch 43/50
6s - loss: 0.4766 - acc: 0.8400
Epoch 44/50
6s - loss: 0.4865 - acc: 0.8380
Epoch 45/50
6s - loss: 0.4781 - acc: 0.8270
Epoch 46/50
6s - loss: 0.4627 - acc: 0.8460
Epoch 47/50
6s - loss: 0.5142 - acc: 0.8280
Epoch 48/50
6s - loss: 0.4418 - acc: 0.8560
Epoch 49/50
6s - loss: 0.4817 - acc: 0.8360
Epoch 50/50
6s - loss: 0.5089 - acc: 0.8250
Out[59]:
<keras.callbacks.History at 0x7f7afecd6978>

In [60]:
# summarize performance of the model
scores = model.evaluate(X, y, verbose=0)
print("Model Accuracy: %.2f%%" % (scores[1]*100))


Model Accuracy: 79.10%

In [61]:
# demonstrate some model predictions
for i in range(20):
    pattern_index = numpy.random.randint(len(dataX))
    pattern = dataX[pattern_index]
    x = pad_sequences([pattern], maxlen=max_len)
    x = numpy.reshape(x, (1, max_len, 1))
    x = x / float(len(alphabet))
    prediction = model.predict(x, verbose=0)
    index = numpy.argmax(prediction)
    result = int_to_char[index]
    seq_in = [int_to_char[value] for value in pattern]
    print(seq_in, "->", result)


['K', 'L', 'M', 'N'] -> O
['G', 'H', 'I'] -> J
['I', 'J', 'K', 'L', 'M'] -> N
['U'] -> U
['E', 'F'] -> G
['W', 'X'] -> Y
['H', 'I', 'J', 'K', 'L'] -> M
['K', 'L', 'M', 'N'] -> O
['S'] -> U
['B', 'C'] -> D
['I', 'J'] -> K
['I', 'J', 'K', 'L', 'M'] -> N
['N'] -> O
['G'] -> G
['I', 'J', 'K', 'L', 'M'] -> N
['N', 'O', 'P'] -> Q
['K'] -> K
['S', 'T', 'U', 'V', 'W'] -> X
['Q', 'R', 'S', 'T'] -> U
['Q', 'R', 'S', 'T', 'U'] -> W

We can see that although the model did not learn the alphabet perfectly from the randomly generated subsequences, it did very well. The model was not tuned and may require more training or a larger network, or both (an exercise for the reader).

This is a good natural extension to the “all sequential input examples in each batch” alphabet model learned above, in that it can handle ad hoc queries, this time of arbitrary sequence length (up to the maximum length).
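
As a minimal sketch of such an ad hoc query, reusing the objects defined above, any subsequence can be encoded, padded, and reshaped before calling predict:

# query the model with an arbitrary subsequence, e.g. "DEF"
pattern = [char_to_int[c] for c in "DEF"]
x = pad_sequences([pattern], maxlen=max_len)
x = numpy.reshape(x, (1, max_len, 1)) / float(len(alphabet))
prediction = model.predict(x, verbose=0)
print("DEF ->", int_to_char[numpy.argmax(prediction)])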


In [ ]: