Created a new notebook

by Sergii Surnin

(C) 2017


In [3]:
from scipy import misc
f = misc.face()             # sample RGB image (the raccoon face) bundled with SciPy
misc.imsave('face.png', f)  # write it to disk as a PNG

import matplotlib.pyplot as plt
plt.imshow(f)
plt.show()
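
Note: scipy.misc.imsave and scipy.misc.imread were later deprecated and removed from SciPy. A rough equivalent with imageio (an assumption: imageio is installed and the SciPy version still provides misc.face) would be:

import imageio
from scipy import misc

f = misc.face()
imageio.imwrite('face.png', f)  # stand-in for the removed misc.imsave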



In [4]:
import sys

sys.executable


Out[4]:
'C:\\Program Files\\Anaconda3\\python.exe'

In [5]:
from scipy import misc
import numpy as np
face = misc.face()
misc.imsave('face.png', face)  # First we need to create the PNG file

face = misc.imread('face.png')  # then read it back as a NumPy array
type(face)
#face.shape, face.dtype

face.tofile('face.raw')  # Create raw file
face_from_raw = np.fromfile('face.raw', dtype=np.uint8)
face_from_raw.shape      # a flat 1-D array: the raw dump does not store the image shape

face_from_raw.shape = (768, 1024, 3)  # restore the original (rows, cols, channels) shape

#face_memmap = np.memmap('face.raw', dtype=np.uint8, shape=(768, 1024, 3))

# write ten 100x100 images of random values, then collect their filenames with glob
for i in range(10):
    im = np.random.randint(0, 255, 10000).reshape((100, 100))
    misc.imsave('random_%02d.png' % i, im)
from glob import glob
filelist = glob('random*.png')
filelist.sort()
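
The commented-out memmap line above points at an alternative to loading the whole raw dump into memory. A minimal sketch (assuming the face.raw file and the filelist built above) of mapping the raw bytes lazily and reading the random PNGs back into one array:

import numpy as np
from scipy import misc

# map the raw bytes on disk instead of reading them all at once
face_memmap = np.memmap('face.raw', dtype=np.uint8, shape=(768, 1024, 3))
print(face_memmap[0, 0])   # pixels are fetched from disk on access

# stack the random_*.png files written above into a single array
images = np.array([misc.imread(fname) for fname in filelist])
print(images.shape)        # expected: (10, 100, 100)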

In [6]:
from scipy import misc
import numpy as np
f = misc.face(gray=True)  # retrieve a grayscale image
import matplotlib.pyplot as plt
plt.imshow(f, cmap=plt.cm.gray)
plt.show()

plt.imshow(f, cmap=plt.cm.gray, vmin=30, vmax=200)
# Remove axes and ticks
plt.axis('off')
plt.show()

plt.contour(f, [50, 200])
plt.show()



In [7]:
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

image = Image.open('face.png')
data = np.asarray(
    image.resize((300, 300), Image.NEAREST), 
    dtype='uint8'
)

data = np.stack([data, data, data], axis=2)  # face.png is RGB, so this produces shape (300, 300, 3, 3)
data = data/255.0

data.max()
#plt.imshow(data); # doesn't work - imshow raises an exception because the array is 4-D, not (M, N, 3)
#plt.axis('off');


Out[7]:
1.0
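
The exception noted above comes from stacking an array that is already three-channel: face.png is RGB, so np.stack produces a 4-D (300, 300, 3, 3) array that imshow cannot display. A minimal sketch of the presumably intended version, converting to grayscale first so the stack yields a proper (300, 300, 3) image:

from PIL import Image
import numpy as np
import matplotlib.pyplot as plt

# convert to a single channel ('L') before stacking
gray = Image.open('face.png').convert('L').resize((300, 300), Image.NEAREST)
data = np.asarray(gray, dtype='uint8')
data = np.stack([data, data, data], axis=2) / 255.0   # now (300, 300, 3)
plt.imshow(data)
plt.axis('off')
plt.show()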

In [8]:
import scipy.misc
import matplotlib.pyplot as plt

f = scipy.misc.face(gray=True)

plt.figure(figsize=(10, 3.6))

plt.subplot(131)
plt.imshow(f, cmap=plt.cm.gray)

plt.subplot(132)
plt.imshow(f, cmap=plt.cm.gray, vmin=30, vmax=200)
plt.axis('off')

plt.subplot(133)
plt.imshow(f, cmap=plt.cm.gray)
plt.contour(f, [50, 200])
plt.axis('off')

plt.subplots_adjust(wspace=0, hspace=0., top=0.99, bottom=0.01, left=0.05,
                    right=0.99)
plt.show()



In [9]:
import scipy.misc
import matplotlib.pyplot as plt

f = scipy.misc.face(gray=True)

plt.figure(figsize=(8, 4))

plt.subplot(1, 2, 1)
plt.imshow(f[320:340, 510:530], cmap=plt.cm.gray, interpolation='nearest')
plt.axis('off')

plt.subplot(1, 2, 2)
plt.imshow(f[320:340, 510:530], cmap=plt.cm.gray, interpolation='lanczos')
plt.axis('off')

plt.subplots_adjust(wspace=0.02, hspace=0.02, top=1, bottom=0, left=0, right=1)
plt.show()



In [10]:
import numpy as np
import scipy
import scipy.misc
import matplotlib.pyplot as plt

face = scipy.misc.face(gray=True)
face[10:13, 20:23]           # inspect a small patch of pixel values
face[100:120] = 255          # paint rows 100-119 white

lx, ly = face.shape
X, Y = np.ogrid[0:lx, 0:ly]                       # open grids of row and column indices
mask = (X - lx/2)**2 + (Y - ly/2)**2 > lx*ly/4    # True outside a central disc
face[mask] = 0                                    # black out everything outside the disc
face[range(400), range(400)] = 255                # white diagonal via fancy indexing

plt.figure(figsize=(3, 3))
plt.axes([0, 0, 1, 1])
plt.imshow(face, cmap=plt.cm.gray)
plt.axis('off')

plt.show()
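
A tiny illustration (not from the original notebook) of how np.ogrid plus broadcasting builds the circular mask used above: the open grids have shapes (n, 1) and (1, m), so the squared-distance expression broadcasts to a full (n, m) boolean array.

import numpy as np

X, Y = np.ogrid[0:4, 0:6]
print(X.shape, Y.shape)              # (4, 1) (1, 6)
mask = (X - 2)**2 + (Y - 3)**2 > 4   # broadcasts to a (4, 6) boolean mask
print(mask.astype(int))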



In [12]:
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img

datagen = ImageDataGenerator(
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='nearest')

img = load_img('../train/001.ak47/001_0001.jpg')  # this is a PIL image
x = img_to_array(img)  # a NumPy array of shape (height, width, 3) with the default channels_last setting
x = x.reshape((1,) + x.shape)  # add a batch dimension: (1, height, width, 3)

# the .flow() command below generates batches of randomly transformed images
# and saves the results to the `preview/` directory
i = 0
for batch in datagen.flow(x, batch_size=1, save_to_dir='preview', save_prefix='ak-47', save_format='jpeg'):
    i += 1
    if i > 20:
        break  # otherwise the generator would loop indefinitely


Using TensorFlow backend.
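
Instead of writing the augmented batches to disk, they can also be inspected inline. A short sketch (not part of the original run, reusing datagen and x from the cell above) that displays five augmented versions of the same image:

import matplotlib.pyplot as plt
from keras.preprocessing.image import array_to_img

plt.figure(figsize=(10, 3))
for i, batch in enumerate(datagen.flow(x, batch_size=1)):
    plt.subplot(1, 5, i + 1)
    plt.imshow(array_to_img(batch[0]))   # convert the (h, w, 3) float array back to a PIL image
    plt.axis('off')
    if i == 4:
        break   # the generator loops forever, so stop after five samples
plt.show()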

In [2]:
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense, Input
from keras import backend as K

# dimensions of our images.
img_width, img_height = 64, 64

train_data_dir = '../train_small'
validation_data_dir = '../validation_small'
nb_train_samples = 372
nb_validation_samples = 99
epochs = 512
batch_size = 16
rescale_koef = 1. / 255

'''
if K.image_data_format() == 'channels_first':   # actually, 'channels_last'
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)
'''
print(K.image_data_format())  # channels_last, so:
current_input_shape = (img_width, img_height, 3)  # channels_last

print(current_input_shape)


model = Sequential()

model.add(Flatten(input_shape=current_input_shape))
model.add(Dense(60, kernel_initializer='he_uniform', activation='relu'))
model.add(Dense(5, activation='softmax'))

'''
model.add(Conv2D(32, (3, 3), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Dropout(0.5))

model.add(Conv2D(32, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Conv2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))

model.add(Dense(1))
model.add(Activation('sigmoid'))
'''

model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# this is the augmentation configuration we will use for training
train_datagen = ImageDataGenerator(
    rescale=rescale_koef,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True)

# this is the augmentation configuration we will use for testing:
# only rescaling
test_datagen = ImageDataGenerator(rescale=rescale_koef)

train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='categorical')

validation_generator = test_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='categorical')

model.fit_generator(
    train_generator,
    steps_per_epoch=nb_train_samples // batch_size,
    epochs=epochs,
    validation_data=validation_generator,
    validation_steps=nb_validation_samples // batch_size)

model.save_weights('first_try.h5')

model.count_params()

model.summary()


channels_last
(64, 64, 3)
Found 372 images belonging to 5 classes.
Found 99 images belonging to 5 classes.
Epoch 1/512
23/23 [==============================] - 4s - loss: 10.9747 - acc: 0.2174 - val_loss: 11.0038 - val_acc: 0.1979
Epoch 2/512
23/23 [==============================] - 1s - loss: 12.7496 - acc: 0.1769 - val_loss: 9.5643 - val_acc: 0.2292
Epoch 3/512
23/23 [==============================] - 1s - loss: 9.5748 - acc: 0.3146 - val_loss: 7.3328 - val_acc: 0.4819
Epoch 4/512
23/23 [==============================] - 1s - loss: 9.8636 - acc: 0.3177 - val_loss: 7.9151 - val_acc: 0.4217
Epoch 5/512
23/23 [==============================] - 1s - loss: 9.7985 - acc: 0.2718 - val_loss: 7.6155 - val_acc: 0.4578
Epoch 6/512
23/23 [==============================] - 1s - loss: 9.1757 - acc: 0.3228 - val_loss: 8.7606 - val_acc: 0.3012
Epoch 7/512
23/23 [==============================] - 1s - loss: 8.5164 - acc: 0.3561 - val_loss: 6.8790 - val_acc: 0.4578
Epoch 8/512
23/23 [==============================] - 1s - loss: 8.7084 - acc: 0.2990 - val_loss: 6.5579 - val_acc: 0.4940
Epoch 9/512
23/23 [==============================] - 1s - loss: 8.3631 - acc: 0.3367 - val_loss: 7.3831 - val_acc: 0.3614
Epoch 10/512
23/23 [==============================] - 1s - loss: 8.4977 - acc: 0.3262 - val_loss: 7.6131 - val_acc: 0.3735
Epoch 11/512
23/23 [==============================] - 1s - loss: 8.6617 - acc: 0.3507 - val_loss: 7.6941 - val_acc: 0.4699
Epoch 12/512
23/23 [==============================] - 1s - loss: 8.5223 - acc: 0.3017 - val_loss: 6.7860 - val_acc: 0.4819
Epoch 13/512
23/23 [==============================] - 1s - loss: 8.2910 - acc: 0.3316 - val_loss: 7.3665 - val_acc: 0.4096
Epoch 14/512
23/23 [==============================] - 1s - loss: 8.7810 - acc: 0.3102 - val_loss: 6.6461 - val_acc: 0.4578
Epoch 15/512
23/23 [==============================] - 1s - loss: 8.0647 - acc: 0.3558 - val_loss: 6.7200 - val_acc: 0.4337
Epoch 16/512
23/23 [==============================] - 1s - loss: 8.0866 - acc: 0.3449 - val_loss: 7.9986 - val_acc: 0.2892
Epoch 17/512
23/23 [==============================] - 1s - loss: 7.4197 - acc: 0.4051 - val_loss: 6.6172 - val_acc: 0.4578
Epoch 18/512
23/23 [==============================] - 1s - loss: 8.6564 - acc: 0.2990 - val_loss: 6.7880 - val_acc: 0.3735
Epoch 19/512
23/23 [==============================] - 1s - loss: 8.0846 - acc: 0.3507 - val_loss: 6.8330 - val_acc: 0.4819
Epoch 20/512
23/23 [==============================] - 1s - loss: 7.4507 - acc: 0.3697 - val_loss: 6.6400 - val_acc: 0.4578
Epoch 21/512
23/23 [==============================] - 1s - loss: 8.3852 - acc: 0.2799 - val_loss: 7.0306 - val_acc: 0.4940
Epoch 22/512
23/23 [==============================] - 1s - loss: 7.7282 - acc: 0.3157 - val_loss: 7.1360 - val_acc: 0.4940
Epoch 23/512
23/23 [==============================] - 1s - loss: 5.7888 - acc: 0.3725 - val_loss: 3.0646 - val_acc: 0.3133
Epoch 24/512
23/23 [==============================] - 1s - loss: 2.6377 - acc: 0.3690 - val_loss: 1.6928 - val_acc: 0.5181
Epoch 25/512
23/23 [==============================] - 1s - loss: 1.5799 - acc: 0.3859 - val_loss: 1.2895 - val_acc: 0.4337
Epoch 26/512
23/23 [==============================] - 1s - loss: 1.4084 - acc: 0.3507 - val_loss: 1.1916 - val_acc: 0.5181
Epoch 27/512
23/23 [==============================] - 1s - loss: 1.4425 - acc: 0.3153 - val_loss: 1.3375 - val_acc: 0.3855
Epoch 28/512
23/23 [==============================] - 1s - loss: 1.3562 - acc: 0.3667 - val_loss: 1.2676 - val_acc: 0.3614
Epoch 29/512
23/23 [==============================] - 1s - loss: 1.3965 - acc: 0.3402 - val_loss: 1.4347 - val_acc: 0.2169
Epoch 30/512
23/23 [==============================] - 1s - loss: 1.4415 - acc: 0.3810 - val_loss: 1.2448 - val_acc: 0.3735
Epoch 31/512
23/23 [==============================] - 1s - loss: 1.3227 - acc: 0.4456 - val_loss: 1.1617 - val_acc: 0.3614
Epoch 32/512
23/23 [==============================] - 1s - loss: 1.3112 - acc: 0.4129 - val_loss: 1.1815 - val_acc: 0.4217
Epoch 33/512
23/23 [==============================] - 1s - loss: 1.2722 - acc: 0.4425 - val_loss: 1.2954 - val_acc: 0.3855
Epoch 34/512
23/23 [==============================] - 1s - loss: 1.3502 - acc: 0.4235 - val_loss: 1.2683 - val_acc: 0.3735
Epoch 35/512
23/23 [==============================] - 1s - loss: 1.2657 - acc: 0.4055 - val_loss: 1.2681 - val_acc: 0.3976
Epoch 36/512
23/23 [==============================] - 1s - loss: 1.3229 - acc: 0.3810 - val_loss: 1.1879 - val_acc: 0.4217
Epoch 37/512
23/23 [==============================] - 1s - loss: 1.3172 - acc: 0.4429 - val_loss: 1.2306 - val_acc: 0.4337
Epoch 38/512
23/23 [==============================] - 1s - loss: 1.2741 - acc: 0.4126 - val_loss: 1.3078 - val_acc: 0.2892
Epoch 39/512
23/23 [==============================] - 1s - loss: 1.3622 - acc: 0.3674 - val_loss: 1.2466 - val_acc: 0.3976
Epoch 40/512
23/23 [==============================] - 1s - loss: 1.2084 - acc: 0.4320 - val_loss: 1.1535 - val_acc: 0.4217
Epoch 41/512
23/23 [==============================] - 1s - loss: 1.1890 - acc: 0.4401 - val_loss: 1.1524 - val_acc: 0.4337
Epoch 42/512
23/23 [==============================] - 1s - loss: 1.2964 - acc: 0.4245 - val_loss: 1.1964 - val_acc: 0.5181
Epoch 43/512
23/23 [==============================] - 1s - loss: 1.1269 - acc: 0.5517 - val_loss: 1.1162 - val_acc: 0.5542
Epoch 44/512
23/23 [==============================] - 1s - loss: 1.2470 - acc: 0.4973 - val_loss: 1.1619 - val_acc: 0.5422
Epoch 45/512
23/23 [==============================] - 1s - loss: 1.2852 - acc: 0.4677 - val_loss: 1.1265 - val_acc: 0.6024
Epoch 46/512
23/23 [==============================] - 1s - loss: 1.1468 - acc: 0.5490 - val_loss: 1.1658 - val_acc: 0.5301
Epoch 47/512
23/23 [==============================] - 1s - loss: 1.2074 - acc: 0.4898 - val_loss: 1.2316 - val_acc: 0.5060
Epoch 48/512
23/23 [==============================] - 1s - loss: 1.2231 - acc: 0.4976 - val_loss: 1.1820 - val_acc: 0.4819
Epoch 49/512
23/23 [==============================] - 1s - loss: 1.1538 - acc: 0.5109 - val_loss: 1.0814 - val_acc: 0.5663
Epoch 50/512
23/23 [==============================] - 1s - loss: 1.1370 - acc: 0.5381 - val_loss: 1.1382 - val_acc: 0.5663
Epoch 51/512
23/23 [==============================] - 1s - loss: 1.1885 - acc: 0.5248 - val_loss: 1.1626 - val_acc: 0.5060
Epoch 52/512
23/23 [==============================] - 1s - loss: 1.0738 - acc: 0.5514 - val_loss: 1.0798 - val_acc: 0.5422
Epoch 53/512
23/23 [==============================] - 1s - loss: 1.2062 - acc: 0.5486 - val_loss: 1.2806 - val_acc: 0.5422
Epoch 54/512
23/23 [==============================] - 1s - loss: 1.1276 - acc: 0.5350 - val_loss: 1.2267 - val_acc: 0.4217
Epoch 55/512
23/23 [==============================] - 1s - loss: 1.1938 - acc: 0.5187 - val_loss: 1.1500 - val_acc: 0.5301
Epoch 56/512
23/23 [==============================] - 1s - loss: 1.1772 - acc: 0.5112 - val_loss: 1.1566 - val_acc: 0.5060
Epoch 57/512
23/23 [==============================] - 1s - loss: 1.1057 - acc: 0.5405 - val_loss: 2.3479 - val_acc: 0.4819
Epoch 58/512
23/23 [==============================] - 1s - loss: 1.2563 - acc: 0.5136 - val_loss: 1.1058 - val_acc: 0.6145
Epoch 59/512
23/23 [==============================] - 1s - loss: 1.1321 - acc: 0.5463 - val_loss: 1.1970 - val_acc: 0.5181
Epoch 60/512
23/23 [==============================] - 1s - loss: 1.1540 - acc: 0.5405 - val_loss: 1.2151 - val_acc: 0.5181
Epoch 61/512
23/23 [==============================] - 1s - loss: 1.0775 - acc: 0.5622 - val_loss: 1.1483 - val_acc: 0.5783
Epoch 62/512
23/23 [==============================] - 1s - loss: 1.0624 - acc: 0.5622 - val_loss: 0.9730 - val_acc: 0.5783
Epoch 63/512
23/23 [==============================] - 1s - loss: 1.1849 - acc: 0.5412 - val_loss: 1.0594 - val_acc: 0.5904
Epoch 64/512
23/23 [==============================] - 1s - loss: 1.1259 - acc: 0.5245 - val_loss: 1.2127 - val_acc: 0.4940
Epoch 65/512
23/23 [==============================] - 1s - loss: 1.1419 - acc: 0.5303 - val_loss: 1.1340 - val_acc: 0.5301
Epoch 66/512
23/23 [==============================] - 1s - loss: 1.0717 - acc: 0.5680 - val_loss: 1.0767 - val_acc: 0.5663
Epoch 67/512
23/23 [==============================] - 1s - loss: 1.1245 - acc: 0.5350 - val_loss: 0.9499 - val_acc: 0.5904
Epoch 68/512
23/23 [==============================] - 1s - loss: 1.1563 - acc: 0.5187 - val_loss: 1.0609 - val_acc: 0.5422
Epoch 69/512
23/23 [==============================] - 1s - loss: 1.1453 - acc: 0.5408 - val_loss: 1.0814 - val_acc: 0.5663
Epoch 70/512
23/23 [==============================] - 1s - loss: 1.1071 - acc: 0.5544 - val_loss: 1.0817 - val_acc: 0.5663
Epoch 71/512
23/23 [==============================] - 1s - loss: 1.1016 - acc: 0.5541 - val_loss: 1.0853 - val_acc: 0.5301
Epoch 72/512
23/23 [==============================] - 1s - loss: 1.0863 - acc: 0.5521 - val_loss: 1.0876 - val_acc: 0.5422
Epoch 73/512
23/23 [==============================] - 1s - loss: 1.3073 - acc: 0.5299 - val_loss: 1.2043 - val_acc: 0.5663
Epoch 74/512
23/23 [==============================] - 1s - loss: 1.0608 - acc: 0.5708 - val_loss: 1.2049 - val_acc: 0.5181
Epoch 75/512
23/23 [==============================] - 1s - loss: 1.1022 - acc: 0.5354 - val_loss: 1.0656 - val_acc: 0.5542
Epoch 76/512
23/23 [==============================] - 1s - loss: 1.0638 - acc: 0.5571 - val_loss: 1.0500 - val_acc: 0.6024
Epoch 77/512
23/23 [==============================] - 1s - loss: 1.1392 - acc: 0.5568 - val_loss: 1.2480 - val_acc: 0.5181
Epoch 78/512
23/23 [==============================] - 1s - loss: 1.0537 - acc: 0.5973 - val_loss: 1.0695 - val_acc: 0.5542
Epoch 79/512
23/23 [==============================] - 1s - loss: 1.0571 - acc: 0.5680 - val_loss: 1.0271 - val_acc: 0.5904
Epoch 80/512
23/23 [==============================] - 1s - loss: 1.0724 - acc: 0.5786 - val_loss: 1.1202 - val_acc: 0.5542
Epoch 81/512
23/23 [==============================] - 1s - loss: 1.0904 - acc: 0.5167 - val_loss: 1.1140 - val_acc: 0.5783
Epoch 82/512
23/23 [==============================] - 1s - loss: 1.0639 - acc: 0.5840 - val_loss: 1.0375 - val_acc: 0.5904
Epoch 83/512
23/23 [==============================] - 1s - loss: 1.0201 - acc: 0.5840 - val_loss: 1.1015 - val_acc: 0.5542
Epoch 84/512
23/23 [==============================] - 1s - loss: 1.0591 - acc: 0.5735 - val_loss: 1.1101 - val_acc: 0.5181
Epoch 85/512
23/23 [==============================] - 1s - loss: 1.1067 - acc: 0.5299 - val_loss: 1.0812 - val_acc: 0.5301
Epoch 86/512
23/23 [==============================] - 1s - loss: 1.1652 - acc: 0.5470 - val_loss: 1.0371 - val_acc: 0.5904
Epoch 87/512
23/23 [==============================] - 1s - loss: 1.0326 - acc: 0.5844 - val_loss: 1.3086 - val_acc: 0.4940
Epoch 88/512
23/23 [==============================] - 1s - loss: 1.0908 - acc: 0.5711 - val_loss: 1.1123 - val_acc: 0.5783
Epoch 89/512
23/23 [==============================] - 1s - loss: 1.0865 - acc: 0.5789 - val_loss: 1.0467 - val_acc: 0.5663
Epoch 90/512
23/23 [==============================] - 1s - loss: 1.0561 - acc: 0.5708 - val_loss: 1.1680 - val_acc: 0.5181
Epoch 91/512
23/23 [==============================] - 1s - loss: 1.1042 - acc: 0.5384 - val_loss: 1.0475 - val_acc: 0.5663
Epoch 92/512
23/23 [==============================] - 1s - loss: 1.0788 - acc: 0.5976 - val_loss: 1.0822 - val_acc: 0.5783
Epoch 93/512
23/23 [==============================] - 1s - loss: 1.0465 - acc: 0.5541 - val_loss: 0.9961 - val_acc: 0.5663
Epoch 94/512
23/23 [==============================] - 1s - loss: 1.0684 - acc: 0.5820 - val_loss: 1.0990 - val_acc: 0.5301
Epoch 95/512
23/23 [==============================] - 1s - loss: 1.0875 - acc: 0.5381 - val_loss: 1.2104 - val_acc: 0.5301
Epoch 96/512
23/23 [==============================] - 1s - loss: 1.0225 - acc: 0.6245 - val_loss: 1.1739 - val_acc: 0.5301
Epoch 97/512
23/23 [==============================] - 1s - loss: 1.0178 - acc: 0.5897 - val_loss: 1.0424 - val_acc: 0.5301
Epoch 98/512
23/23 [==============================] - 1s - loss: 1.0813 - acc: 0.5629 - val_loss: 1.0744 - val_acc: 0.5904
Epoch 99/512
23/23 [==============================] - 1s - loss: 1.0203 - acc: 0.5922 - val_loss: 1.1539 - val_acc: 0.5422
Epoch 100/512
23/23 [==============================] - 1s - loss: 1.0791 - acc: 0.5629 - val_loss: 1.1167 - val_acc: 0.5542
Epoch 101/512
23/23 [==============================] - 1s - loss: 1.1190 - acc: 0.5299 - val_loss: 1.0665 - val_acc: 0.5422
Epoch 102/512
23/23 [==============================] - 1s - loss: 1.2724 - acc: 0.5595 - val_loss: 1.1385 - val_acc: 0.5783
Epoch 103/512
23/23 [==============================] - 1s - loss: 1.0527 - acc: 0.5952 - val_loss: 1.2306 - val_acc: 0.5422
Epoch 104/512
23/23 [==============================] - 1s - loss: 1.0143 - acc: 0.5945 - val_loss: 1.2970 - val_acc: 0.5542
Epoch 105/512
23/23 [==============================] - 1s - loss: 1.0834 - acc: 0.5551 - val_loss: 1.0495 - val_acc: 0.5422
Epoch 106/512
23/23 [==============================] - 1s - loss: 1.0483 - acc: 0.5517 - val_loss: 1.0730 - val_acc: 0.5904
Epoch 107/512
23/23 [==============================] - 1s - loss: 0.9926 - acc: 0.5980 - val_loss: 1.0659 - val_acc: 0.6024
Epoch 108/512
23/23 [==============================] - 1s - loss: 1.0477 - acc: 0.5708 - val_loss: 1.0756 - val_acc: 0.5542
Epoch 109/512
23/23 [==============================] - 1s - loss: 1.0181 - acc: 0.6031 - val_loss: 1.1320 - val_acc: 0.5422
Epoch 110/512
23/23 [==============================] - 1s - loss: 1.0145 - acc: 0.5789 - val_loss: 1.2977 - val_acc: 0.5181
Epoch 111/512
23/23 [==============================] - 1s - loss: 0.9940 - acc: 0.5976 - val_loss: 1.1122 - val_acc: 0.5783
Epoch 112/512
23/23 [==============================] - 1s - loss: 1.1156 - acc: 0.5629 - val_loss: 1.0911 - val_acc: 0.5060
Epoch 113/512
23/23 [==============================] - 1s - loss: 0.9879 - acc: 0.5871 - val_loss: 1.0167 - val_acc: 0.5422
Epoch 114/512
23/23 [==============================] - 1s - loss: 1.0269 - acc: 0.5653 - val_loss: 1.1454 - val_acc: 0.5663
Epoch 115/512
23/23 [==============================] - 1s - loss: 1.0249 - acc: 0.5731 - val_loss: 1.0127 - val_acc: 0.5904
Epoch 116/512
23/23 [==============================] - 1s - loss: 0.9994 - acc: 0.6065 - val_loss: 1.1054 - val_acc: 0.5542
Epoch 117/512
23/23 [==============================] - 1s - loss: 0.9628 - acc: 0.6167 - val_loss: 1.0750 - val_acc: 0.5542
Epoch 118/512
23/23 [==============================] - 1s - loss: 1.0530 - acc: 0.5738 - val_loss: 1.2104 - val_acc: 0.4940
Epoch 119/512
23/23 [==============================] - 1s - loss: 1.0638 - acc: 0.5544 - val_loss: 1.0991 - val_acc: 0.5301
Epoch 120/512
23/23 [==============================] - 1s - loss: 1.0254 - acc: 0.5735 - val_loss: 1.2782 - val_acc: 0.4096
Epoch 121/512
23/23 [==============================] - 1s - loss: 1.0250 - acc: 0.5978 - val_loss: 1.2215 - val_acc: 0.5542
Epoch 122/512
23/23 [==============================] - 1s - loss: 1.0066 - acc: 0.5867 - val_loss: 1.1147 - val_acc: 0.5181
Epoch 123/512
23/23 [==============================] - 1s - loss: 0.9621 - acc: 0.5922 - val_loss: 1.2423 - val_acc: 0.4940
Epoch 124/512
23/23 [==============================] - 1s - loss: 1.0038 - acc: 0.5976 - val_loss: 1.1285 - val_acc: 0.5663
Epoch 125/512
23/23 [==============================] - 1s - loss: 1.0152 - acc: 0.6054 - val_loss: 1.1086 - val_acc: 0.5783
Epoch 126/512
23/23 [==============================] - 1s - loss: 1.0270 - acc: 0.5622 - val_loss: 1.0431 - val_acc: 0.5783
Epoch 127/512
23/23 [==============================] - 1s - loss: 0.9896 - acc: 0.5793 - val_loss: 1.3704 - val_acc: 0.4699
Epoch 128/512
23/23 [==============================] - 1s - loss: 1.0326 - acc: 0.5684 - val_loss: 1.1797 - val_acc: 0.4940
Epoch 129/512
23/23 [==============================] - 1s - loss: 1.0390 - acc: 0.5680 - val_loss: 1.3155 - val_acc: 0.5301
Epoch 130/512
23/23 [==============================] - 1s - loss: 0.9336 - acc: 0.6088 - val_loss: 1.1739 - val_acc: 0.5181
Epoch 131/512
23/23 [==============================] - 1s - loss: 1.0371 - acc: 0.5867 - val_loss: 1.1862 - val_acc: 0.4819
Epoch 132/512
23/23 [==============================] - 1s - loss: 0.9026 - acc: 0.6218 - val_loss: 1.1685 - val_acc: 0.5301
Epoch 133/512
23/23 [==============================] - 1s - loss: 1.0252 - acc: 0.5684 - val_loss: 1.0203 - val_acc: 0.5422
Epoch 134/512
23/23 [==============================] - 1s - loss: 1.0277 - acc: 0.5789 - val_loss: 1.0680 - val_acc: 0.5542
Epoch 135/512
23/23 [==============================] - 1s - loss: 1.1058 - acc: 0.5925 - val_loss: 1.3687 - val_acc: 0.5542
Epoch 136/512
23/23 [==============================] - 1s - loss: 0.9656 - acc: 0.6007 - val_loss: 1.0895 - val_acc: 0.5422
Epoch 137/512
23/23 [==============================] - 1s - loss: 1.0436 - acc: 0.5680 - val_loss: 1.0621 - val_acc: 0.5663
Epoch 138/512
23/23 [==============================] - 1s - loss: 1.0078 - acc: 0.5976 - val_loss: 1.1325 - val_acc: 0.5060
Epoch 139/512
23/23 [==============================] - 1s - loss: 1.0362 - acc: 0.5762 - val_loss: 1.1337 - val_acc: 0.5060
Epoch 140/512
23/23 [==============================] - 1s - loss: 1.0051 - acc: 0.5894 - val_loss: 1.1032 - val_acc: 0.5301
Epoch 141/512
23/23 [==============================] - 1s - loss: 1.0093 - acc: 0.5976 - val_loss: 1.0012 - val_acc: 0.5542
Epoch 142/512
23/23 [==============================] - 1s - loss: 1.0378 - acc: 0.5517 - val_loss: 1.0677 - val_acc: 0.5904
Epoch 143/512
23/23 [==============================] - 1s - loss: 1.0009 - acc: 0.5891 - val_loss: 1.0465 - val_acc: 0.5663
Epoch 144/512
23/23 [==============================] - 1s - loss: 1.1022 - acc: 0.5898 - val_loss: 1.2570 - val_acc: 0.5181
Epoch 145/512
23/23 [==============================] - 1s - loss: 0.9628 - acc: 0.5978 - val_loss: 1.1035 - val_acc: 0.5422
Epoch 146/512
23/23 [==============================] - 1s - loss: 0.9712 - acc: 0.6003 - val_loss: 1.2447 - val_acc: 0.5783
Epoch 147/512
23/23 [==============================] - 1s - loss: 0.9899 - acc: 0.5894 - val_loss: 1.1499 - val_acc: 0.5542
Epoch 148/512
23/23 [==============================] - 1s - loss: 0.9886 - acc: 0.5762 - val_loss: 1.1218 - val_acc: 0.5663
Epoch 149/512
23/23 [==============================] - 1s - loss: 0.9918 - acc: 0.6034 - val_loss: 1.1471 - val_acc: 0.5663
Epoch 150/512
23/23 [==============================] - 1s - loss: 0.9979 - acc: 0.5731 - val_loss: 1.1186 - val_acc: 0.5301
Epoch 151/512
23/23 [==============================] - 1s - loss: 1.0312 - acc: 0.5735 - val_loss: 1.0518 - val_acc: 0.5904
Epoch 152/512
23/23 [==============================] - 1s - loss: 0.9802 - acc: 0.6007 - val_loss: 1.1822 - val_acc: 0.5060
Epoch 153/512
23/23 [==============================] - 1s - loss: 1.0034 - acc: 0.5813 - val_loss: 1.0552 - val_acc: 0.5663
Epoch 154/512
23/23 [==============================] - 1s - loss: 0.9568 - acc: 0.6116 - val_loss: 1.2978 - val_acc: 0.4819
Epoch 155/512
23/23 [==============================] - 1s - loss: 1.0020 - acc: 0.5929 - val_loss: 1.1197 - val_acc: 0.5783
Epoch 156/512
23/23 [==============================] - 1s - loss: 0.9952 - acc: 0.5956 - val_loss: 1.2290 - val_acc: 0.4940
Epoch 157/512
23/23 [==============================] - 1s - loss: 0.9320 - acc: 0.5844 - val_loss: 1.1272 - val_acc: 0.5904
Epoch 158/512
23/23 [==============================] - 1s - loss: 0.9043 - acc: 0.6167 - val_loss: 1.3082 - val_acc: 0.5301
Epoch 159/512
23/23 [==============================] - 1s - loss: 1.0468 - acc: 0.5738 - val_loss: 1.1108 - val_acc: 0.5663
Epoch 160/512
23/23 [==============================] - 1s - loss: 0.9809 - acc: 0.6007 - val_loss: 1.2354 - val_acc: 0.4819
Epoch 161/512
23/23 [==============================] - 1s - loss: 1.0639 - acc: 0.5871 - val_loss: 1.1460 - val_acc: 0.5181
Epoch 162/512
23/23 [==============================] - 1s - loss: 1.0108 - acc: 0.5874 - val_loss: 1.0478 - val_acc: 0.5663
Epoch 163/512
23/23 [==============================] - 1s - loss: 0.9768 - acc: 0.5871 - val_loss: 1.1588 - val_acc: 0.5181
Epoch 164/512
23/23 [==============================] - 1s - loss: 0.9714 - acc: 0.6061 - val_loss: 1.1536 - val_acc: 0.5663
Epoch 165/512
23/23 [==============================] - 1s - loss: 1.0077 - acc: 0.5874 - val_loss: 1.1441 - val_acc: 0.5181
Epoch 166/512
23/23 [==============================] - 1s - loss: 1.0154 - acc: 0.5867 - val_loss: 1.2368 - val_acc: 0.4819
Epoch 167/512
23/23 [==============================] - 1s - loss: 1.0262 - acc: 0.5575 - val_loss: 1.1349 - val_acc: 0.5542
Epoch 168/512
23/23 [==============================] - 1s - loss: 0.9081 - acc: 0.6163 - val_loss: 1.1651 - val_acc: 0.5181
Epoch 169/512
23/23 [==============================] - 2s - loss: 0.9843 - acc: 0.6005 - val_loss: 2.1813 - val_acc: 0.4217
Epoch 170/512
23/23 [==============================] - 1s - loss: 1.0529 - acc: 0.5653 - val_loss: 1.1363 - val_acc: 0.5301
Epoch 171/512
23/23 [==============================] - 1s - loss: 0.9222 - acc: 0.6085 - val_loss: 1.1184 - val_acc: 0.5904
Epoch 172/512
23/23 [==============================] - 1s - loss: 0.9634 - acc: 0.5758 - val_loss: 1.1988 - val_acc: 0.5904
Epoch 173/512
23/23 [==============================] - 1s - loss: 0.9558 - acc: 0.6194 - val_loss: 1.1385 - val_acc: 0.5422
Epoch 174/512
23/23 [==============================] - 1s - loss: 0.9672 - acc: 0.5976 - val_loss: 1.0664 - val_acc: 0.5542
Epoch 175/512
23/23 [==============================] - 1s - loss: 1.0905 - acc: 0.5711 - val_loss: 0.9917 - val_acc: 0.5904
Epoch 176/512
23/23 [==============================] - 1s - loss: 0.9636 - acc: 0.5983 - val_loss: 1.0278 - val_acc: 0.5663
Epoch 177/512
23/23 [==============================] - 1s - loss: 1.0474 - acc: 0.5867 - val_loss: 1.1987 - val_acc: 0.4819
Epoch 178/512
23/23 [==============================] - 1s - loss: 0.9849 - acc: 0.6248 - val_loss: 1.1428 - val_acc: 0.5181
Epoch 179/512
23/23 [==============================] - 1s - loss: 0.9383 - acc: 0.6081 - val_loss: 1.0562 - val_acc: 0.5181
Epoch 180/512
23/23 [==============================] - 1s - loss: 1.2303 - acc: 0.5684 - val_loss: 1.1174 - val_acc: 0.5663
Epoch 181/512
23/23 [==============================] - 1s - loss: 0.9355 - acc: 0.6031 - val_loss: 1.4708 - val_acc: 0.5422
Epoch 182/512
23/23 [==============================] - 1s - loss: 1.0053 - acc: 0.5980 - val_loss: 1.2229 - val_acc: 0.5422
Epoch 183/512
23/23 [==============================] - 1s - loss: 0.9845 - acc: 0.6174 - val_loss: 1.0613 - val_acc: 0.5904
Epoch 184/512
23/23 [==============================] - 1s - loss: 0.9452 - acc: 0.6221 - val_loss: 1.1475 - val_acc: 0.5301
Epoch 185/512
23/23 [==============================] - 1s - loss: 0.9160 - acc: 0.6143 - val_loss: 1.0968 - val_acc: 0.5422
Epoch 186/512
23/23 [==============================] - 1s - loss: 1.0096 - acc: 0.6170 - val_loss: 1.2723 - val_acc: 0.5422
Epoch 187/512
23/23 [==============================] - 1s - loss: 0.9830 - acc: 0.6221 - val_loss: 1.1549 - val_acc: 0.5060
Epoch 188/512
23/23 [==============================] - 1s - loss: 0.9690 - acc: 0.6275 - val_loss: 1.3596 - val_acc: 0.4940
Epoch 189/512
23/23 [==============================] - 1s - loss: 0.9186 - acc: 0.6225 - val_loss: 1.1236 - val_acc: 0.5181
Epoch 190/512
23/23 [==============================] - 1s - loss: 0.9201 - acc: 0.6684 - val_loss: 1.1782 - val_acc: 0.5301
Epoch 191/512
23/23 [==============================] - 1s - loss: 0.9998 - acc: 0.6061 - val_loss: 1.0452 - val_acc: 0.5542
Epoch 192/512
23/23 [==============================] - 1s - loss: 0.9624 - acc: 0.5929 - val_loss: 1.1788 - val_acc: 0.5181
Epoch 193/512
23/23 [==============================] - 1s - loss: 0.9861 - acc: 0.6277 - val_loss: 1.0958 - val_acc: 0.5542
Epoch 194/512
23/23 [==============================] - 1s - loss: 0.9795 - acc: 0.5847 - val_loss: 1.1162 - val_acc: 0.5542
Epoch 195/512
23/23 [==============================] - 1s - loss: 0.9284 - acc: 0.6384 - val_loss: 1.1791 - val_acc: 0.4940
Epoch 196/512
23/23 [==============================] - 1s - loss: 1.0078 - acc: 0.5871 - val_loss: 1.1623 - val_acc: 0.5301
Epoch 197/512
23/23 [==============================] - 1s - loss: 1.1600 - acc: 0.6408 - val_loss: 1.2138 - val_acc: 0.5060
Epoch 198/512
23/23 [==============================] - 1s - loss: 0.9878 - acc: 0.6333 - val_loss: 1.2172 - val_acc: 0.4940
Epoch 199/512
23/23 [==============================] - 1s - loss: 0.9946 - acc: 0.6225 - val_loss: 1.1103 - val_acc: 0.5422
Epoch 200/512
23/23 [==============================] - 1s - loss: 0.9230 - acc: 0.6412 - val_loss: 1.1310 - val_acc: 0.4699
Epoch 201/512
23/23 [==============================] - 1s - loss: 0.9238 - acc: 0.6031 - val_loss: 1.0888 - val_acc: 0.5783
Epoch 202/512
23/23 [==============================] - 1s - loss: 1.0153 - acc: 0.6279 - val_loss: 1.1438 - val_acc: 0.5181
Epoch 203/512
23/23 [==============================] - 1s - loss: 0.9727 - acc: 0.6299 - val_loss: 1.0989 - val_acc: 0.5542
Epoch 204/512
23/23 [==============================] - 1s - loss: 1.0024 - acc: 0.5952 - val_loss: 1.1154 - val_acc: 0.5422
Epoch 205/512
23/23 [==============================] - 1s - loss: 0.9905 - acc: 0.6058 - val_loss: 1.2008 - val_acc: 0.5181
Epoch 206/512
23/23 [==============================] - 1s - loss: 0.9338 - acc: 0.6415 - val_loss: 1.2719 - val_acc: 0.5422
Epoch 207/512
23/23 [==============================] - 1s - loss: 1.0275 - acc: 0.6255 - val_loss: 1.2624 - val_acc: 0.4819
Epoch 208/512
23/23 [==============================] - 1s - loss: 0.9723 - acc: 0.6116 - val_loss: 1.2153 - val_acc: 0.5060
Epoch 209/512
23/23 [==============================] - 1s - loss: 0.9965 - acc: 0.5871 - val_loss: 1.1874 - val_acc: 0.4940
Epoch 210/512
23/23 [==============================] - 1s - loss: 0.8955 - acc: 0.6602 - val_loss: 1.3115 - val_acc: 0.5181
Epoch 211/512
23/23 [==============================] - 1s - loss: 0.8711 - acc: 0.6279 - val_loss: 1.1791 - val_acc: 0.5301
Epoch 212/512
23/23 [==============================] - 1s - loss: 0.9965 - acc: 0.6085 - val_loss: 1.0287 - val_acc: 0.6024
Epoch 213/512
23/23 [==============================] - 1s - loss: 0.9293 - acc: 0.6548 - val_loss: 1.0812 - val_acc: 0.5181
Epoch 214/512
23/23 [==============================] - 1s - loss: 0.9088 - acc: 0.6439 - val_loss: 1.3457 - val_acc: 0.5422
Epoch 215/512
23/23 [==============================] - 1s - loss: 0.9454 - acc: 0.6299 - val_loss: 1.0755 - val_acc: 0.5422
Epoch 216/512
23/23 [==============================] - 1s - loss: 0.9935 - acc: 0.6248 - val_loss: 1.4457 - val_acc: 0.5181
Epoch 217/512
23/23 [==============================] - 1s - loss: 0.9661 - acc: 0.6168 - val_loss: 1.1636 - val_acc: 0.4940
Epoch 218/512
23/23 [==============================] - 1s - loss: 0.9152 - acc: 0.6419 - val_loss: 1.1137 - val_acc: 0.5904
Epoch 219/512
23/23 [==============================] - 1s - loss: 0.9796 - acc: 0.6116 - val_loss: 1.2980 - val_acc: 0.4699
Epoch 220/512
23/23 [==============================] - 1s - loss: 0.9174 - acc: 0.6412 - val_loss: 1.2509 - val_acc: 0.5181
Epoch 221/512
23/23 [==============================] - 1s - loss: 0.9562 - acc: 0.6551 - val_loss: 1.1093 - val_acc: 0.4940
Epoch 222/512
23/23 [==============================] - 1s - loss: 0.9644 - acc: 0.5976 - val_loss: 1.2599 - val_acc: 0.4940
Epoch 223/512
23/23 [==============================] - 1s - loss: 0.9598 - acc: 0.6575 - val_loss: 1.2377 - val_acc: 0.5301
Epoch 224/512
23/23 [==============================] - 1s - loss: 0.9438 - acc: 0.6606 - val_loss: 1.1330 - val_acc: 0.4940
Epoch 225/512
23/23 [==============================] - 1s - loss: 0.8969 - acc: 0.6252 - val_loss: 1.2605 - val_acc: 0.5181
Epoch 226/512
23/23 [==============================] - 1s - loss: 1.0041 - acc: 0.6027 - val_loss: 1.4300 - val_acc: 0.5301
Epoch 227/512
23/23 [==============================] - 1s - loss: 0.9457 - acc: 0.6228 - val_loss: 1.0360 - val_acc: 0.5542
Epoch 228/512
23/23 [==============================] - 1s - loss: 0.9135 - acc: 0.6442 - val_loss: 1.1352 - val_acc: 0.5422
Epoch 229/512
23/23 [==============================] - 1s - loss: 0.9867 - acc: 0.5922 - val_loss: 1.2176 - val_acc: 0.5783
Epoch 230/512
23/23 [==============================] - 1s - loss: 0.9078 - acc: 0.6598 - val_loss: 1.2051 - val_acc: 0.5301
Epoch 231/512
23/23 [==============================] - 1s - loss: 0.9697 - acc: 0.6058 - val_loss: 1.6696 - val_acc: 0.5542
Epoch 232/512
23/23 [==============================] - 1s - loss: 0.9331 - acc: 0.6255 - val_loss: 1.1867 - val_acc: 0.4819
Epoch 233/512
23/23 [==============================] - 1s - loss: 0.9856 - acc: 0.5983 - val_loss: 1.1930 - val_acc: 0.5301
Epoch 234/512
23/23 [==============================] - 1s - loss: 0.9054 - acc: 0.6548 - val_loss: 1.2803 - val_acc: 0.5301
Epoch 235/512
23/23 [==============================] - 1s - loss: 1.0281 - acc: 0.6116 - val_loss: 1.2735 - val_acc: 0.4578
Epoch 236/512
23/23 [==============================] - 1s - loss: 0.9321 - acc: 0.6252 - val_loss: 1.4036 - val_acc: 0.5060
Epoch 237/512
23/23 [==============================] - 1s - loss: 0.9177 - acc: 0.6820 - val_loss: 1.2785 - val_acc: 0.5181
Epoch 238/512
23/23 [==============================] - 1s - loss: 0.9604 - acc: 0.6275 - val_loss: 1.2855 - val_acc: 0.5301
Epoch 239/512
23/23 [==============================] - 1s - loss: 0.9328 - acc: 0.6119 - val_loss: 1.1317 - val_acc: 0.5422
Epoch 240/512
23/23 [==============================] - 1s - loss: 0.9250 - acc: 0.6388 - val_loss: 1.3962 - val_acc: 0.5542
Epoch 241/512
23/23 [==============================] - 1s - loss: 0.9446 - acc: 0.6359 - val_loss: 1.1440 - val_acc: 0.5904
Epoch 242/512
23/23 [==============================] - 1s - loss: 0.9513 - acc: 0.6442 - val_loss: 1.2810 - val_acc: 0.5542
Epoch 243/512
23/23 [==============================] - 1s - loss: 0.9763 - acc: 0.6279 - val_loss: 1.1161 - val_acc: 0.5542
Epoch 244/512
23/23 [==============================] - 1s - loss: 0.9348 - acc: 0.6626 - val_loss: 1.4170 - val_acc: 0.5301
Epoch 245/512
23/23 [==============================] - 1s - loss: 0.9453 - acc: 0.6388 - val_loss: 1.0869 - val_acc: 0.5542
Epoch 246/512
23/23 [==============================] - 1s - loss: 0.9167 - acc: 0.6194 - val_loss: 1.1400 - val_acc: 0.5301
Epoch 247/512
23/23 [==============================] - 1s - loss: 0.9099 - acc: 0.6517 - val_loss: 1.4055 - val_acc: 0.5422
Epoch 248/512
23/23 [==============================] - 1s - loss: 0.8640 - acc: 0.6684 - val_loss: 1.1982 - val_acc: 0.5301
Epoch 249/512
23/23 [==============================] - 1s - loss: 0.9169 - acc: 0.6326 - val_loss: 1.1215 - val_acc: 0.5783
Epoch 250/512
23/23 [==============================] - 1s - loss: 1.0003 - acc: 0.6221 - val_loss: 1.0707 - val_acc: 0.5783
Epoch 251/512
23/23 [==============================] - 1s - loss: 0.8899 - acc: 0.6469 - val_loss: 1.0660 - val_acc: 0.5542
Epoch 252/512
23/23 [==============================] - 1s - loss: 0.8948 - acc: 0.6469 - val_loss: 1.1987 - val_acc: 0.4940
Epoch 253/512
23/23 [==============================] - 1s - loss: 0.9671 - acc: 0.6225 - val_loss: 1.2052 - val_acc: 0.4940
Epoch 254/512
23/23 [==============================] - 1s - loss: 0.9616 - acc: 0.6354 - val_loss: 1.1528 - val_acc: 0.5783
Epoch 255/512
23/23 [==============================] - 1s - loss: 0.9462 - acc: 0.6357 - val_loss: 1.1049 - val_acc: 0.5181
Epoch 256/512
23/23 [==============================] - 1s - loss: 0.9238 - acc: 0.6361 - val_loss: 1.0693 - val_acc: 0.5663
Epoch 257/512
23/23 [==============================] - 1s - loss: 0.8905 - acc: 0.7037 - val_loss: 1.5496 - val_acc: 0.5301
Epoch 258/512
23/23 [==============================] - 1s - loss: 0.9588 - acc: 0.6439 - val_loss: 1.2960 - val_acc: 0.5783
Epoch 259/512
23/23 [==============================] - 1s - loss: 0.8469 - acc: 0.6598 - val_loss: 1.1714 - val_acc: 0.5060
Epoch 260/512
23/23 [==============================] - 1s - loss: 0.9034 - acc: 0.6364 - val_loss: 1.1214 - val_acc: 0.5060
Epoch 261/512
23/23 [==============================] - 1s - loss: 0.9094 - acc: 0.6500 - val_loss: 1.3302 - val_acc: 0.5301
Epoch 262/512
23/23 [==============================] - 1s - loss: 0.8307 - acc: 0.6765 - val_loss: 1.3988 - val_acc: 0.5904
Epoch 263/512
23/23 [==============================] - 1s - loss: 0.9097 - acc: 0.6660 - val_loss: 1.1113 - val_acc: 0.5542
Epoch 264/512
23/23 [==============================] - 1s - loss: 0.8833 - acc: 0.6520 - val_loss: 1.2133 - val_acc: 0.5301
Epoch 265/512
23/23 [==============================] - 1s - loss: 0.8639 - acc: 0.6413 - val_loss: 1.2675 - val_acc: 0.5301
Epoch 266/512
23/23 [==============================] - 1s - loss: 0.9470 - acc: 0.6170 - val_loss: 1.2742 - val_acc: 0.4699
Epoch 267/512
23/23 [==============================] - 1s - loss: 0.8841 - acc: 0.6384 - val_loss: 1.3648 - val_acc: 0.5542
Epoch 268/512
23/23 [==============================] - 1s - loss: 0.8722 - acc: 0.6707 - val_loss: 1.1230 - val_acc: 0.6024
Epoch 269/512
23/23 [==============================] - 1s - loss: 0.8599 - acc: 0.6578 - val_loss: 1.2078 - val_acc: 0.5181
Epoch 270/512
23/23 [==============================] - 1s - loss: 0.8910 - acc: 0.6633 - val_loss: 1.1984 - val_acc: 0.5904
Epoch 271/512
23/23 [==============================] - 1s - loss: 0.8720 - acc: 0.6571 - val_loss: 1.2443 - val_acc: 0.5904
Epoch 272/512
23/23 [==============================] - 1s - loss: 0.8772 - acc: 0.6466 - val_loss: 1.1731 - val_acc: 0.5663
Epoch 273/512
23/23 [==============================] - 1s - loss: 0.8823 - acc: 0.6415 - val_loss: 1.1608 - val_acc: 0.5301
Epoch 274/512
23/23 [==============================] - 1s - loss: 0.9255 - acc: 0.6660 - val_loss: 1.3068 - val_acc: 0.5060
Epoch 275/512
23/23 [==============================] - 1s - loss: 0.9522 - acc: 0.6150 - val_loss: 1.3004 - val_acc: 0.5422
Epoch 276/512
23/23 [==============================] - 1s - loss: 0.9401 - acc: 0.6901 - val_loss: 1.2215 - val_acc: 0.5542
Epoch 277/512
23/23 [==============================] - 1s - loss: 0.9108 - acc: 0.6306 - val_loss: 1.1340 - val_acc: 0.5542
Epoch 278/512
23/23 [==============================] - 1s - loss: 0.8224 - acc: 0.6874 - val_loss: 1.1422 - val_acc: 0.5422
Epoch 279/512
23/23 [==============================] - 1s - loss: 0.9061 - acc: 0.6527 - val_loss: 1.2598 - val_acc: 0.5422
Epoch 280/512
23/23 [==============================] - 1s - loss: 0.8609 - acc: 0.6527 - val_loss: 1.1400 - val_acc: 0.5422
Epoch 281/512
23/23 [==============================] - 1s - loss: 1.0203 - acc: 0.5929 - val_loss: 1.1208 - val_acc: 0.5422
Epoch 282/512
23/23 [==============================] - 1s - loss: 0.8019 - acc: 0.6684 - val_loss: 1.2487 - val_acc: 0.5060
Epoch 283/512
23/23 [==============================] - 1s - loss: 0.9753 - acc: 0.6364 - val_loss: 1.2112 - val_acc: 0.5422
Epoch 284/512
23/23 [==============================] - 1s - loss: 0.8476 - acc: 0.6684 - val_loss: 1.1741 - val_acc: 0.5663
Epoch 285/512
23/23 [==============================] - 1s - loss: 0.8818 - acc: 0.6606 - val_loss: 1.2716 - val_acc: 0.5301
Epoch 286/512
23/23 [==============================] - 1s - loss: 0.9425 - acc: 0.6497 - val_loss: 1.1744 - val_acc: 0.5663
Epoch 287/512
23/23 [==============================] - 1s - loss: 0.8323 - acc: 0.6796 - val_loss: 1.1169 - val_acc: 0.5783
Epoch 288/512
23/23 [==============================] - 1s - loss: 0.8755 - acc: 0.6792 - val_loss: 1.1229 - val_acc: 0.5422
Epoch 289/512
23/23 [==============================] - 1s - loss: 0.9224 - acc: 0.6522 - val_loss: 1.2256 - val_acc: 0.5542
Epoch 290/512
23/23 [==============================] - 1s - loss: 0.8547 - acc: 0.6925 - val_loss: 1.1905 - val_acc: 0.5663
Epoch 291/512
23/23 [==============================] - 1s - loss: 0.9139 - acc: 0.6609 - val_loss: 1.1549 - val_acc: 0.5301
Epoch 292/512
23/23 [==============================] - 1s - loss: 0.8865 - acc: 0.6520 - val_loss: 1.0779 - val_acc: 0.5301
Epoch 293/512
23/23 [==============================] - 1s - loss: 0.8563 - acc: 0.6388 - val_loss: 1.1422 - val_acc: 0.5301
Epoch 294/512
23/23 [==============================] - 1s - loss: 0.8405 - acc: 0.6820 - val_loss: 1.0060 - val_acc: 0.6145
Epoch 295/512
23/23 [==============================] - 1s - loss: 0.8513 - acc: 0.6629 - val_loss: 1.2874 - val_acc: 0.5060
Epoch 296/512
23/23 [==============================] - 1s - loss: 0.9884 - acc: 0.6415 - val_loss: 1.3069 - val_acc: 0.5301
Epoch 297/512
23/23 [==============================] - 1s - loss: 0.8355 - acc: 0.7092 - val_loss: 1.2002 - val_acc: 0.5783
Epoch 298/512
23/23 [==============================] - 1s - loss: 0.8901 - acc: 0.6602 - val_loss: 1.0897 - val_acc: 0.6024
Epoch 299/512
23/23 [==============================] - 1s - loss: 0.7678 - acc: 0.7143 - val_loss: 1.2851 - val_acc: 0.5181
Epoch 300/512
23/23 [==============================] - 1s - loss: 0.8559 - acc: 0.6493 - val_loss: 1.1650 - val_acc: 0.5422
Epoch 301/512
23/23 [==============================] - 1s - loss: 0.9206 - acc: 0.6598 - val_loss: 1.2024 - val_acc: 0.5663
Epoch 302/512
23/23 [==============================] - 1s - loss: 0.8847 - acc: 0.6575 - val_loss: 1.0753 - val_acc: 0.5422
Epoch 303/512
23/23 [==============================] - 1s - loss: 0.9007 - acc: 0.6629 - val_loss: 1.2082 - val_acc: 0.5542
Epoch 304/512
23/23 [==============================] - 1s - loss: 0.8933 - acc: 0.6361 - val_loss: 1.6823 - val_acc: 0.4699
Epoch 305/512
23/23 [==============================] - 1s - loss: 0.8787 - acc: 0.6653 - val_loss: 1.1372 - val_acc: 0.5663
Epoch 306/512
23/23 [==============================] - 1s - loss: 0.7677 - acc: 0.6956 - val_loss: 1.3767 - val_acc: 0.5301
Epoch 307/512
23/23 [==============================] - 1s - loss: 0.8685 - acc: 0.6816 - val_loss: 1.1506 - val_acc: 0.5783
Epoch 308/512
23/23 [==============================] - 1s - loss: 0.8678 - acc: 0.6735 - val_loss: 1.2411 - val_acc: 0.5542
Epoch 309/512
23/23 [==============================] - 1s - loss: 0.9443 - acc: 0.6493 - val_loss: 1.2492 - val_acc: 0.5663
Epoch 310/512
23/23 [==============================] - 1s - loss: 0.8714 - acc: 0.6571 - val_loss: 1.3389 - val_acc: 0.5060
Epoch 311/512
23/23 [==============================] - 1s - loss: 0.8012 - acc: 0.6738 - val_loss: 1.1860 - val_acc: 0.5301
Epoch 312/512
23/23 [==============================] - 1s - loss: 0.8650 - acc: 0.6843 - val_loss: 1.1564 - val_acc: 0.5181
Epoch 313/512
23/23 [==============================] - 1s - loss: 0.7987 - acc: 0.6821 - val_loss: 1.1899 - val_acc: 0.5422
Epoch 314/512
23/23 [==============================] - 1s - loss: 0.9509 - acc: 0.5983 - val_loss: 1.0585 - val_acc: 0.6024
Epoch 315/512
23/23 [==============================] - 1s - loss: 0.8909 - acc: 0.6929 - val_loss: 1.3408 - val_acc: 0.5301
Epoch 316/512
23/23 [==============================] - 1s - loss: 0.8819 - acc: 0.6548 - val_loss: 1.1574 - val_acc: 0.5542
Epoch 317/512
23/23 [==============================] - 1s - loss: 0.7974 - acc: 0.7007 - val_loss: 1.1934 - val_acc: 0.5663
Epoch 318/512
23/23 [==============================] - 1s - loss: 0.8943 - acc: 0.6707 - val_loss: 1.4936 - val_acc: 0.4819
Epoch 319/512
23/23 [==============================] - 1s - loss: 0.8729 - acc: 0.6745 - val_loss: 1.1499 - val_acc: 0.5422
Epoch 320/512
23/23 [==============================] - 1s - loss: 0.8175 - acc: 0.7065 - val_loss: 1.1711 - val_acc: 0.5301
Epoch 321/512
23/23 [==============================] - 1s - loss: 0.8345 - acc: 0.6796 - val_loss: 1.1043 - val_acc: 0.6024
Epoch 322/512
23/23 [==============================] - 1s - loss: 0.8396 - acc: 0.6711 - val_loss: 1.6308 - val_acc: 0.5663
Epoch 323/512
23/23 [==============================] - 1s - loss: 0.9038 - acc: 0.6656 - val_loss: 1.1264 - val_acc: 0.5422
Epoch 324/512
23/23 [==============================] - 1s - loss: 0.8191 - acc: 0.6929 - val_loss: 1.1037 - val_acc: 0.5542
Epoch 325/512
23/23 [==============================] - 1s - loss: 0.8218 - acc: 0.6497 - val_loss: 1.1985 - val_acc: 0.5060
Epoch 326/512
23/23 [==============================] - 1s - loss: 0.9651 - acc: 0.6282 - val_loss: 1.1692 - val_acc: 0.5301
Epoch 327/512
23/23 [==============================] - 1s - loss: 0.7766 - acc: 0.7197 - val_loss: 1.2011 - val_acc: 0.5663
Epoch 328/512
23/23 [==============================] - 1s - loss: 0.9222 - acc: 0.6527 - val_loss: 1.2584 - val_acc: 0.5542
Epoch 329/512
23/23 [==============================] - 1s - loss: 0.8834 - acc: 0.6218 - val_loss: 1.0718 - val_acc: 0.6145
Epoch 330/512
23/23 [==============================] - 1s - loss: 0.8320 - acc: 0.6765 - val_loss: 1.0627 - val_acc: 0.6386
Epoch 331/512
23/23 [==============================] - 1s - loss: 0.8305 - acc: 0.7065 - val_loss: 1.4483 - val_acc: 0.4940
Epoch 332/512
23/23 [==============================] - 1s - loss: 0.8969 - acc: 0.6439 - val_loss: 1.0642 - val_acc: 0.5663
Epoch 333/512
23/23 [==============================] - 1s - loss: 0.8274 - acc: 0.6772 - val_loss: 1.2085 - val_acc: 0.5663
Epoch 334/512
23/23 [==============================] - 1s - loss: 0.8346 - acc: 0.6820 - val_loss: 1.4950 - val_acc: 0.5542
Epoch 335/512
23/23 [==============================] - 1s - loss: 0.8864 - acc: 0.6473 - val_loss: 1.5662 - val_acc: 0.4699
Epoch 336/512
23/23 [==============================] - 1s - loss: 0.8424 - acc: 0.6551 - val_loss: 1.1602 - val_acc: 0.5181
Epoch 337/512
23/23 [==============================] - 1s - loss: 0.8597 - acc: 0.6766 - val_loss: 1.1110 - val_acc: 0.5663
Epoch 338/512
23/23 [==============================] - 1s - loss: 0.8202 - acc: 0.7173 - val_loss: 1.2120 - val_acc: 0.5663
Epoch 339/512
23/23 [==============================] - 1s - loss: 0.8872 - acc: 0.6493 - val_loss: 1.2616 - val_acc: 0.5301
Epoch 340/512
23/23 [==============================] - 1s - loss: 0.8254 - acc: 0.6820 - val_loss: 1.3970 - val_acc: 0.5181
Epoch 341/512
23/23 [==============================] - 1s - loss: 0.8572 - acc: 0.6952 - val_loss: 1.1008 - val_acc: 0.6145
Epoch 342/512
23/23 [==============================] - 1s - loss: 0.8470 - acc: 0.6847 - val_loss: 1.1654 - val_acc: 0.5783
Epoch 343/512
23/23 [==============================] - 1s - loss: 0.8247 - acc: 0.6956 - val_loss: 1.0984 - val_acc: 0.6024
Epoch 344/512
23/23 [==============================] - 1s - loss: 0.8619 - acc: 0.6691 - val_loss: 1.0760 - val_acc: 0.5783
Epoch 345/512
23/23 [==============================] - 1s - loss: 0.8184 - acc: 0.6850 - val_loss: 1.0924 - val_acc: 0.5422
Epoch 346/512
23/23 [==============================] - 1s - loss: 0.8422 - acc: 0.7037 - val_loss: 1.2291 - val_acc: 0.5783
Epoch 347/512
23/23 [==============================] - 1s - loss: 0.7420 - acc: 0.6901 - val_loss: 1.1503 - val_acc: 0.5542
Epoch 348/512
23/23 [==============================] - 1s - loss: 0.9352 - acc: 0.6333 - val_loss: 1.0853 - val_acc: 0.6265
Epoch 349/512
23/23 [==============================] - 1s - loss: 0.8038 - acc: 0.6952 - val_loss: 1.0995 - val_acc: 0.5783
Epoch 350/512
23/23 [==============================] - 1s - loss: 0.7802 - acc: 0.6983 - val_loss: 1.2137 - val_acc: 0.5181
Epoch 351/512
23/23 [==============================] - 1s - loss: 0.8519 - acc: 0.6633 - val_loss: 1.2794 - val_acc: 0.5663
Epoch 352/512
23/23 [==============================] - 1s - loss: 0.8277 - acc: 0.6469 - val_loss: 1.1742 - val_acc: 0.5542
Epoch 353/512
23/23 [==============================] - 1s - loss: 0.8524 - acc: 0.6435 - val_loss: 1.0211 - val_acc: 0.5783
Epoch 354/512
23/23 [==============================] - 1s - loss: 0.8364 - acc: 0.6905 - val_loss: 1.0820 - val_acc: 0.6024
Epoch 355/512
23/23 [==============================] - 1s - loss: 0.8196 - acc: 0.6769 - val_loss: 1.2251 - val_acc: 0.5663
Epoch 356/512
23/23 [==============================] - 1s - loss: 0.7545 - acc: 0.7170 - val_loss: 1.3275 - val_acc: 0.5663
Epoch 357/512
23/23 [==============================] - 1s - loss: 0.8128 - acc: 0.6874 - val_loss: 1.1461 - val_acc: 0.5542
Epoch 358/512
23/23 [==============================] - 1s - loss: 0.8330 - acc: 0.6656 - val_loss: 1.2027 - val_acc: 0.5301
Epoch 359/512
23/23 [==============================] - 1s - loss: 0.7997 - acc: 0.7034 - val_loss: 1.0832 - val_acc: 0.5904
Epoch 360/512
23/23 [==============================] - 1s - loss: 0.8059 - acc: 0.7065 - val_loss: 1.2592 - val_acc: 0.5301
Epoch 361/512
23/23 [==============================] - 1s - loss: 0.8110 - acc: 0.6875 - val_loss: 1.0624 - val_acc: 0.5783
Epoch 362/512
23/23 [==============================] - 1s - loss: 0.7687 - acc: 0.7306 - val_loss: 1.5763 - val_acc: 0.4699
Epoch 363/512
23/23 [==============================] - 1s - loss: 0.7842 - acc: 0.7146 - val_loss: 1.3213 - val_acc: 0.4940
Epoch 364/512
23/23 [==============================] - 1s - loss: 0.7666 - acc: 0.7037 - val_loss: 1.0785 - val_acc: 0.5904
Epoch 365/512
23/23 [==============================] - 1s - loss: 0.8777 - acc: 0.6531 - val_loss: 1.1930 - val_acc: 0.5663
Epoch 366/512
23/23 [==============================] - 1s - loss: 0.8027 - acc: 0.7228 - val_loss: 1.5258 - val_acc: 0.5181
Epoch 367/512
23/23 [==============================] - 1s - loss: 0.8037 - acc: 0.6742 - val_loss: 1.1306 - val_acc: 0.5904
Epoch 368/512
23/23 [==============================] - 1s - loss: 0.7863 - acc: 0.6959 - val_loss: 1.2702 - val_acc: 0.5663
Epoch 369/512
23/23 [==============================] - 1s - loss: 0.8493 - acc: 0.6847 - val_loss: 1.2595 - val_acc: 0.5301
Epoch 370/512
23/23 [==============================] - 1s - loss: 0.8012 - acc: 0.6905 - val_loss: 1.3507 - val_acc: 0.5783
Epoch 371/512
23/23 [==============================] - 1s - loss: 0.9071 - acc: 0.6656 - val_loss: 1.2156 - val_acc: 0.5422
Epoch 372/512
23/23 [==============================] - 1s - loss: 0.7874 - acc: 0.7041 - val_loss: 1.2899 - val_acc: 0.5904
Epoch 373/512
23/23 [==============================] - 1s - loss: 0.8398 - acc: 0.6762 - val_loss: 1.4047 - val_acc: 0.5181
Epoch 374/512
23/23 [==============================] - 1s - loss: 0.8013 - acc: 0.6769 - val_loss: 1.4190 - val_acc: 0.5181
Epoch 375/512
23/23 [==============================] - 1s - loss: 0.8184 - acc: 0.6850 - val_loss: 1.7745 - val_acc: 0.4940
Epoch 376/512
23/23 [==============================] - 1s - loss: 0.8317 - acc: 0.6691 - val_loss: 1.1259 - val_acc: 0.5422
Epoch 377/512
23/23 [==============================] - 1s - loss: 0.7721 - acc: 0.6979 - val_loss: 1.3115 - val_acc: 0.5301
Epoch 378/512
23/23 [==============================] - 1s - loss: 0.8610 - acc: 0.6660 - val_loss: 1.2123 - val_acc: 0.5181
Epoch 379/512
23/23 [==============================] - 1s - loss: 0.7962 - acc: 0.6901 - val_loss: 1.1600 - val_acc: 0.5783
Epoch 380/512
23/23 [==============================] - 1s - loss: 0.8391 - acc: 0.6823 - val_loss: 1.2696 - val_acc: 0.5663
Epoch 381/512
23/23 [==============================] - 1s - loss: 0.7448 - acc: 0.7255 - val_loss: 1.2859 - val_acc: 0.5542
Epoch 382/512
23/23 [==============================] - 1s - loss: 0.8264 - acc: 0.7037 - val_loss: 1.1813 - val_acc: 0.5904
Epoch 383/512
23/23 [==============================] - 1s - loss: 0.8341 - acc: 0.6979 - val_loss: 1.2659 - val_acc: 0.5301
Epoch 384/512
23/23 [==============================] - 1s - loss: 0.7896 - acc: 0.6850 - val_loss: 1.3052 - val_acc: 0.5060
Epoch 385/512
23/23 [==============================] - 1s - loss: 0.8173 - acc: 0.6658 - val_loss: 1.0883 - val_acc: 0.5663
Epoch 386/512
23/23 [==============================] - 1s - loss: 0.7952 - acc: 0.6850 - val_loss: 1.0786 - val_acc: 0.6024
Epoch 387/512
23/23 [==============================] - 1s - loss: 0.7999 - acc: 0.6925 - val_loss: 1.1914 - val_acc: 0.5422
Epoch 388/512
23/23 [==============================] - 1s - loss: 0.8461 - acc: 0.6364 - val_loss: 1.4169 - val_acc: 0.5181
Epoch 389/512
23/23 [==============================] - 1s - loss: 0.7696 - acc: 0.7092 - val_loss: 1.1433 - val_acc: 0.5542
Epoch 390/512
23/23 [==============================] - 1s - loss: 0.8145 - acc: 0.7119 - val_loss: 1.3508 - val_acc: 0.5301
Epoch 391/512
23/23 [==============================] - 1s - loss: 0.7627 - acc: 0.7061 - val_loss: 1.2919 - val_acc: 0.5181
Epoch 392/512
23/23 [==============================] - 1s - loss: 0.7897 - acc: 0.7146 - val_loss: 1.1041 - val_acc: 0.6024
Epoch 393/512
23/23 [==============================] - 1s - loss: 0.8159 - acc: 0.6956 - val_loss: 1.0348 - val_acc: 0.5904
Epoch 394/512
23/23 [==============================] - 1s - loss: 0.8181 - acc: 0.6796 - val_loss: 1.1526 - val_acc: 0.5783
Epoch 395/512
23/23 [==============================] - 1s - loss: 0.7436 - acc: 0.7204 - val_loss: 1.2173 - val_acc: 0.5181
Epoch 396/512
23/23 [==============================] - 1s - loss: 0.8009 - acc: 0.7034 - val_loss: 1.1711 - val_acc: 0.5783
Epoch 397/512
23/23 [==============================] - 1s - loss: 0.7902 - acc: 0.7041 - val_loss: 1.0886 - val_acc: 0.5904
Epoch 398/512
23/23 [==============================] - 1s - loss: 0.8006 - acc: 0.6979 - val_loss: 1.3283 - val_acc: 0.5301
Epoch 399/512
23/23 [==============================] - 1s - loss: 0.7902 - acc: 0.6956 - val_loss: 1.1644 - val_acc: 0.5301
Epoch 400/512
23/23 [==============================] - 1s - loss: 0.8402 - acc: 0.6626 - val_loss: 1.2582 - val_acc: 0.6265
Epoch 401/512
23/23 [==============================] - 1s - loss: 0.8057 - acc: 0.6847 - val_loss: 1.3371 - val_acc: 0.5663
Epoch 402/512
23/23 [==============================] - 1s - loss: 0.6515 - acc: 0.7578 - val_loss: 1.3295 - val_acc: 0.5301
Epoch 403/512
23/23 [==============================] - 1s - loss: 0.9109 - acc: 0.6626 - val_loss: 1.3759 - val_acc: 0.5060
Epoch 404/512
23/23 [==============================] - 1s - loss: 0.7724 - acc: 0.7041 - val_loss: 1.3702 - val_acc: 0.5301
Epoch 405/512
23/23 [==============================] - 1s - loss: 0.7943 - acc: 0.7116 - val_loss: 1.1674 - val_acc: 0.6024
Epoch 406/512
23/23 [==============================] - 1s - loss: 0.8316 - acc: 0.7095 - val_loss: 1.1918 - val_acc: 0.5904
Epoch 407/512
23/23 [==============================] - 1s - loss: 0.8107 - acc: 0.6714 - val_loss: 1.2350 - val_acc: 0.5663
Epoch 408/512
23/23 [==============================] - 1s - loss: 0.7483 - acc: 0.7088 - val_loss: 1.0882 - val_acc: 0.6145
Epoch 409/512
23/23 [==============================] - 1s - loss: 0.8152 - acc: 0.7065 - val_loss: 1.1311 - val_acc: 0.5783
Epoch 410/512
23/23 [==============================] - 1s - loss: 0.7548 - acc: 0.7143 - val_loss: 1.2227 - val_acc: 0.5422
Epoch 411/512
23/23 [==============================] - 1s - loss: 0.7966 - acc: 0.6847 - val_loss: 1.2845 - val_acc: 0.5301
Epoch 412/512
23/23 [==============================] - 1s - loss: 0.7825 - acc: 0.7037 - val_loss: 1.3241 - val_acc: 0.5060
Epoch 413/512
23/23 [==============================] - 1s - loss: 0.7922 - acc: 0.6823 - val_loss: 1.1987 - val_acc: 0.5542
Epoch 414/512
23/23 [==============================] - 1s - loss: 0.8069 - acc: 0.6850 - val_loss: 1.4127 - val_acc: 0.5663
Epoch 415/512
23/23 [==============================] - 1s - loss: 0.8037 - acc: 0.6847 - val_loss: 1.1904 - val_acc: 0.5663
Epoch 416/512
23/23 [==============================] - 1s - loss: 0.7695 - acc: 0.7395 - val_loss: 1.3872 - val_acc: 0.5060
Epoch 417/512
23/23 [==============================] - 1s - loss: 0.8450 - acc: 0.6792 - val_loss: 1.5152 - val_acc: 0.5301
Epoch 418/512
23/23 [==============================] - 1s - loss: 0.7527 - acc: 0.7422 - val_loss: 1.3229 - val_acc: 0.5542
Epoch 419/512
23/23 [==============================] - 1s - loss: 0.8612 - acc: 0.6633 - val_loss: 1.4447 - val_acc: 0.5542
Epoch 420/512
23/23 [==============================] - 1s - loss: 0.7088 - acc: 0.7123 - val_loss: 1.5047 - val_acc: 0.5422
Epoch 421/512
23/23 [==============================] - 1s - loss: 0.7380 - acc: 0.7123 - val_loss: 1.2776 - val_acc: 0.5663
Epoch 422/512
23/23 [==============================] - 1s - loss: 0.7757 - acc: 0.6956 - val_loss: 1.2927 - val_acc: 0.5060
Epoch 423/512
23/23 [==============================] - 1s - loss: 0.7875 - acc: 0.6878 - val_loss: 1.1568 - val_acc: 0.5422
Epoch 424/512
23/23 [==============================] - 1s - loss: 0.8512 - acc: 0.6789 - val_loss: 1.0573 - val_acc: 0.5422
Epoch 425/512
23/23 [==============================] - 1s - loss: 0.7608 - acc: 0.6854 - val_loss: 1.2778 - val_acc: 0.5422
Epoch 426/512
23/23 [==============================] - 1s - loss: 0.8177 - acc: 0.7150 - val_loss: 1.2504 - val_acc: 0.5542
Epoch 427/512
23/23 [==============================] - 1s - loss: 0.8016 - acc: 0.6956 - val_loss: 1.2941 - val_acc: 0.5663
Epoch 428/512
23/23 [==============================] - 1s - loss: 0.7293 - acc: 0.7333 - val_loss: 1.2091 - val_acc: 0.5904
Epoch 429/512
23/23 [==============================] - 1s - loss: 0.7776 - acc: 0.7092 - val_loss: 1.1280 - val_acc: 0.5904
Epoch 430/512
23/23 [==============================] - 1s - loss: 0.7863 - acc: 0.7204 - val_loss: 1.1554 - val_acc: 0.6024
Epoch 431/512
23/23 [==============================] - 1s - loss: 0.7197 - acc: 0.7095 - val_loss: 1.2579 - val_acc: 0.5542
Epoch 432/512
23/23 [==============================] - 1s - loss: 0.7820 - acc: 0.7007 - val_loss: 1.1809 - val_acc: 0.5422
Epoch 433/512
23/23 [==============================] - 1s - loss: 0.7777 - acc: 0.7283 - val_loss: 1.2880 - val_acc: 0.5422
Epoch 434/512
23/23 [==============================] - 1s - loss: 0.7669 - acc: 0.7197 - val_loss: 1.2899 - val_acc: 0.5422
Epoch 435/512
23/23 [==============================] - 1s - loss: 0.7749 - acc: 0.7204 - val_loss: 1.2499 - val_acc: 0.5422
Epoch 436/512
23/23 [==============================] - 1s - loss: 0.7893 - acc: 0.7092 - val_loss: 1.4161 - val_acc: 0.5060
Epoch 437/512
23/23 [==============================] - 1s - loss: 0.7748 - acc: 0.7092 - val_loss: 1.4094 - val_acc: 0.5301
Epoch 438/512
23/23 [==============================] - 1s - loss: 0.8289 - acc: 0.7088 - val_loss: 1.3569 - val_acc: 0.5542
Epoch 439/512
23/23 [==============================] - 1s - loss: 0.7886 - acc: 0.7010 - val_loss: 1.2263 - val_acc: 0.5663
Epoch 440/512
23/23 [==============================] - 1s - loss: 0.7601 - acc: 0.7119 - val_loss: 1.2170 - val_acc: 0.5904
Epoch 441/512
23/23 [==============================] - 1s - loss: 0.7666 - acc: 0.7119 - val_loss: 1.4093 - val_acc: 0.5301
Epoch 442/512
23/23 [==============================] - 1s - loss: 0.8506 - acc: 0.6582 - val_loss: 1.2340 - val_acc: 0.5422
Epoch 443/512
23/23 [==============================] - 1s - loss: 0.6996 - acc: 0.7197 - val_loss: 1.2418 - val_acc: 0.6024
Epoch 444/512
23/23 [==============================] - 1s - loss: 0.8426 - acc: 0.6816 - val_loss: 1.1305 - val_acc: 0.5542
Epoch 445/512
23/23 [==============================] - 1s - loss: 0.7403 - acc: 0.7173 - val_loss: 1.2680 - val_acc: 0.5542
Epoch 446/512
23/23 [==============================] - 1s - loss: 0.7881 - acc: 0.7092 - val_loss: 1.2954 - val_acc: 0.5542
Epoch 447/512
23/23 [==============================] - 1s - loss: 0.8481 - acc: 0.6905 - val_loss: 1.3608 - val_acc: 0.5181
Epoch 448/512
23/23 [==============================] - 1s - loss: 0.7512 - acc: 0.7010 - val_loss: 1.4766 - val_acc: 0.5422
Epoch 449/512
23/23 [==============================] - 1s - loss: 0.7688 - acc: 0.6939 - val_loss: 1.1192 - val_acc: 0.6024
Epoch 450/512
23/23 [==============================] - 1s - loss: 0.8003 - acc: 0.6843 - val_loss: 1.0009 - val_acc: 0.6265
Epoch 451/512
23/23 [==============================] - 1s - loss: 0.7236 - acc: 0.7551 - val_loss: 1.3822 - val_acc: 0.5542
Epoch 452/512
23/23 [==============================] - 1s - loss: 0.7605 - acc: 0.7201 - val_loss: 1.2417 - val_acc: 0.5904
Epoch 453/512
23/23 [==============================] - 1s - loss: 0.7450 - acc: 0.7197 - val_loss: 1.1559 - val_acc: 0.5542
Epoch 454/512
23/23 [==============================] - 1s - loss: 0.7705 - acc: 0.6925 - val_loss: 1.3413 - val_acc: 0.5783
Epoch 455/512
23/23 [==============================] - 1s - loss: 0.7361 - acc: 0.7282 - val_loss: 1.4608 - val_acc: 0.5422
Epoch 456/512
23/23 [==============================] - 1s - loss: 0.8026 - acc: 0.7099 - val_loss: 1.3346 - val_acc: 0.4578
Epoch 457/512
23/23 [==============================] - 1s - loss: 0.8182 - acc: 0.6739 - val_loss: 1.1754 - val_acc: 0.6024
Epoch 458/512
23/23 [==============================] - 1s - loss: 0.7490 - acc: 0.7313 - val_loss: 1.4668 - val_acc: 0.5783
Epoch 459/512
23/23 [==============================] - 1s - loss: 0.7731 - acc: 0.7255 - val_loss: 1.2299 - val_acc: 0.6145
Epoch 460/512
23/23 [==============================] - 1s - loss: 0.7468 - acc: 0.7173 - val_loss: 1.1574 - val_acc: 0.6024
Epoch 461/512
23/23 [==============================] - 1s - loss: 0.7583 - acc: 0.7286 - val_loss: 1.3313 - val_acc: 0.5663
Epoch 462/512
23/23 [==============================] - 1s - loss: 0.7388 - acc: 0.7173 - val_loss: 1.4067 - val_acc: 0.4699
Epoch 463/512
23/23 [==============================] - 1s - loss: 0.7480 - acc: 0.7360 - val_loss: 1.3096 - val_acc: 0.5542
Epoch 464/512
23/23 [==============================] - 1s - loss: 0.7327 - acc: 0.7313 - val_loss: 1.1834 - val_acc: 0.5663
Epoch 465/512
23/23 [==============================] - 1s - loss: 0.7768 - acc: 0.7061 - val_loss: 1.3637 - val_acc: 0.5663
Epoch 466/512
23/23 [==============================] - 1s - loss: 0.7359 - acc: 0.7364 - val_loss: 1.3143 - val_acc: 0.5904
Epoch 467/512
23/23 [==============================] - 1s - loss: 0.7582 - acc: 0.7065 - val_loss: 1.4812 - val_acc: 0.5301
Epoch 468/512
23/23 [==============================] - 1s - loss: 0.8303 - acc: 0.6820 - val_loss: 1.2478 - val_acc: 0.5663
Epoch 469/512
23/23 [==============================] - 1s - loss: 0.7306 - acc: 0.7282 - val_loss: 1.3169 - val_acc: 0.5301
Epoch 470/512
23/23 [==============================] - 1s - loss: 0.8098 - acc: 0.6983 - val_loss: 1.2364 - val_acc: 0.5904
Epoch 471/512
23/23 [==============================] - 1s - loss: 0.8326 - acc: 0.6901 - val_loss: 1.5694 - val_acc: 0.4940
Epoch 472/512
23/23 [==============================] - 1s - loss: 0.7418 - acc: 0.7119 - val_loss: 1.5127 - val_acc: 0.5422
Epoch 473/512
23/23 [==============================] - 1s - loss: 0.7150 - acc: 0.7398 - val_loss: 1.3371 - val_acc: 0.6024
Epoch 474/512
23/23 [==============================] - 1s - loss: 0.8036 - acc: 0.6823 - val_loss: 1.6303 - val_acc: 0.5301
Epoch 475/512
23/23 [==============================] - 1s - loss: 0.7385 - acc: 0.7065 - val_loss: 1.2562 - val_acc: 0.5542
Epoch 476/512
23/23 [==============================] - 1s - loss: 0.7946 - acc: 0.6769 - val_loss: 1.2929 - val_acc: 0.5663
Epoch 477/512
23/23 [==============================] - 1s - loss: 0.7796 - acc: 0.6901 - val_loss: 1.3984 - val_acc: 0.5542
Epoch 478/512
23/23 [==============================] - 1s - loss: 0.7271 - acc: 0.7609 - val_loss: 1.2763 - val_acc: 0.5663
Epoch 479/512
23/23 [==============================] - 1s - loss: 0.7526 - acc: 0.7146 - val_loss: 1.1811 - val_acc: 0.5060
Epoch 480/512
23/23 [==============================] - 1s - loss: 0.7263 - acc: 0.7065 - val_loss: 1.1979 - val_acc: 0.5904
Epoch 481/512
23/23 [==============================] - 1s - loss: 0.7413 - acc: 0.7228 - val_loss: 1.4635 - val_acc: 0.5301
Epoch 482/512
23/23 [==============================] - 1s - loss: 0.7286 - acc: 0.7177 - val_loss: 1.3438 - val_acc: 0.5422
Epoch 483/512
23/23 [==============================] - 1s - loss: 0.7550 - acc: 0.7065 - val_loss: 1.7097 - val_acc: 0.6024
Epoch 484/512
23/23 [==============================] - 1s - loss: 0.7296 - acc: 0.7360 - val_loss: 1.3081 - val_acc: 0.5783
Epoch 485/512
23/23 [==============================] - 1s - loss: 0.7554 - acc: 0.6874 - val_loss: 1.1696 - val_acc: 0.5783
Epoch 486/512
23/23 [==============================] - 1s - loss: 0.7528 - acc: 0.7201 - val_loss: 1.4871 - val_acc: 0.5542
Epoch 487/512
23/23 [==============================] - 1s - loss: 0.6319 - acc: 0.7660 - val_loss: 1.3704 - val_acc: 0.5542
Epoch 488/512
23/23 [==============================] - 1s - loss: 0.8045 - acc: 0.6905 - val_loss: 1.3402 - val_acc: 0.5663
Epoch 489/512
23/23 [==============================] - 1s - loss: 0.7404 - acc: 0.7119 - val_loss: 1.5290 - val_acc: 0.5422
Epoch 490/512
23/23 [==============================] - 1s - loss: 0.7367 - acc: 0.7259 - val_loss: 1.6176 - val_acc: 0.5422
Epoch 491/512
23/23 [==============================] - 1s - loss: 0.8069 - acc: 0.7007 - val_loss: 1.3938 - val_acc: 0.5301
Epoch 492/512
23/23 [==============================] - 1s - loss: 0.6953 - acc: 0.7500 - val_loss: 1.6186 - val_acc: 0.5904
Epoch 493/512
23/23 [==============================] - 1s - loss: 0.7376 - acc: 0.7092 - val_loss: 1.3888 - val_acc: 0.6024
Epoch 494/512
23/23 [==============================] - 1s - loss: 0.7991 - acc: 0.6823 - val_loss: 1.2237 - val_acc: 0.5422
Epoch 495/512
23/23 [==============================] - 1s - loss: 0.6868 - acc: 0.7395 - val_loss: 1.4164 - val_acc: 0.5542
Epoch 496/512
23/23 [==============================] - 1s - loss: 0.7697 - acc: 0.6742 - val_loss: 1.5504 - val_acc: 0.5542
Epoch 497/512
23/23 [==============================] - 1s - loss: 0.7996 - acc: 0.7041 - val_loss: 1.7265 - val_acc: 0.5060
Epoch 498/512
23/23 [==============================] - 1s - loss: 0.7655 - acc: 0.7037 - val_loss: 1.6670 - val_acc: 0.5422
Epoch 499/512
23/23 [==============================] - 1s - loss: 0.6875 - acc: 0.7578 - val_loss: 1.3400 - val_acc: 0.5783
Epoch 500/512
23/23 [==============================] - 1s - loss: 0.7944 - acc: 0.6598 - val_loss: 1.4917 - val_acc: 0.5181
Epoch 501/512
23/23 [==============================] - 1s - loss: 0.7788 - acc: 0.7123 - val_loss: 1.4278 - val_acc: 0.5422
Epoch 502/512
23/23 [==============================] - 1s - loss: 0.8032 - acc: 0.6986 - val_loss: 1.5082 - val_acc: 0.5181
Epoch 503/512
23/23 [==============================] - 1s - loss: 0.7593 - acc: 0.7095 - val_loss: 1.4655 - val_acc: 0.5904
Epoch 504/512
23/23 [==============================] - 1s - loss: 0.7513 - acc: 0.7201 - val_loss: 1.7971 - val_acc: 0.5542
Epoch 505/512
23/23 [==============================] - 1s - loss: 0.7399 - acc: 0.7174 - val_loss: 1.2534 - val_acc: 0.5301
Epoch 506/512
23/23 [==============================] - 1s - loss: 0.7302 - acc: 0.7197 - val_loss: 1.5039 - val_acc: 0.5301
Epoch 507/512
23/23 [==============================] - 1s - loss: 0.7747 - acc: 0.7282 - val_loss: 1.3765 - val_acc: 0.5422
Epoch 508/512
23/23 [==============================] - 1s - loss: 0.7682 - acc: 0.6986 - val_loss: 1.3427 - val_acc: 0.5422
Epoch 509/512
23/23 [==============================] - 1s - loss: 0.8327 - acc: 0.6691 - val_loss: 1.4722 - val_acc: 0.5422
Epoch 510/512
23/23 [==============================] - 1s - loss: 0.7664 - acc: 0.7010 - val_loss: 1.2352 - val_acc: 0.5542
Epoch 511/512
23/23 [==============================] - 1s - loss: 0.7188 - acc: 0.7123 - val_loss: 1.5649 - val_acc: 0.5181
Epoch 512/512
23/23 [==============================] - 1s - loss: 0.7526 - acc: 0.7442 - val_loss: 1.3242 - val_acc: 0.5904
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
flatten_2 (Flatten)          (None, 12288)             0         
_________________________________________________________________
dense_3 (Dense)              (None, 60)                737340    
_________________________________________________________________
dense_4 (Dense)              (None, 5)                 305       
=================================================================
Total params: 737,645
Trainable params: 737,645
Non-trainable params: 0
_________________________________________________________________
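For reference, the summary above corresponds to a small fully-connected classifier: a Flatten layer feeding a 60-unit hidden Dense layer and a 5-way output layer (12288*60 + 60 = 737,340 and 60*5 + 5 = 305 parameters, matching the totals printed). The following is a minimal sketch of a model and training call that would reproduce this layer stack and a log like the one above; the 64x64x3 input shape (which flattens to 12288), the activations, the optimizer, the directory paths, and the batch size are assumptions, not taken from this notebook.

from keras.models import Sequential
from keras.layers import Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator

# Assumed input: 64 x 64 RGB images, which flatten to 12288 features
# and match the Flatten output shape (None, 12288) in the summary.
model = Sequential()
model.add(Flatten(input_shape=(64, 64, 3)))
model.add(Dense(60, activation='relu'))    # 12288*60 + 60 = 737,340 params
model.add(Dense(5, activation='softmax'))  # 60*5 + 5 = 305 params

model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()

# Hypothetical training call producing 23 steps per epoch for 512 epochs
# with a separate validation generator; paths and batch size are placeholders.
train_gen = ImageDataGenerator(rescale=1./255).flow_from_directory(
    '../train', target_size=(64, 64), batch_size=32, class_mode='categorical')
val_gen = ImageDataGenerator(rescale=1./255).flow_from_directory(
    '../validation', target_size=(64, 64), batch_size=32, class_mode='categorical')
model.fit_generator(train_gen, steps_per_epoch=23, epochs=512,
                    validation_data=val_gen, validation_steps=3)

Judging from the log, training accuracy plateaus around 0.70-0.74 while validation accuracy stays near 0.52-0.60 with a rising validation loss, i.e. the model overfits well before epoch 512; early stopping or a stronger regularizer would be the natural next step.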
