In [12]:
from __future__ import print_function

import numpy as np
from keras.datasets import imdb
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM, Convolution1D, MaxPooling1D
from keras.layers import Dropout
from keras.layers.convolutional import Conv1D
from keras.layers.convolutional import MaxPooling1D
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from six.moves import cPickle as pickle
from keras.utils.np_utils import to_categorical
from keras.callbacks import ModelCheckpoint, EarlyStopping, LambdaCallback

# fix random seed for reproducibility
seed=7
np.random.seed(seed)
import tensorflow as tf
tf.set_random_seed(seed)

NAME="LSTM-10.2"
split_valid_test=False

In [13]:
# Dane wejściowe
with open("X-sequences.pickle", 'rb') as f:
    X = pickle.load(f)
with open("Y.pickle", 'rb') as f:
    Y = pickle.load(f)

In [14]:
np.unique(Y)


Out[14]:
array([  24,   37,  162,  231,  303,  325,  412,  573,  606,  665,  859,
        959, 1191, 1250, 1305, 1351, 1356, 1381, 1534, 1603, 1683, 1823,
       1833, 1903, 1983, 2153, 2183, 2234, 2344, 2584, 2655, 2687, 2694,
       2712, 2716, 2729, 2764, 2844, 2954, 4035, 4104, 4144, 4145, 4146])

W tej wersji eksperymentu, Y zawiera root_service - 44 unikalne kategorie główne. Zamieńmy je na liczby z przedziału 0-43


In [15]:
root_services=np.sort(np.unique(Y))
# skonstruuj odwrtotny indeks kategorii głównych
services_idx={root_services[i]: i for i in range(len(root_services))}

In [16]:
# Zamień 
Y=[services_idx[y] for y in Y]

In [17]:
Y=to_categorical(Y)
Y.shape


Out[17]:
(22450, 44)

In [18]:
top_words = 5000
classes=Y[0,].shape[0]
print(classes)


44

In [19]:
# max_length (98th percentile is 476), padd the rest
max_length=500
X=sequence.pad_sequences(X, maxlen=max_length)

slice in half even/odds to nulify time differencies

X_train=X[0:][::2] # even X_test=X[1:][::2] # odds

Y_train=np.array(Y[0:][::2]) # even Y_test=np.array(Y[1:][::2]) # odds

if split_valid_test:

# Split "test" in half for validation and final testing
X_valid=X_test[:len(X_test)/2]
Y_valid=Y_test[:len(Y_test)/2]
X_test=X_test[len(X_test)/2:]
Y_test=Y_test[len(Y_test)/2:]

else: X_valid=X_test Y_valid=Y_test


In [20]:
# create the model
embedding_vecor_length = 100
model = Sequential()
model.add(Embedding(top_words, embedding_vecor_length, input_length=max_length))
model.add(Dropout(0.2))
model.add(Conv1D(filters=64, kernel_size=3, padding='same', activation='relu'))
model.add(MaxPooling1D(pool_size=2))
model.add(Conv1D(filters=16, kernel_size=3, padding='same', activation='relu'))
model.add(MaxPooling1D(pool_size=2))
# model.add(Convolution1D(nb_filter=1024, filter_length=3, border_mode='same', activation='relu'))
# model.add(MaxPooling1D(pool_length=2))
# model.add(Convolution1D(nb_filter=512, filter_length=3, border_mode='same', activation='relu'))
# model.add(MaxPooling1D(pool_length=2))
model.add(LSTM(100))
model.add(Dropout(0.3))
model.add(Dense(classes, activation='softmax'))


model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

print(model.summary())


# Callbacks
early_stop_cb = EarlyStopping(monitor='val_loss', patience=20, verbose=1)

checkpoit_cb = ModelCheckpoint(NAME+".h5", save_best_only=True)

# Print the batch number at the beginning of every batch.
batch_print_cb = LambdaCallback(on_batch_begin=lambda batch, logs: print (".",end=''), 
                                on_epoch_end=lambda batch, logs: print (batch))

# Plot the loss after every epoch.
plot_loss_cb = LambdaCallback(on_epoch_end=lambda epoch, logs: 
                              print (epoch, logs))
                              #plt.plot(np.arange(epoch), logs['loss']))

print("done")


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
embedding_2 (Embedding)      (None, 500, 100)          500000    
_________________________________________________________________
dropout_3 (Dropout)          (None, 500, 100)          0         
_________________________________________________________________
conv1d_3 (Conv1D)            (None, 500, 64)           19264     
_________________________________________________________________
max_pooling1d_3 (MaxPooling1 (None, 250, 64)           0         
_________________________________________________________________
conv1d_4 (Conv1D)            (None, 250, 16)           3088      
_________________________________________________________________
max_pooling1d_4 (MaxPooling1 (None, 125, 16)           0         
_________________________________________________________________
lstm_2 (LSTM)                (None, 100)               46800     
_________________________________________________________________
dropout_4 (Dropout)          (None, 100)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 44)                4444      
=================================================================
Total params: 573,596
Trainable params: 573,596
Non-trainable params: 0
_________________________________________________________________
None
done

In [21]:
history = model.fit(
        X,#_train,
        Y,#_train,
        initial_epoch=200,
        epochs=300,
        batch_size=1024,
        #validation_data=(X_valid,Y_valid),
        validation_split=0.3,
        callbacks=[early_stop_cb, checkpoit_cb, batch_print_cb, plot_loss_cb],
        verbose=0
        )

#history=model.fit(X_train, Y_train, validation_data=(X_test, Y_test), nb_epoch=3, batch_size=512)


................200
200 {'acc': 0.165075728657198, 'loss': 3.5458040180378658, 'val_acc': 0.13479810133753367, 'val_loss': 2.8823333771948012}
................201
201 {'acc': 0.17697594531873292, 'loss': 2.8055852585292826, 'val_acc': 0.20353325504163666, 'val_loss': 2.6592358840616184}
................202
202 {'acc': 0.25353188245475816, 'loss': 2.6303187277809541, 'val_acc': 0.33165083312365334, 'val_loss': 2.3802558871742665}
................203
203 {'acc': 0.35859742941588746, 'loss': 2.3059678528620511, 'val_acc': 0.34619952748903471, 'val_loss': 2.1538395213401231}
................204
204 {'acc': 0.38888888915819841, 'loss': 2.1314054335045181, 'val_acc': 0.38494655950052437, 'val_loss': 2.0882845434610182}
................205
205 {'acc': 0.43133511583253115, 'loss': 2.018911347401966, 'val_acc': 0.42102138064536232, 'val_loss': 1.9680861112907211}
................206
206 {'acc': 0.46639938880669884, 'loss': 1.8828499963208587, 'val_acc': 0.45917458467698719, 'val_loss': 1.8555228761142903}
................207
207 {'acc': 0.50655466525991921, 'loss': 1.7517474962582611, 'val_acc': 0.48960807813318213, 'val_loss': 1.774305235461781}
................208
208 {'acc': 0.53792796207624372, 'loss': 1.6361215626054006, 'val_acc': 0.52256532477086626, 'val_loss': 1.6773280383855316}
................209
209 {'acc': 0.57038309792002395, 'loss': 1.5396732256523871, 'val_acc': 0.55700712674020869, 'val_loss': 1.6119259196618956}
................210
210 {'acc': 0.58966526731638202, 'loss': 1.4757088192155285, 'val_acc': 0.54468527882229401, 'val_loss': 1.5707844875204309}
................211
211 {'acc': 0.60143820866534647, 'loss': 1.4198600781150519, 'val_acc': 0.55997625141981944, 'val_loss': 1.5603766373387424}
................212
212 {'acc': 0.61092019943664799, 'loss': 1.3691614082907393, 'val_acc': 0.57586104951958195, 'val_loss': 1.5178298463164486}
................213
213 {'acc': 0.62453862705712593, 'loss': 1.331091906378951, 'val_acc': 0.58090855602413916, 'val_loss': 1.4930750802690229}
................214
214 {'acc': 0.64063891987316901, 'loss': 1.2769941615742764, 'val_acc': 0.59501188002402883, 'val_loss': 1.4679794203923604}
................215
215 {'acc': 0.64019345678839501, 'loss': 1.279364295871837, 'val_acc': 0.59575415931801334, 'val_loss': 1.4607607066772896}
................216
216 {'acc': 0.64961181135901036, 'loss': 1.2481373560797309, 'val_acc': 0.61045130981119111, 'val_loss': 1.4500623559158941}
................217
217 {'acc': 0.66456662975104697, 'loss': 1.2065979612834037, 'val_acc': 0.61653800361796396, 'val_loss': 1.4163429349165348}
................218
218 {'acc': 0.68015782223473653, 'loss': 1.1653752361558329, 'val_acc': 0.62039786506435368, 'val_loss': 1.3908396571379094}
................219
219 {'acc': 0.68340333526608177, 'loss': 1.1349280502287291, 'val_acc': 0.62752375452648712, 'val_loss': 1.3840895574053222}
................220
220 {'acc': 0.69842178924432374, 'loss': 1.0850930643078631, 'val_acc': 0.63687648555162113, 'val_loss': 1.3571820581893739}
................221
221 {'acc': 0.70472190431530757, 'loss': 1.0569696536087816, 'val_acc': 0.63182898074600868, 'val_loss': 1.3581531551841319}
................222
222 {'acc': 0.71757668387242002, 'loss': 1.0233106005354147, 'val_acc': 0.64400238025216761, 'val_loss': 1.3391882614398511}
................223
223 {'acc': 0.72438589778886553, 'loss': 0.99561907192276111, 'val_acc': 0.64088479965712852, 'val_loss': 1.3473791651374654}
................224
224 {'acc': 0.73354970166985733, 'loss': 0.96440625234734734, 'val_acc': 0.64608076037816931, 'val_loss': 1.3310992862153224}
................225
225 {'acc': 0.73132238724736354, 'loss': 0.9651203536040549, 'val_acc': 0.64637767022692394, 'val_loss': 1.3253620101267256}
................226
226 {'acc': 0.74112256690414102, 'loss': 0.92521859316845889, 'val_acc': 0.65068290168485843, 'val_loss': 1.3138973973426002}
................227
227 {'acc': 0.74977727013415962, 'loss': 0.89770379347307805, 'val_acc': 0.64133017080130317, 'val_loss': 1.3293599046041167}
................228
228 {'acc': 0.75537737277350048, 'loss': 0.87809934562378689, 'val_acc': 0.64474465883825849, 'val_loss': 1.3294087656886446}
................229
229 {'acc': 0.75945017250406233, 'loss': 0.8602143170285258, 'val_acc': 0.6584026119771309, 'val_loss': 1.3140644392887939}
................230
230 {'acc': 0.76454117432553392, 'loss': 0.83310421238847399, 'val_acc': 0.65676960002215057, 'val_loss': 1.3134068579118778}
................231
231 {'acc': 0.7707776530516941, 'loss': 0.8083248121965716, 'val_acc': 0.6543942984379385, 'val_loss': 1.3095497733906725}
................232
232 {'acc': 0.77275041415974488, 'loss': 0.80004485345029397, 'val_acc': 0.65869952635640483, 'val_loss': 1.3210947898674463}
................233
233 {'acc': 0.78032327813472213, 'loss': 0.79069864113427646, 'val_acc': 0.66330166115047251, 'val_loss': 1.3158335108088768}
................234
234 {'acc': 0.78146875476630817, 'loss': 0.76296940557796167, 'val_acc': 0.66448931385389132, 'val_loss': 1.2997980732249534}
................235
235 {'acc': 0.78789614424900323, 'loss': 0.74909514426972645, 'val_acc': 0.66255938383859014, 'val_loss': 1.2902322126963925}
................236
236 {'acc': 0.79107802047276754, 'loss': 0.72880069237257883, 'val_acc': 0.66419239919146, 'val_loss': 1.3053985771827064}
................237
237 {'acc': 0.79705994678724046, 'loss': 0.71588210500013405, 'val_acc': 0.67116983387079487, 'val_loss': 1.2823740127817185}
................238
238 {'acc': 0.79744177163823105, 'loss': 0.71637884556556819, 'val_acc': 0.65350356676799382, 'val_loss': 1.3463620508085237}
................239
239 {'acc': 0.78465063115696854, 'loss': 0.77418609763642254, 'val_acc': 0.65513658452770207, 'val_loss': 1.3429184979327784}
................240
240 {'acc': 0.78993254554048664, 'loss': 0.7484082415295995, 'val_acc': 0.65988717537877684, 'val_loss': 1.3455745811417097}
................241
241 {'acc': 0.7987781599518663, 'loss': 0.71774095276377048, 'val_acc': 0.67295130768751021, 'val_loss': 1.2825818713090764}
................242
242 {'acc': 0.80934198827890402, 'loss': 0.67623579713001614, 'val_acc': 0.67250594588753165, 'val_loss': 1.2777839751821232}
................243
243 {'acc': 0.80966017615541752, 'loss': 0.67725800125141, 'val_acc': 0.67814727378854278, 'val_loss': 1.2952110648438371}
................244
244 {'acc': 0.81233295122752047, 'loss': 0.65956390046642466, 'val_acc': 0.67265440024559398, 'val_loss': 1.2907976962608283}
................245
245 {'acc': 0.81895125419777648, 'loss': 0.63890712582398967, 'val_acc': 0.67220903320720249, 'val_loss': 1.3171755752201035}
................246
246 {'acc': 0.82226040659324651, 'loss': 0.61798319468990426, 'val_acc': 0.67651425942672405, 'val_loss': 1.2860366397685505}
................247
247 {'acc': 0.83046964615435492, 'loss': 0.59237437336533161, 'val_acc': 0.67606888714991953, 'val_loss': 1.3178859968932961}
................248
248 {'acc': 0.83619702211299651, 'loss': 0.58139431104870021, 'val_acc': 0.67666271491741625, 'val_loss': 1.3201212084491576}
................249
249 {'acc': 0.83307878431267024, 'loss': 0.57293783326450609, 'val_acc': 0.67636579232657612, 'val_loss': 1.2992377906966945}
................250
250 {'acc': 0.84338806134318978, 'loss': 0.54850801983695308, 'val_acc': 0.6744358706644199, 'val_loss': 1.3414553752138043}
................251
251 {'acc': 0.84135166156135688, 'loss': 0.55530419014512822, 'val_acc': 0.67324821512942645, 'val_loss': 1.3397090848437965}
................252
252 {'acc': 0.84586992741875233, 'loss': 0.53767105055110243, 'val_acc': 0.67072446754029702, 'val_loss': 1.3600873318533999}
................253
253 {'acc': 0.84835178973915404, 'loss': 0.53175683138069185, 'val_acc': 0.67384204105639967, 'val_loss': 1.3584392263317335}
................254
254 {'acc': 0.85038818878512723, 'loss': 0.52192820519667871, 'val_acc': 0.67606888417676625, 'val_loss': 1.3353670187064417}
................255
255 {'acc': 0.85605192965825161, 'loss': 0.50225578448207109, 'val_acc': 0.68022565264033596, 'val_loss': 1.3446310727443378}
................256
256 {'acc': 0.85796105464906414, 'loss': 0.48981410530485298, 'val_acc': 0.67369358896359699, 'val_loss': 1.3639415055159436}
................257
257 {'acc': 0.85891561701929842, 'loss': 0.48451248165212912, 'val_acc': 0.67948336938214704, 'val_loss': 1.3566647363105466}
................258
258 {'acc': 0.86298841995881592, 'loss': 0.47355776407707428, 'val_acc': 0.67933491403303348, 'val_loss': 1.3721975742213233}
................259
259 {'acc': 0.86648848384668065, 'loss': 0.46750098172529242, 'val_acc': 0.67785035855979647, 'val_loss': 1.3797217225235601}
................260
260 {'acc': 0.86731577207830646, 'loss': 0.45923739106157163, 'val_acc': 0.67918646307286057, 'val_loss': 1.3853099448097574}
................261
261 {'acc': 0.86954308316287943, 'loss': 0.4508982821170559, 'val_acc': 0.67814727138170439, 'val_loss': 1.3899148637495244}
................262
262 {'acc': 0.86877943308158911, 'loss': 0.43879385003306809, 'val_acc': 0.67295130655488045, 'val_loss': 1.4145359715486649}
................263
263 {'acc': 0.87329769776312616, 'loss': 0.43082324696797913, 'val_acc': 0.67592043236712096, 'val_loss': 1.405419529758553}
Epoch 00263: early stopping

In [22]:
import matplotlib.pyplot as plt
# summarize history for accuracy
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()


# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
# plt.title('model loss (log scale)')
# plt.yscale('log')
plt.show()



In [25]:
history2 = model.fit(
        X,#_train,
        Y,#_train,
        initial_epoch=10000,    
        epochs=10050,
        batch_size=1024,
        #validation_data=(X_valid,Y_valid),
        validation_split=0.3,
        callbacks=[early_stop_cb, checkpoit_cb, batch_print_cb, plot_loss_cb],
        verbose=0
        )


................10000
10000 {'acc': 0.92064401264999063, 'loss': 0.26861639642694213, 'val_acc': 0.68497624929613854, 'val_loss': 1.5715816304123034}
................10001
10001 {'acc': 0.9205803749624123, 'loss': 0.26326394750217119, 'val_acc': 0.67933491856355277, 'val_loss': 1.5882249127657566}
................10002
10002 {'acc': 0.92427135141232364, 'loss': 0.26034148080864528, 'val_acc': 0.68007719601701389, 'val_loss': 1.637434688146777}
................10003
10003 {'acc': 0.92408043939648232, 'loss': 0.25239949219096658, 'val_acc': 0.6796318329428267, 'val_loss': 1.5912886883470621}
................10004
10004 {'acc': 0.9240804387668291, 'loss': 0.25327368379701287, 'val_acc': 0.68245249859227708, 'val_loss': 1.5947540831395963}
................10005
10005 {'acc': 0.92522591367635132, 'loss': 0.25923121294114099, 'val_acc': 0.68230404140264012, 'val_loss': 1.6093536255761733}
................10006
10006 {'acc': 0.92821687618496918, 'loss': 0.24091740912785434, 'val_acc': 0.67562351374048524, 'val_loss': 1.6638663132230347}
................10007
10007 {'acc': 0.92936235246000454, 'loss': 0.24489544117528092, 'val_acc': 0.6748812328891346, 'val_loss': 1.6582245096174952}
................10008
10008 {'acc': 0.92662594066376869, 'loss': 0.24549305280444123, 'val_acc': 0.67324822390730765, 'val_loss': 1.6381649489640624}
................10009
10009 {'acc': 0.92904416494004161, 'loss': 0.23716425490968021, 'val_acc': 0.67651425121515774, 'val_loss': 1.6867410428733554}
................10010
10010 {'acc': 0.93228967706863097, 'loss': 0.23107435513016103, 'val_acc': 0.67651425772777929, 'val_loss': 1.6703816915634409}
................10011
10011 {'acc': 0.93076237745225554, 'loss': 0.22798666790027236, 'val_acc': 0.67354513658763682, 'val_loss': 1.667796024517322}
................10012
10012 {'acc': 0.93222604240035378, 'loss': 0.23229501458908805, 'val_acc': 0.67488123472965811, 'val_loss': 1.6666616413768849}
................10013
10013 {'acc': 0.93050782827228262, 'loss': 0.23377846980786771, 'val_acc': 0.67592042585449941, 'val_loss': 1.6864209605509199}
................10014
10014 {'acc': 0.93419880617115503, 'loss': 0.221667548320335, 'val_acc': 0.67488123232281971, 'val_loss': 1.662007920815656}
................10015
10015 {'acc': 0.93318060326093755, 'loss': 0.22516229257330847, 'val_acc': 0.67681116814284881, 'val_loss': 1.6718788979455581}
................10016
10016 {'acc': 0.93178057931557978, 'loss': 0.22527749946254863, 'val_acc': 0.67636579685709541, 'val_loss': 1.6999002879135965}
................10017
10017 {'acc': 0.93228967668932172, 'loss': 0.22078372375555769, 'val_acc': 0.67933491629829312, 'val_loss': 1.6728854332287351}
................10018
10018 {'acc': 0.93451699100560903, 'loss': 0.21380926247041795, 'val_acc': 0.67948337419582372, 'val_loss': 1.6941316263692678}
................10019
10019 {'acc': 0.93680794273637191, 'loss': 0.21304423475145642, 'val_acc': 0.67755344885262059, 'val_loss': 1.7063790353063732}
................10020
10020 {'acc': 0.94228076880193934, 'loss': 0.19948936451880306, 'val_acc': 0.67458432445616734, 'val_loss': 1.7391051130453368}
................10021
10021 {'acc': 0.94177166809027657, 'loss': 0.19766754143729956, 'val_acc': 0.67532660700646263, 'val_loss': 1.7820373023207567}
Epoch 10021: early stopping

In [15]:
score=model.evaluate(X_test,Y_test, verbose=0)
print("OOS %s: %.2f%%" % (model.metrics_names[1], score[1]*100))
print("OOS %s: %.2f" % (model.metrics_names[0], score[0]))



NameErrorTraceback (most recent call last)
<ipython-input-15-e678b5269ea6> in <module>()
----> 1 score=model.evaluate(X_test,Y_test, verbose=0)
      2 print("OOS %s: %.2f%%" % (model.metrics_names[1], score[1]*100))
      3 print("OOS %s: %.2f" % (model.metrics_names[0], score[0]))

NameError: name 'X_test' is not defined

In [26]:
import matplotlib.pyplot as plt
# summarize history for accuracy
plt.plot(history2.history['acc'])
plt.plot(history2.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()


# summarize history for loss
plt.plot(history2.history['loss'])
plt.plot(history2.history['val_loss'])
plt.title('model loss (log scale)')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
plt.yscale('log')
plt.show()



In [15]:
history3 = model.fit(
        X,#_train,
        Y,#_train,
        initial_epoch=60,    
        epochs=90,
        batch_size=1024,
        #validation_data=(X_valid,Y_valid),
        validation_split=0.3,
        callbacks=[early_stop_cb, checkpoit_cb, batch_print_cb, plot_loss_cb],
        verbose=0
        )


................60
60 {'acc': 0.69396716357036081, 'loss': 1.0682002606852541, 'val_acc': 0.61906176239181299, 'val_loss': 1.4453319599962575}
................61
61 {'acc': 0.7037037048871484, 'loss': 1.0332637657505632, 'val_acc': 0.61564727237275552, 'val_loss': 1.4414969362725554}
................62
62 {'acc': 0.70701285371711209, 'loss': 1.0041220267947522, 'val_acc': 0.62232779267281646, 'val_loss': 1.4268438312050282}
................63
63 {'acc': 0.71197658232548955, 'loss': 0.99611397675313584, 'val_acc': 0.62262470591946051, 'val_loss': 1.4255109716764256}
................64
64 {'acc': 0.71834033401439978, 'loss': 0.97293814648437305, 'val_acc': 0.62069477901889136, 'val_loss': 1.4260232516535671}
................65
65 {'acc': 0.72184039811467771, 'loss': 0.97190722480626812, 'val_acc': 0.61935867436424852, 'val_loss': 1.4186554088162131}
................66
66 {'acc': 0.72578592269766862, 'loss': 0.94906096593772571, 'val_acc': 0.62024940674208684, 'val_loss': 1.4094102914995932}
................67
67 {'acc': 0.69625811521767567, 'loss': 1.0645167365607617, 'val_acc': 0.60243468216649143, 'val_loss': 1.4889457426841355}
................68
68 {'acc': 0.69625811337423305, 'loss': 1.0706597206716062, 'val_acc': 0.60599762640203259, 'val_loss': 1.4689711890141357}
................69
69 {'acc': 0.69950362780902231, 'loss': 1.0387265352221471, 'val_acc': 0.60881829502463625, 'val_loss': 1.4538306377279504}
................70
70 {'acc': 0.71000381935123758, 'loss': 1.0077977372259679, 'val_acc': 0.61624109844130748, 'val_loss': 1.4287678232668697}
................71
71 {'acc': 0.70134911596949534, 'loss': 1.0318226642518822, 'val_acc': 0.60213777005247715, 'val_loss': 1.4894028610401653}
................72
72 {'acc': 0.68760341133524538, 'loss': 1.099111608152886, 'val_acc': 0.60733373091509679, 'val_loss': 1.479072934658114}
................73
73 {'acc': 0.69142166257617699, 'loss': 1.0872183346605733, 'val_acc': 0.60213776608827274, 'val_loss': 1.4834875916075536}
................74
74 {'acc': 0.69994909045379761, 'loss': 1.058706705785972, 'val_acc': 0.60881829021095946, 'val_loss': 1.4884105396950329}
................75
75 {'acc': 0.70306732959687834, 'loss': 1.037833027186035, 'val_acc': 0.61564726826697236, 'val_loss': 1.4449179574599458}
................76
76 {'acc': 0.70364006627405573, 'loss': 1.0262630202470731, 'val_acc': 0.61861639280500436, 'val_loss': 1.4351270128032658}
................77
77 {'acc': 0.70949471811612808, 'loss': 1.0033399172831814, 'val_acc': 0.61831947729310066, 'val_loss': 1.4346362315560746}
................78
78 {'acc': 0.71541300888232673, 'loss': 0.98309921032602254, 'val_acc': 0.62054632282030553, 'val_loss': 1.4234220100978208}
................79
79 {'acc': 0.71961308407149305, 'loss': 0.97791410750281926, 'val_acc': 0.62277315843699943, 'val_loss': 1.434774916981858}
................80
80 {'acc': 0.71840397145163393, 'loss': 0.98115001725258377, 'val_acc': 0.61460808280528012, 'val_loss': 1.4496738825727811}
................81
81 {'acc': 0.7203767358520885, 'loss': 0.95888457092541513, 'val_acc': 0.61980404253526988, 'val_loss': 1.4341611703614441}
................82
82 {'acc': 0.72572228624663826, 'loss': 0.95314105417007133, 'val_acc': 0.6193586749305634, 'val_loss': 1.4350798064909185}
................83
83 {'acc': 0.72871324873249754, 'loss': 0.93137366312430436, 'val_acc': 0.6227731577291058, 'val_loss': 1.4264792727744495}
................84
84 {'acc': 0.73036782408057999, 'loss': 0.93253052497890232, 'val_acc': 0.62470309694702442, 'val_loss': 1.43061292313057}
................85
85 {'acc': 0.7329133257271756, 'loss': 0.90921011269358076, 'val_acc': 0.62425771943180697, 'val_loss': 1.4309742467703559}
................86
86 {'acc': 0.73475881330351256, 'loss': 0.90705162541654194, 'val_acc': 0.62381236003866669, 'val_loss': 1.4259913146071084}
................87
87 {'acc': 0.73711340282806026, 'loss': 0.91535873602437079, 'val_acc': 0.62841448846169157, 'val_loss': 1.4194937236518588}
Epoch 00087: early stopping

In [16]:
import matplotlib.pyplot as plt
# summarize history for accuracy
plt.plot(history3.history['acc'])
plt.plot(history3.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()


# summarize history for loss
plt.plot(history3.history['loss'])
plt.plot(history3.history['val_loss'])
plt.title('model loss (log scale)')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
plt.yscale('log')
plt.show()



In [ ]: