In [1]:
from __future__ import print_function

import numpy as np
from keras.datasets import imdb
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM, Convolution1D, MaxPooling1D
from keras.layers import Dropout
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from six.moves import cPickle as pickle
from keras.utils.np_utils import to_categorical
from keras.callbacks import ModelCheckpoint, EarlyStopping, LambdaCallback

# fix random seed for reproducibility
np.random.seed(7)

NAME="LSTM-10.1"
split_valid_test=False


Using TensorFlow backend.

In [2]:
# Input data
with open("X-sequences.pickle", 'rb') as f:
    X = pickle.load(f)
with open("Y.pickle", 'rb') as f:
    Y = pickle.load(f)

In [3]:
np.unique(Y)


Out[3]:
array([  24,   37,  162,  231,  303,  325,  412,  573,  606,  665,  859,
        959, 1191, 1250, 1305, 1351, 1356, 1381, 1534, 1603, 1683, 1823,
       1833, 1903, 1983, 2153, 2183, 2234, 2344, 2584, 2655, 2687, 2694,
       2712, 2716, 2729, 2764, 2844, 2954, 4035, 4104, 4144, 4145, 4146])

In this version of the experiment, Y contains root_service: 44 unique root categories. Let's map them to integers in the range 0-43.


In [4]:
root_services = np.sort(np.unique(Y))
# build a reverse index: root category -> integer code
services_idx = {s: i for i, s in enumerate(root_services)}

In [5]:
# map each label to its integer code
Y = [services_idx[y] for y in Y]
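
Since root_services is sorted, the same 0-43 codes could be produced in one vectorized step; a minimal equivalent sketch (assuming Y still holds the original labels at this point):

# np.searchsorted returns, for each label, its position in the sorted
# unique array root_services, i.e. exactly the 0..43 code built above
Y_codes = np.searchsorted(root_services, Y)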

In [6]:
Y=to_categorical(Y)
Y.shape


Out[6]:
(22450, 44)

In [7]:
top_words = 6000
classes = Y.shape[1]  # number of target categories
print(classes)


44

In [8]:
# max_length: the 98th percentile of sequence lengths is 476; pad/truncate to 500
max_length = 500
X = sequence.pad_sequences(X, maxlen=max_length)
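
For reference, the 98th-percentile figure quoted in the comment above can be recomputed from the unpadded sequences; a minimal sketch, where X_raw is a hypothetical name for the sequences before pad_sequences was applied:

# distribution of raw sequence lengths (X_raw = X before padding)
lengths = np.array([len(seq) for seq in X_raw])
print(np.percentile(lengths, 98))  # reported above as 476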

In [9]:
# split into even/odd halves to nullify time differences
X_train = X[::2]    # even indices
X_test  = X[1::2]   # odd indices

Y_train = np.array(Y[::2])    # even indices
Y_test  = np.array(Y[1::2])   # odd indices

if split_valid_test:
    # split "test" in half for validation and final testing
    X_valid = X_test[:len(X_test)//2]
    Y_valid = Y_test[:len(Y_test)//2]
    X_test = X_test[len(X_test)//2:]
    Y_test = Y_test[len(Y_test)//2:]
else:
    X_valid = X_test
    Y_valid = Y_test
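
A quick sanity check on the split (not part of the original run): the halves should differ in size by at most one sample, and each should still cover all 44 classes:

print(X_train.shape, X_test.shape)    # ~11225 rows each out of 22450
assert Y_train.sum(axis=0).min() > 0  # one-hot column sums: every class in train
assert Y_test.sum(axis=0).min() > 0   # ... and in test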

In [9]:
# create the model
embedding_vector_length = 128
model = Sequential()
model.add(Embedding(top_words, embedding_vector_length, input_length=max_length))
model.add(Dropout(0.25))
# model.add(Convolution1D(filters=1024, kernel_size=3, padding='same', activation='relu'))
# model.add(MaxPooling1D(pool_size=2))
# model.add(Convolution1D(filters=512, kernel_size=3, padding='same', activation='relu'))
# model.add(MaxPooling1D(pool_size=2))
model.add(LSTM(100))
model.add(Dropout(0.3))
model.add(Dense(classes, activation='softmax'))


model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

print(model.summary())


# Callbacks
early_stop_cb = EarlyStopping(monitor='val_loss', patience=20, verbose=1)

checkpoint_cb = ModelCheckpoint(NAME+".h5", save_best_only=True)  # keeps the model with the best val_loss

# Print the batch number at the beginning of every batch.
batch_print_cb = LambdaCallback(on_batch_begin=lambda batch, logs: print(".", end=''),
                                on_epoch_end=lambda epoch, logs: print(epoch))

# Print the epoch number and metrics after every epoch.
plot_loss_cb = LambdaCallback(on_epoch_end=lambda epoch, logs:
                              print(epoch, logs))
                              #plt.plot(np.arange(epoch), logs['loss']))

print("done")


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
embedding_1 (Embedding)      (None, 500, 128)          768000    
_________________________________________________________________
dropout_1 (Dropout)          (None, 500, 128)          0         
_________________________________________________________________
lstm_1 (LSTM)                (None, 100)               91600     
_________________________________________________________________
dropout_2 (Dropout)          (None, 100)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 44)                4444      
=================================================================
Total params: 864,044
Trainable params: 864,044
Non-trainable params: 0
_________________________________________________________________
None
done
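
The parameter counts in the summary can be verified by hand; a quick arithmetic check:

embed = 6000 * 128                     # Embedding: top_words * embedding_vector_length = 768,000
lstm  = 4 * ((128 + 100) * 100 + 100)  # LSTM: 4 gates * ((input_dim + units) * units + units) = 91,600
dense = 100 * 44 + 44                  # Dense: units * classes + biases = 4,444
print(embed + lstm + dense)            # 864044, matching the summary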

In [10]:
history = model.fit(
        X,  # note: full X, not X_train
        Y,  # note: full Y, not Y_train
        epochs=50,
        batch_size=1024,
        #validation_data=(X_valid,Y_valid),
        validation_split=0.3,  # Keras holds out the last 30% of rows as validation
        callbacks=[early_stop_cb, checkpoint_cb, batch_print_cb, plot_loss_cb],
        verbose=0
        )

#history=model.fit(X_train, Y_train, validation_data=(X_test, Y_test), nb_epoch=3, batch_size=512)


................0
0 {'acc': 0.30603283721860225, 'loss': 3.6439972524957316, 'val_acc': 0.13479810068273204, 'val_loss': 3.100206742094135}
................1
1 {'acc': 0.32779686858639762, 'loss': 2.6346146135486781, 'val_acc': 0.38732185365185318, 'val_loss': 2.2789429213825145}
................2
2 {'acc': 0.40728013307914429, 'loss': 2.1687355504261108, 'val_acc': 0.38539192200839661, 'val_loss': 2.0784195471829303}
................3
3 {'acc': 0.46900852665398068, 'loss': 1.9759234166470014, 'val_acc': 0.48678740965215722, 'val_loss': 1.9026343896100186}
................4
4 {'acc': 0.53073692289156749, 'loss': 1.7824167102230131, 'val_acc': 0.54349762456150918, 'val_loss': 1.7133478138622364}
................5
5 {'acc': 0.55638284274028793, 'loss': 1.6503025617735432, 'val_acc': 0.54453681823476763, 'val_loss': 1.6734852479359317}
................6
6 {'acc': 0.58845615415031771, 'loss': 1.5389786299151653, 'val_acc': 0.55671021575882429, 'val_loss': 1.6121934321034266}
................7
7 {'acc': 0.6035382464685497, 'loss': 1.4537964852632754, 'val_acc': 0.57304037863171864, 'val_loss': 1.5656489091361221}
................8
8 {'acc': 0.61900216267432695, 'loss': 1.3920927946388806, 'val_acc': 0.57912708149953018, 'val_loss': 1.5226502741317567}
................9
9 {'acc': 0.63077510517639135, 'loss': 1.3398138247966098, 'val_acc': 0.5858076077458978, 'val_loss': 1.5062542556300582}
................10
10 {'acc': 0.60805650989078031, 'loss': 1.4918328193636876, 'val_acc': 0.55136579784814654, 'val_loss': 1.658816452547377}
................11
11 {'acc': 0.62173857732296778, 'loss': 1.3761088819438239, 'val_acc': 0.58120546233342552, 'val_loss': 1.5436299192650584}
................12
12 {'acc': 0.64751177240270719, 'loss': 1.2899549771799634, 'val_acc': 0.59991093103789372, 'val_loss': 1.49999315891583}
................13
13 {'acc': 0.66450299061450757, 'loss': 1.2304142485202625, 'val_acc': 0.59694181003933, 'val_loss': 1.5152508062874619}
................14
14 {'acc': 0.66851215469058844, 'loss': 1.207780800946602, 'val_acc': 0.59144893295691303, 'val_loss': 1.5373933660729198}
................15
15 {'acc': 0.67983963419132687, 'loss': 1.1753826925161914, 'val_acc': 0.60881829205148297, 'val_loss': 1.4544912200075997}
................16
16 {'acc': 0.67633956998484246, 'loss': 1.186407794627097, 'val_acc': 0.60258313963928589, 'val_loss': 1.4931553995807494}
................17
17 {'acc': 0.68340333486401394, 'loss': 1.1587045634841289, 'val_acc': 0.61208432530563972, 'val_loss': 1.4764266591740334}
................18
18 {'acc': 0.69899452862976863, 'loss': 1.1307497662685899, 'val_acc': 0.61193586924863264, 'val_loss': 1.4735729790639991}
................19
19 {'acc': 0.7150311831058217, 'loss': 1.0822186521010893, 'val_acc': 0.62188242676705474, 'val_loss': 1.4285404070539316}
................20
20 {'acc': 0.72114038566027616, 'loss': 1.0445577932766592, 'val_acc': 0.62277316310909747, 'val_loss': 1.414091370168038}
................21
21 {'acc': 0.7173221337062432, 'loss': 1.0452130673358622, 'val_acc': 0.62559382762591798, 'val_loss': 1.4464855590512238}
................22
22 {'acc': 0.7301769138702503, 'loss': 0.9987807585960341, 'val_acc': 0.63450119048003062, 'val_loss': 1.4086137559804668}
................23
23 {'acc': 0.7448771797216347, 'loss': 0.95722388807744396, 'val_acc': 0.64133016612920513, 'val_loss': 1.403769628452292}
................24
24 {'acc': 0.74144075484134409, 'loss': 0.97495902058003103, 'val_acc': 0.62321852844854431, 'val_loss': 1.4538459602274407}
................25
25 {'acc': 0.74430444318859479, 'loss': 0.95270469323875973, 'val_acc': 0.64697149572916102, 'val_loss': 1.4061093319057003}
................26
26 {'acc': 0.75645920867544858, 'loss': 0.91069001517739478, 'val_acc': 0.64429928769408384, 'val_loss': 1.3849376895931724}
................27
27 {'acc': 0.76333206252498265, 'loss': 0.89032154022107313, 'val_acc': 0.64697150252493996, 'val_loss': 1.3958414923266957}
................28
28 {'acc': 0.76511391157167508, 'loss': 0.87954527692696483, 'val_acc': 0.64281472669927642, 'val_loss': 1.4062913845950238}
................29
29 {'acc': 0.75047728170856376, 'loss': 0.92649942440007715, 'val_acc': 0.63613420696553025, 'val_loss': 1.4315065881701943}
................30
30 {'acc': 0.76575028913021392, 'loss': 0.90054749353128916, 'val_acc': 0.64578384656521037, 'val_loss': 1.3932504549162406}
................31
31 {'acc': 0.76899579890708636, 'loss': 0.86242958117004753, 'val_acc': 0.63821259232994498, 'val_loss': 1.3950959693224583}
................32
32 {'acc': 0.75773195961253914, 'loss': 0.89447888279440624, 'val_acc': 0.62782066918891855, 'val_loss': 1.4261296848786416}
................33
33 {'acc': 0.76670485040803771, 'loss': 0.87483850608151792, 'val_acc': 0.64608076561658234, 'val_loss': 1.4383961007600725}
................34
34 {'acc': 0.78471427012661177, 'loss': 0.80482349152083732, 'val_acc': 0.64845605743186208, 'val_loss': 1.3873902329922856}
................35
35 {'acc': 0.79438717198824027, 'loss': 0.76298944814614522, 'val_acc': 0.65780878718278757, 'val_loss': 1.3954149742307687}
................36
36 {'acc': 0.79693267300518256, 'loss': 0.75247768919871816, 'val_acc': 0.66122327649395141, 'val_loss': 1.3608127478465899}
................37
37 {'acc': 0.79954181028350069, 'loss': 0.74240863644865851, 'val_acc': 0.64934679660547945, 'val_loss': 1.4178781045021185}
................38
38 {'acc': 0.78993254447083472, 'loss': 0.77897155388812311, 'val_acc': 0.64964370886107237, 'val_loss': 1.3815580580976401}
................39
39 {'acc': 0.79648720828179287, 'loss': 0.76012375708434732, 'val_acc': 0.65840261834817371, 'val_loss': 1.3682765909725016}
................40
40 {'acc': 0.80654193764199, 'loss': 0.72761132553422725, 'val_acc': 0.65513658282875731, 'val_loss': 1.4244802000970003}
................41
41 {'acc': 0.76536846245095314, 'loss': 0.8874871870541946, 'val_acc': 0.6566211433988286, 'val_loss': 1.3657507381076768}
................42
42 {'acc': 0.78681430748981629, 'loss': 0.80266669065241936, 'val_acc': 0.65454275987493737, 'val_loss': 1.373937786050194}
................43
43 {'acc': 0.79839633417536138, 'loss': 0.74914528870075825, 'val_acc': 0.6637470345599068, 'val_loss': 1.3428185643039803}
................44
44 {'acc': 0.80978745218298587, 'loss': 0.71421374035227525, 'val_acc': 0.66330166525625556, 'val_loss': 1.3665423481028323}
................45
45 {'acc': 0.81029655245464993, 'loss': 0.70341562651277001, 'val_acc': 0.66137173538253313, 'val_loss': 1.3788899050189309}
................46
46 {'acc': 0.81653302956495299, 'loss': 0.68056199859993227, 'val_acc': 0.6594418097561725, 'val_loss': 1.3837075290090963}
................47
47 {'acc': 0.8158966522795168, 'loss': 0.68275850585691344, 'val_acc': 0.65780878661647257, 'val_loss': 1.3704465800396337}
................48
48 {'acc': 0.81977854402250072, 'loss': 0.6727982569669394, 'val_acc': 0.66077791469397285, 'val_loss': 1.4142298726741038}
................49
49 {'acc': 0.82416953114406044, 'loss': 0.6525786030727958, 'val_acc': 0.65736342071071086, 'val_loss': 1.3973997355640075}

In [13]:
import matplotlib.pyplot as plt
# summarize history for accuracy
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()


# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
#plt.yscale('log')
plt.show()



In [12]:
history2 = model.fit(
        X,  # note: full X, not X_train
        Y,  # note: full Y, not Y_train
        initial_epoch=10,
        epochs=50,
        batch_size=1024,
        #validation_data=(X_valid,Y_valid),
        validation_split=0.3,
        callbacks=[early_stop_cb, checkpoint_cb, batch_print_cb, plot_loss_cb],
        verbose=0
        )


................10
10 {'acc': 0.62479317930191702, 'loss': 1.325670536562461, 'val_acc': 0.56873516048218464, 'val_loss': 1.5279373791325404}
................11
11 {'acc': 0.633193331591968, 'loss': 1.2977991939516518, 'val_acc': 0.56680522834320246, 'val_loss': 1.5148598394597794}
................12
12 {'acc': 0.64019345678839501, 'loss': 1.2726139246818142, 'val_acc': 0.57437648838319577, 'val_loss': 1.5090631645818788}
................13
13 {'acc': 0.64222985826953305, 'loss': 1.2672030341766316, 'val_acc': 0.57407958136601578, 'val_loss': 1.509736040992012}
................14
14 {'acc': 0.65018454890101252, 'loss': 1.247774106348321, 'val_acc': 0.57986935782036131, 'val_loss': 1.4961168780179601}
................15
15 {'acc': 0.65336642468477824, 'loss': 1.221655653089577, 'val_acc': 0.59040974055786311, 'val_loss': 1.4965298113517127}
................16
16 {'acc': 0.63828433204792645, 'loss': 1.2865681063056189, 'val_acc': 0.54572447122134393, 'val_loss': 1.6003667051038946}
................17
17 {'acc': 0.60729286073500444, 'loss': 1.3962189051673144, 'val_acc': 0.57051662948522319, 'val_loss': 1.5516907653446153}
................18
18 {'acc': 0.6249840917805155, 'loss': 1.3293267184577098, 'val_acc': 0.57690023880389973, 'val_loss': 1.5198897114842069}
................19
19 {'acc': 0.63669339470604203, 'loss': 1.2846117633527299, 'val_acc': 0.58120546445710641, 'val_loss': 1.5150359996424152}
................20
20 {'acc': 0.64413898533895997, 'loss': 1.2487637818549993, 'val_acc': 0.58521378210208197, 'val_loss': 1.4990840861463206}
................21
21 {'acc': 0.65273004997864437, 'loss': 1.2265102388720119, 'val_acc': 0.58120546743025969, 'val_loss': 1.5004209033667051}
................22
22 {'acc': 0.65724831308984144, 'loss': 1.2052237513533557, 'val_acc': 0.58090855121046236, 'val_loss': 1.4994009169716733}
................23
23 {'acc': 0.65705740182503225, 'loss': 1.1994872772467566, 'val_acc': 0.59055819307540203, 'val_loss': 1.4857579747175094}
................24
24 {'acc': 0.66653939251288574, 'loss': 1.1744535021522777, 'val_acc': 0.5884798164888686, 'val_loss': 1.4729905960961929}
................25
25 {'acc': 0.67417589488854246, 'loss': 1.1432305624030754, 'val_acc': 0.59768408664331862, 'val_loss': 1.468368214269715}
................26
26 {'acc': 0.68098510863809192, 'loss': 1.128183866314902, 'val_acc': 0.60213776566353661, 'val_loss': 1.4568939163679182}
................27
27 {'acc': 0.6859488352740617, 'loss': 1.1156599567154064, 'val_acc': 0.60095011649958596, 'val_loss': 1.4441816353741281}
................28
28 {'acc': 0.67837596972115821, 'loss': 1.1457204226092921, 'val_acc': 0.58031473108821696, 'val_loss': 1.5130510222600362}
................29
29 {'acc': 0.64515718573056458, 'loss': 1.2503381686704038, 'val_acc': 0.57719715389106729, 'val_loss': 1.5152449933480197}
................30
30 {'acc': 0.65845742694624843, 'loss': 1.1999818457436802, 'val_acc': 0.59664489495216244, 'val_loss': 1.4904167445425183}
................31
31 {'acc': 0.67283950687076843, 'loss': 1.1549198146811328, 'val_acc': 0.59798100357100969, 'val_loss': 1.4772413997355662}
................32
32 {'acc': 0.67793050781224262, 'loss': 1.1357852122066614, 'val_acc': 0.60258314133823054, 'val_loss': 1.4678422311705819}
................33
33 {'acc': 0.69123075181964211, 'loss': 1.1024966772175844, 'val_acc': 0.60169240089040466, 'val_loss': 1.4639159356047025}
................34
34 {'acc': 0.69218531211126211, 'loss': 1.0872343238777402, 'val_acc': 0.60748219065315079, 'val_loss': 1.4609913933588603}
................35
35 {'acc': 0.69765813943613608, 'loss': 1.0664075245850564, 'val_acc': 0.60837292345572536, 'val_loss': 1.4418737860020436}
................36
36 {'acc': 0.69746722632788427, 'loss': 1.063449597258566, 'val_acc': 0.61223278561000871, 'val_loss': 1.4334362292233103}
................37
37 {'acc': 0.69994909062069366, 'loss': 1.0509986834655332, 'val_acc': 0.6187648444730709, 'val_loss': 1.4333996427314015}
................38
38 {'acc': 0.70567646792208982, 'loss': 1.0330596066346618, 'val_acc': 0.61891330506059727, 'val_loss': 1.4290015745616016}
................39
39 {'acc': 0.7059310154027576, 'loss': 1.0189490835220891, 'val_acc': 0.6193586749305634, 'val_loss': 1.4351344471023089}
................40
40 {'acc': 0.71420389634591563, 'loss': 1.0009138890467058, 'val_acc': 0.61965558775247132, 'val_loss': 1.4282162996482397}
................41
41 {'acc': 0.71541300778991623, 'loss': 0.99571936103516867, 'val_acc': 0.61906175828602983, 'val_loss': 1.434627515403222}
................42
42 {'acc': 0.71375843200183509, 'loss': 0.99136287305469473, 'val_acc': 0.61713182784599252, 'val_loss': 1.4376688196087111}
................43
43 {'acc': 0.7235586103537891, 'loss': 0.98009279163584417, 'val_acc': 0.62128859800850689, 'val_loss': 1.4192438287010103}
................44
44 {'acc': 0.72304951117453542, 'loss': 0.97117061109446956, 'val_acc': 0.62618764774816338, 'val_loss': 1.4033473019362062}
................45
45 {'acc': 0.72896779837522763, 'loss': 0.95657066396436752, 'val_acc': 0.6309382445455447, 'val_loss': 1.4101288106265941}
................46
46 {'acc': 0.72559501120527392, 'loss': 0.94741363523569944, 'val_acc': 0.62930522508689168, 'val_loss': 1.4183274266838743}
................47
47 {'acc': 0.73125875147150354, 'loss': 0.9405408080136699, 'val_acc': 0.62589074115571952, 'val_loss': 1.4336141514948031}
................48
48 {'acc': 0.72788596176017828, 'loss': 0.94400330593951021, 'val_acc': 0.62618765015500177, 'val_loss': 1.4063857097240637}
................49
49 {'acc': 0.71725849542694264, 'loss': 0.98811867799378994, 'val_acc': 0.61089667699116146, 'val_loss': 1.4768233740981005}

In [13]:
# OOS = out-of-sample evaluation on the odd-indexed hold-out
score = model.evaluate(X_test, Y_test, verbose=0)
print("OOS %s: %.2f%%" % (model.metrics_names[1], score[1]*100))
print("OOS %s: %.2f" % (model.metrics_names[0], score[0]))



NameErrorTraceback (most recent call last)
<ipython-input-13-e678b5269ea6> in <module>()
----> 1 score=model.evaluate(X_test,Y_test, verbose=0)
      2 print("OOS %s: %.2f%%" % (model.metrics_names[1], score[1]*100))
      3 print("OOS %s: %.2f" % (model.metrics_names[0], score[0]))

NameError: name 'X_test' is not defined
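
The NameError indicates the kernel was restarted at some point (note the cell numbering restarting above), so the even/odd split from earlier no longer exists. Re-creating the hold-out slice lets the evaluation run; a minimal sketch:

X_test = X[1::2]   # re-create the odd-indexed hold-out
Y_test = Y[1::2]
score = model.evaluate(X_test, Y_test, verbose=0)
# caveat: the fits above trained on all of X, so this is not truly out-of-sample
print("OOS %s: %.2f%%" % (model.metrics_names[1], score[1] * 100))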

In [13]:
import matplotlib.pyplot as plt
# summarize history for accuracy
plt.plot(history2.history['acc'])
plt.plot(history2.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()


# summarize history for loss
plt.plot(history2.history['loss'])
plt.plot(history2.history['val_loss'])
plt.title('model loss (log scale)')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
plt.yscale('log')
plt.show()



In [15]:
history3 = model.fit(
        X,  # note: full X, not X_train
        Y,  # note: full Y, not Y_train
        initial_epoch=60,
        epochs=90,
        batch_size=1024,
        #validation_data=(X_valid,Y_valid),
        validation_split=0.3,
        callbacks=[early_stop_cb, checkpoint_cb, batch_print_cb, plot_loss_cb],
        verbose=0
        )


................60
60 {'acc': 0.69396716357036081, 'loss': 1.0682002606852541, 'val_acc': 0.61906176239181299, 'val_loss': 1.4453319599962575}
................61
61 {'acc': 0.7037037048871484, 'loss': 1.0332637657505632, 'val_acc': 0.61564727237275552, 'val_loss': 1.4414969362725554}
................62
62 {'acc': 0.70701285371711209, 'loss': 1.0041220267947522, 'val_acc': 0.62232779267281646, 'val_loss': 1.4268438312050282}
................63
63 {'acc': 0.71197658232548955, 'loss': 0.99611397675313584, 'val_acc': 0.62262470591946051, 'val_loss': 1.4255109716764256}
................64
64 {'acc': 0.71834033401439978, 'loss': 0.97293814648437305, 'val_acc': 0.62069477901889136, 'val_loss': 1.4260232516535671}
................65
65 {'acc': 0.72184039811467771, 'loss': 0.97190722480626812, 'val_acc': 0.61935867436424852, 'val_loss': 1.4186554088162131}
................66
66 {'acc': 0.72578592269766862, 'loss': 0.94906096593772571, 'val_acc': 0.62024940674208684, 'val_loss': 1.4094102914995932}
................67
67 {'acc': 0.69625811521767567, 'loss': 1.0645167365607617, 'val_acc': 0.60243468216649143, 'val_loss': 1.4889457426841355}
................68
68 {'acc': 0.69625811337423305, 'loss': 1.0706597206716062, 'val_acc': 0.60599762640203259, 'val_loss': 1.4689711890141357}
................69
69 {'acc': 0.69950362780902231, 'loss': 1.0387265352221471, 'val_acc': 0.60881829502463625, 'val_loss': 1.4538306377279504}
................70
70 {'acc': 0.71000381935123758, 'loss': 1.0077977372259679, 'val_acc': 0.61624109844130748, 'val_loss': 1.4287678232668697}
................71
71 {'acc': 0.70134911596949534, 'loss': 1.0318226642518822, 'val_acc': 0.60213777005247715, 'val_loss': 1.4894028610401653}
................72
72 {'acc': 0.68760341133524538, 'loss': 1.099111608152886, 'val_acc': 0.60733373091509679, 'val_loss': 1.479072934658114}
................73
73 {'acc': 0.69142166257617699, 'loss': 1.0872183346605733, 'val_acc': 0.60213776608827274, 'val_loss': 1.4834875916075536}
................74
74 {'acc': 0.69994909045379761, 'loss': 1.058706705785972, 'val_acc': 0.60881829021095946, 'val_loss': 1.4884105396950329}
................75
75 {'acc': 0.70306732959687834, 'loss': 1.037833027186035, 'val_acc': 0.61564726826697236, 'val_loss': 1.4449179574599458}
................76
76 {'acc': 0.70364006627405573, 'loss': 1.0262630202470731, 'val_acc': 0.61861639280500436, 'val_loss': 1.4351270128032658}
................77
77 {'acc': 0.70949471811612808, 'loss': 1.0033399172831814, 'val_acc': 0.61831947729310066, 'val_loss': 1.4346362315560746}
................78
78 {'acc': 0.71541300888232673, 'loss': 0.98309921032602254, 'val_acc': 0.62054632282030553, 'val_loss': 1.4234220100978208}
................79
79 {'acc': 0.71961308407149305, 'loss': 0.97791410750281926, 'val_acc': 0.62277315843699943, 'val_loss': 1.434774916981858}
................80
80 {'acc': 0.71840397145163393, 'loss': 0.98115001725258377, 'val_acc': 0.61460808280528012, 'val_loss': 1.4496738825727811}
................81
81 {'acc': 0.7203767358520885, 'loss': 0.95888457092541513, 'val_acc': 0.61980404253526988, 'val_loss': 1.4341611703614441}
................82
82 {'acc': 0.72572228624663826, 'loss': 0.95314105417007133, 'val_acc': 0.6193586749305634, 'val_loss': 1.4350798064909185}
................83
83 {'acc': 0.72871324873249754, 'loss': 0.93137366312430436, 'val_acc': 0.6227731577291058, 'val_loss': 1.4264792727744495}
................84
84 {'acc': 0.73036782408057999, 'loss': 0.93253052497890232, 'val_acc': 0.62470309694702442, 'val_loss': 1.43061292313057}
................85
85 {'acc': 0.7329133257271756, 'loss': 0.90921011269358076, 'val_acc': 0.62425771943180697, 'val_loss': 1.4309742467703559}
................86
86 {'acc': 0.73475881330351256, 'loss': 0.90705162541654194, 'val_acc': 0.62381236003866669, 'val_loss': 1.4259913146071084}
................87
87 {'acc': 0.73711340282806026, 'loss': 0.91535873602437079, 'val_acc': 0.62841448846169157, 'val_loss': 1.4194937236518588}
Epoch 00087: early stopping

In [16]:
import matplotlib.pyplot as plt
# summarize history for accuracy
plt.plot(history3.history['acc'])
plt.plot(history3.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='lower right')
plt.show()


# summarize history for loss
plt.plot(history3.history['loss'])
plt.plot(history3.history['val_loss'])
plt.title('model loss (log scale)')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper right')
plt.yscale('log')
plt.show()
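
Throughout these runs, ModelCheckpoint has been overwriting LSTM-10.1.h5 with the weights from the best val_loss epoch, so the best model (rather than the final, partly overfit one) can be restored from disk; a minimal sketch, assuming the checkpoint file is still present:

from keras.models import load_model
best_model = load_model(NAME + ".h5")  # weights from the best val_loss epoch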



In [ ]: