Topic Modeling Amarigna

Simple topic classifying LSTM model to test if it is possible to identify topics in Amharic text


In [1]:
from sklearn.datasets import fetch_20newsgroups
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import keras
from keras.layers import Embedding, Dense, LSTM, GRU
from keras.models import Sequential

from sklearn.model_selection import train_test_split, StratifiedShuffleSplit


Using Theano backend.

A small sample dataset to train and test the model


In [2]:
wikis = [
    """በፈረንሳይ አገር ሃይማኖትን በግብረ ሰዶማዊ ስዕል መስደብ የተፈቀደ ነው። ግብረ ሰዶምን መስደብ ግን ክልክል ነው። ለአባቱ ፍሬድ ትራምፕ ከአምስት ልጆች መሃል አራተኛው ልጃቸው ነበር።""",
    """ኢትዮጵያ ተፈጥሮ ያደላት ሀገር ናት። ከአፍሪካ ትላልቅ ተራራዎች እንዲሁም ከዓለም ከባህር ጠለል በታች በጣም ጥልቅ ከሆኑ ቦታዎች አንዳንዶቹ ይገኙባታል።""", 
    """ሶፍ ዑመር ከአፍሪካ ዋሻዎች ትልቁ ሲሆን ፣ዳሎል ከዓለም በጣም ሙቅ ቦታዎች አንዱ ነው። ወደ ሰማንኒያ የሚቆጠሩ ብሔሮችና ብሔረሰቦች ዛሬ በኢትዮጵያ ይገኛሉ። ከእነዚህም ኦሮሞና አማራ በብዛት ትልቆቹ ናቸው።""", 
    """ኢትዮጵያ በኣክሱም ሓውልት፣ ከአንድ ድንጋይ ተፈልፍለው በተሰሩ ቤተ-ክርስትያኖቹዋ እና በኦሎምፒክ የወርቅ ሜዳልያ አሸናፊ አትሌቶቹዋ ትታወቃለች። """,
   """ የቡና ፍሬ ለመጀመሪያ ጊዜ የተገኘው በኢትዮጵያ ሲሆን ሀገሪቱዋ በቡናና ማር አምራችነት በአፍሪካ ቅድሚያ ይዛለች።""",
    """ኦሮሞ በኢትዮጵያ፣ በኬንያና፣ በሶማሊያ የሚኖር ማህበረሰብ ነዉ። ኦሮሞ ማለት በገዳ ስርኣተ መንገስት ስር ይተዳደር የነበረ በራሱ የሚኮራ ህዘብ ነው፡በ ገዳ መንግስት ስር የ አገር መሪ በየ ፰(ስምንት) አመት""", 
    """የሚቀይር ሲሆን በተለያዩ የ ኦሮሚያ ክልሎች ንጉሳት እንደነበሩም ታሪክ ይነግረናል። በኦሮሚያ ክልሎች ከነበሩት ንጉሳት መካከል የታወቁት አባ ጂፋር ናቸው።""",
    """ኦሮሚያ በ አንድሺ ስምንት መቶ ክፍለዘመን ማለቂያ ላይ በ ንጉስ ሚኒሊክ አማካኝነት ከ አቢሲኒያ ጋር ተቀላቀላ ኢትዮጵያ ስትመሰረት፣የ ቀዳሚ ሃገሩ ህዘብ ብዙ ችግር እና ጭቆና አሳልፏል። የ ኦሮሞን ብዛት አሰመልክቶ""", 
    """፤ሃይል እንዳይኖረው በሚለው ስጋት የቀድሞ መንግስታት የህዝቡን መብት ሳያከበሩ ወይ ባህሉን ሳይይደገፉ ገዝተዋል. ለዛም ነው ብዙ ኦሮምያዊ ህዘብ ከሌሎች የሚወዳችው ህዝቦችህ ተለይቶ መገንጠልን የመረጠው።""",
    """ብዙ የጀርመን ሰዎች በዓለም ዙሪያ ስመ ጥሩ ናቸው። ይህም ደራሲዎች ያኮብ ግሪምና ወንድሙ ቭልሄልም ግሪም፣ ባለቅኔው ዮሐን ቩልፍጋንግ ቮን ጌጠ፣ የፕሮቴስታንት ንቅናቄ መሪ ማርቲን ሉጠር፣ ፈላስፋዎች ካንት፣ """
    """ኒሺና ሄገል፣ ሳይንቲስቱ አልቤርት አይንስታይን፣ ፈጠራ አፍላቂዎች ዳይምለር፣ ዲዝልና ካርል ቤንዝ፣ የሙዚቃ ቃኚዎች ዮሐን ሴባስትያን ባክ፣ ሉድቪግ ቫን ቤትሆቨን፣ ብራምዝ፣ ስትራውስ፣ ቫግነርና ብዙ ሌሎች ይከትታል።""",
    """እጅግ ቁም ነገር የሆነ ጠቃሚ ፈጠራ ማሳተሚያ፤ ዮሐንስ ጉተንቤርግ በሚባል ሰው በ1431 ዓ.ም. ተጀመረ። ስለዚህ ተጓዦች ከውጭ አገር ሲመልሱ በአውሮፓ ያለው ሰው ሁሉ እርግጡን በቶሎ ያውቀው ነበር። አሁን""",
    """ጀርመን «ዶይቸ ቨለ» በሚባል ራዲዮን ጣቢያ ላይ ዜና በእንግሊዝኛ ያሠራጫል። የጀርመን ሕዝብ ባማካኝ ከአውሮፓ ሁሉ ቴሌቪዥንን የሚወድዱ ሲሆኑ ፺ ከመቶ ሰዎች ወይም ሳተላይት ወይም ገመድ ቴሌቪዥን አላቸው""",
    """ጀርመን አንድ ይፋዊ ቋንቋ ብቻ አለው እርሱም ጀርመንኛ ሲሆን ከዚህ ውስጥ ብዙ ልዩ ልዩ የጀርመንኛ ቀበሌኞች በአገሩ ይገኛሉ። ለአንዳንድ ሰዎች ጀርመን «የገጣሚዎችና የአሳቢዎች አገር» በመባል ታውቋል። በዓመታት ላይ በሥነ ጽሑፍ፣ በሥነ ጥበብ፣ በፍልስፍና፣ በሙዚቃ፣ በሲኒማ፣ """,
    """ናልድ ትራምፕ ከኒው ዮርክ ከአምስቱ ቀጠናዎች አንዱ በሆነው በክዊንስ በእ.ኤ.አ. ጁን 14 1946 ተወለደ። ለእናቱ ሜሪ አን እና ለአባቱ ፍሬድ ትራምፕ ከአምስት ልጆች መሃል አራተኛው ልጃቸው ነበር። """,
   """ እናቱ የተወለደችው በስኮትላንድ ሉዊስ ኤንድ ሃሪስ ደሴት ላይ ቶንግ በተባለው ስፍራ ነው። በእ.ኤ.አ. 1930 በ18 ዓመቷ ዩናይትድ ስቴትስን ጎበኘች እናም ከፍሬድ ትራምፕ ጋር ተገናኘች። በእ.ኤ.አ. 1936 ትዳር ይዘው """,
  """  በጃማይካ ኢስቴትስ ክዊንስ መኖር ጀመሩ። በዚህም ስፍራ ፍሬድ ትራምፕ ታላቅ የሪልኢስቴት ገንቢ ሆኖ ነበር። ዶናልድ ትራምፕ፥ ሮበርት የተባለ አንድ ወንድም፣ ሜሪአን እና ኤሊዛቤት የተባሉ ሁለት እህቶች አሉት።""", 
    """ፍሬድ ጁኒየር የተባለ ወንድሙ ደግሞ ከአልኮል ሱስ ጋር በተያያዘ ምክንያት ሕይወቱ አልፏል ፤ ይህም ከአልኮሆል መጠጥ እና ከትምባሆ እንዲታቀብ እንዳደረገውም ዶናልድ ትራምፕ ይናገራል"""
]

nb_words = 10000
max_seq_len = 1000

wlabs = [0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 0, 0, 0, 0]

In [3]:
validx = [
    """በእ.ኤ.አ. ጁን 16 2015 ላይ ደግሞ ለፕሬዚደንትነት እንደሚወዳደር አሳወቀ። ይህን ጊዜ ግን የሪፐብሊካን ፓርቲን በመወከል ነው። በስደት፣ በነፃ ገበያ እና በጦር ጣልቃ ገብነት ላይ ባለው ተቃውሞ ምክንያት ታዋቂ ሆኗል። በእነኚህ አነጋጋሪ አስተያየቶቹ""",

    """(እ.ኤ.አ. ጁን 14 ቀን 1946 ተወለደ) አሜሪካዊ ነጋዴ ፣ ፖለቲከኛ ፣ በቴሌቪዥን ፕሮግራሞቹ ታዋቂ እና 45ኛው የዩናይትድ ስቴትስ ኦፍ አሜሪካ ፕሬዚደንት ነው። ሥልጣኑንም እ.ኤ.አ. በጃኑዌሪ 20 ቀን 2017 ተረክቧል።""",

    """የኬልቶች ከተማ መጀመርያ «ሉኮቶኪያ» ተብሎ በስትራቦን ተመዘገበ። ፕቶሎመይ ደግሞ ከተማውን «ለውኮተኪያ» አለው። ዩሊዩስ ቄሳር አገሩን ሲይዘው ሥፍራውን በሮማይስጥ «ሉቴቲያ» አለው። የኖረበት ጎሣ ፓሪሲ ስለ ተባሉ፣ የከተማው ስም በሙሉ «ሉቴቲያ ፓሪሶሩም» («የፓሪሲ ሉቴቲያ») ተባለ።""",

    """ባቫሪያ ፣ በሙሉ ስሙ ነጻ የባቫሪያ አስተዳደር (ጀርመንኛ፦ Freistaat Bayern /ፍሪሽታት ባየርን/) ደቡብ ምስራቅ ጀርመን ውስጥ የሚገኝ ክፍለ ሃገር ነው። 70,548 ስኩየር ኪ/ሜትር ስፋት ሲኖረው፣ ከማናቸውም የጀርመን ክፍላተ ሃገሮች የበለጠ የቆዳ ስፋት አለው። ይህ ግዛት የጀርመንን አጠቃላይ ስፋት አንድ አምስተኛ (20%) ይሸፍናል።""",

    """ ከኖርስ ራይን ዌስትፋሊያ ክፍለሃገር ቀጥሎ ባቫሪያ ብዙውን የጀርመን ህዝብ ይይዛል። (12.5 ሚሊየን)። ሙኒክ የባቫሪያ ዋና ከተማ ነው።""",

    """የታሪክ ፀሀፊ የሆነው እንደ ዶናልድ ሰቨን ጎበና በደቡብ በኩል የተደረገው የማስፋፋት ስራ ማለትም ኦሮምኛ ተናጋሪውን ህዝብ ወደ ሚኒሊክ ሀሳብ የተዋሀደው በራስ ጎበና ነበር ይህ እንዲ እንዳለ በኢትዮጵያ ታዋቂ የሆኑት የኦሮሞ አስተዳደር ሹማምንት ወታደሮችም እረድተውት ነበር። በተጨማሪም የኦሮሞ ህዝብ ደቡብ ሲዳማን እና የጉራጌን ህዝብ ወታደር ድል ነስተዋል። """

]

validy = [ 0, 0, 0, 3, 3, 1]


X = wikis + validx
y = wlabs + validy

Preparing the data for the model

  • Tokenizing the text - Identifying unique words, creating a dictionary and counting their frequency in the list of documents (texts) in the training data.
  • One-hot encoding the labels (topics)
  • Splitting the data into train and test(validation) sets

In [6]:
tokenizer = Tokenizer(num_words=nb_words)
tokenizer.fit_on_texts(X)
sequences = Tokenizer.texts_to_sequences(tokenizer, X)
word_index = tokenizer.word_index

ydata = keras.utils.to_categorical(y)
input_data = pad_sequences(sequences, maxlen=max_seq_len)

Xtrain, Xvalid, ytrain, yvalid = train_test_split(input_data, ydata, test_size=0.4)

Model definition and training


In [8]:
embedding_vector_length = 64
model = Sequential()
model.add(Embedding(len(word_index)+1, embedding_vector_length, input_length=max_seq_len, embeddings_initializer='glorot_normal', 
                    embeddings_regularizer=keras.regularizers.l2(0.01)))
model.add(LSTM(80, dropout=0.25))
model.add(Dense(4, activation='softmax'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
print(model.summary())


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
embedding_2 (Embedding)      (None, 1000, 64)          33344     
_________________________________________________________________
lstm_2 (LSTM)                (None, 80)                46400     
_________________________________________________________________
dense_2 (Dense)              (None, 4)                 324       
=================================================================
Total params: 80,068.0
Trainable params: 80,068
Non-trainable params: 0.0
_________________________________________________________________
None

In [ ]:
model.fit(Xtrain, ytrain, validation_data=(Xvalid, yvalid), nb_epoch=75, batch_size=32)


Train on 13 samples, validate on 10 samples
Epoch 1/75
13/13 [==============================] - 5s - loss: 1.6172 - acc: 0.7500 - val_loss: 1.5891 - val_acc: 0.7500
Epoch 2/75
13/13 [==============================] - 5s - loss: 1.5858 - acc: 0.7500 - val_loss: 1.5600 - val_acc: 0.7500
Epoch 3/75
13/13 [==============================] - 6s - loss: 1.5524 - acc: 0.7500 - val_loss: 1.5316 - val_acc: 0.7500
Epoch 4/75
13/13 [==============================] - 6s - loss: 1.5212 - acc: 0.7500 - val_loss: 1.5040 - val_acc: 0.7500
Epoch 5/75
13/13 [==============================] - 6s - loss: 1.4912 - acc: 0.7500 - val_loss: 1.4771 - val_acc: 0.7500
Epoch 6/75
13/13 [==============================] - 6s - loss: 1.4622 - acc: 0.7500 - val_loss: 1.4510 - val_acc: 0.7500
Epoch 7/75
13/13 [==============================] - 6s - loss: 1.4314 - acc: 0.7500 - val_loss: 1.4256 - val_acc: 0.7500
Epoch 8/75
13/13 [==============================] - 6s - loss: 1.4034 - acc: 0.7500 - val_loss: 1.4010 - val_acc: 0.7500
Epoch 9/75
13/13 [==============================] - 6s - loss: 1.3733 - acc: 0.7500 - val_loss: 1.3772 - val_acc: 0.7500
Epoch 10/75
13/13 [==============================] - 6s - loss: 1.3463 - acc: 0.7500 - val_loss: 1.3543 - val_acc: 0.7500
Epoch 11/75
13/13 [==============================] - 6s - loss: 1.3182 - acc: 0.7500 - val_loss: 1.3323 - val_acc: 0.7500
Epoch 12/75
13/13 [==============================] - 6s - loss: 1.2918 - acc: 0.7500 - val_loss: 1.3115 - val_acc: 0.7500
Epoch 13/75
13/13 [==============================] - 7s - loss: 1.2617 - acc: 0.7500 - val_loss: 1.2921 - val_acc: 0.7500
Epoch 14/75
13/13 [==============================] - 7s - loss: 1.2354 - acc: 0.7500 - val_loss: 1.2747 - val_acc: 0.7500
Epoch 15/75
13/13 [==============================] - 7s - loss: 1.2050 - acc: 0.7500 - val_loss: 1.2609 - val_acc: 0.7500
Epoch 16/75
13/13 [==============================] - 6s - loss: 1.1773 - acc: 0.7500 - val_loss: 1.2553 - val_acc: 0.7500
Epoch 17/75
13/13 [==============================] - 1s - loss: 1.1454 - acc: 0.7500 - val_loss: 1.2825 - val_acc: 0.7500
Epoch 18/75
13/13 [==============================] - 1s - loss: 1.1283 - acc: 0.7885 - val_loss: 1.2576 - val_acc: 0.8000
Epoch 19/75
13/13 [==============================] - 1s - loss: 1.0840 - acc: 0.8654 - val_loss: 1.2422 - val_acc: 0.8000
Epoch 20/75
13/13 [==============================] - 1s - loss: 1.0652 - acc: 0.8654 - val_loss: 1.2230 - val_acc: 0.8000
Epoch 21/75
13/13 [==============================] - 1s - loss: 1.0362 - acc: 0.8654 - val_loss: 1.2040 - val_acc: 0.8000
Epoch 22/75
13/13 [==============================] - 1s - loss: 1.0143 - acc: 0.8654 - val_loss: 1.1877 - val_acc: 0.7750
Epoch 23/75
13/13 [==============================] - 2s - loss: 0.9854 - acc: 0.8654 - val_loss: 1.1796 - val_acc: 0.7500
Epoch 24/75
13/13 [==============================] - 1s - loss: 0.9607 - acc: 0.8654 - val_loss: 1.1768 - val_acc: 0.7500
Epoch 25/75
13/13 [==============================] - 2s - loss: 0.9277 - acc: 0.8462 - val_loss: 1.1900 - val_acc: 0.6750
Epoch 26/75
13/13 [==============================] - 1s - loss: 0.8799 - acc: 0.8462 - val_loss: 1.1751 - val_acc: 0.7250
Epoch 27/75
13/13 [==============================] - 2s - loss: 0.8456 - acc: 0.8462 - val_loss: 1.1962 - val_acc: 0.6750
Epoch 28/75
13/13 [==============================] - 1s - loss: 0.8132 - acc: 0.8462 - val_loss: 1.1632 - val_acc: 0.7250
Epoch 29/75
13/13 [==============================] - 2s - loss: 0.8626 - acc: 0.8846 - val_loss: 1.1435 - val_acc: 0.7250
Epoch 30/75
13/13 [==============================] - 1s - loss: 0.8425 - acc: 0.8269 - val_loss: 1.1103 - val_acc: 0.7250
Epoch 31/75
13/13 [==============================] - 3s - loss: 0.7338 - acc: 0.9038 - val_loss: 1.0897 - val_acc: 0.7000
Epoch 32/75
13/13 [==============================] - 2s - loss: 0.7251 - acc: 0.8846 - val_loss: 1.0876 - val_acc: 0.6500
Epoch 33/75
13/13 [==============================] - 2s - loss: 0.7122 - acc: 0.9231 - val_loss: 1.0994 - val_acc: 0.6250
Epoch 34/75
13/13 [==============================] - 3s - loss: 0.6812 - acc: 0.9038 - val_loss: 1.1155 - val_acc: 0.6000
Epoch 35/75
13/13 [==============================] - 2s - loss: 0.6848 - acc: 0.8846 - val_loss: 1.1213 - val_acc: 0.6500
Epoch 36/75
13/13 [==============================] - 4s - loss: 0.6546 - acc: 0.8846 - val_loss: 1.0895 - val_acc: 0.6500
Epoch 37/75
13/13 [==============================] - 3s - loss: 0.6274 - acc: 0.8846 - val_loss: 1.0389 - val_acc: 0.6500
Epoch 38/75
13/13 [==============================] - 3s - loss: 0.6102 - acc: 0.9038 - val_loss: 1.0116 - val_acc: 0.6750
Epoch 39/75
13/13 [==============================] - 3s - loss: 0.6049 - acc: 0.9038 - val_loss: 0.9906 - val_acc: 0.6750
Epoch 40/75
13/13 [==============================] - 3s - loss: 0.5628 - acc: 0.9231 - val_loss: 0.9951 - val_acc: 0.6750
Epoch 41/75
13/13 [==============================] - 3s - loss: 0.5532 - acc: 0.9038 - val_loss: 1.0225 - val_acc: 0.6500
Epoch 42/75
13/13 [==============================] - 3s - loss: 0.5387 - acc: 0.9231 - val_loss: 1.0694 - val_acc: 0.6250
Epoch 43/75
13/13 [==============================] - 2s - loss: 0.5243 - acc: 0.9231 - val_loss: 1.1264 - val_acc: 0.6250
Epoch 44/75
13/13 [==============================] - 2s - loss: 0.5015 - acc: 0.9231 - val_loss: 1.1467 - val_acc: 0.6250
Epoch 45/75
13/13 [==============================] - 1s - loss: 0.4901 - acc: 0.9038 - val_loss: 1.0720 - val_acc: 0.6250
Epoch 46/75
13/13 [==============================] - 2s - loss: 0.4840 - acc: 0.9423 - val_loss: 0.9466 - val_acc: 0.6500
Epoch 47/75
13/13 [==============================] - 5s - loss: 0.4629 - acc: 0.9615 - val_loss: 0.9004 - val_acc: 0.7250
Epoch 48/75
13/13 [==============================] - 5s - loss: 0.4562 - acc: 0.9615 - val_loss: 0.8839 - val_acc: 0.7250
Epoch 49/75
13/13 [==============================] - 6s - loss: 0.4432 - acc: 0.9615 - val_loss: 0.8856 - val_acc: 0.7000
Epoch 50/75
13/13 [==============================] - 7s - loss: 0.4302 - acc: 0.9615 - val_loss: 0.9145 - val_acc: 0.6750
Epoch 51/75
13/13 [==============================] - 7s - loss: 0.4183 - acc: 0.9615 - val_loss: 0.9933 - val_acc: 0.6500
Epoch 52/75
13/13 [==============================] - 6s - loss: 0.4053 - acc: 0.9615 - val_loss: 1.1995 - val_acc: 0.6500
Epoch 53/75
13/13 [==============================] - 7s - loss: 0.4162 - acc: 0.9038 - val_loss: 0.9077 - val_acc: 0.6500
Epoch 54/75
13/13 [==============================] - 8s - loss: 0.3838 - acc: 0.9615 - val_loss: 0.8365 - val_acc: 0.6750
Epoch 55/75
13/13 [==============================] - 7s - loss: 0.3796 - acc: 0.9615 - val_loss: 0.8052 - val_acc: 0.7500
Epoch 56/75
13/13 [==============================] - 7s - loss: 0.3707 - acc: 0.9615 - val_loss: 0.7891 - val_acc: 0.7500
Epoch 57/75
13/13 [==============================] - 7s - loss: 0.3646 - acc: 0.9615 - val_loss: 0.7835 - val_acc: 0.7250
Epoch 58/75
13/13 [==============================] - 8s - loss: 0.3532 - acc: 0.9615 - val_loss: 0.7851 - val_acc: 0.7000
Epoch 59/75
13/13 [==============================] - 8s - loss: 0.3419 - acc: 0.9615 - val_loss: 0.7979 - val_acc: 0.7000
Epoch 60/75
13/13 [==============================] - 7s - loss: 0.3326 - acc: 0.9423 - val_loss: 0.8159 - val_acc: 0.7000
Epoch 61/75
13/13 [==============================] - 7s - loss: 0.3243 - acc: 0.9615 - val_loss: 0.8361 - val_acc: 0.7000
Epoch 62/75
13/13 [==============================] - 7s - loss: 0.3123 - acc: 0.9615 - val_loss: 0.8619 - val_acc: 0.7000
Epoch 63/75
13/13 [==============================] - 7s - loss: 0.3006 - acc: 0.9615 - val_loss: 0.9011 - val_acc: 0.7000
Epoch 64/75
13/13 [==============================] - 7s - loss: 0.2906 - acc: 0.9423 - val_loss: 0.9527 - val_acc: 0.6500
Epoch 65/75
13/13 [==============================] - 7s - loss: 0.2782 - acc: 0.9615 - val_loss: 1.0239 - val_acc: 0.6500
Epoch 66/75
13/13 [==============================] - 7s - loss: 0.2776 - acc: 0.9615 - val_loss: 1.0632 - val_acc: 0.6500
Epoch 67/75
13/13 [==============================] - 7s - loss: 0.2605 - acc: 0.9615 - val_loss: 1.0878 - val_acc: 0.6500
Epoch 68/75
13/13 [==============================] - 7s - loss: 0.2490 - acc: 0.9615 - val_loss: 1.1077 - val_acc: 0.6500
Epoch 69/75
13/13 [==============================] - 7s - loss: 0.2456 - acc: 0.9615 - val_loss: 1.0929 - val_acc: 0.7000
Epoch 70/75
13/13 [==============================] - 7s - loss: 0.2315 - acc: 0.9808 - val_loss: 1.1197 - val_acc: 0.6750
Epoch 71/75
13/13 [==============================] - 9s - loss: 0.2265 - acc: 0.9808 - val_loss: 1.1258 - val_acc: 0.6750
Epoch 72/75
13/13 [==============================] - 8s - loss: 0.2157 - acc: 1.0000 - val_loss: 1.0876 - val_acc: 0.6500
Epoch 73/75
13/13 [==============================] - 9s - loss: 0.2163 - acc: 1.0000 - val_loss: 1.0815 - val_acc: 0.7000
Epoch 74/75
13/13 [==============================] - 7s - loss: 0.2082 - acc: 1.0000 - val_loss: 1.1026 - val_acc: 0.7000
Epoch 75/75
13/13 [==============================] - 8s - loss: 0.1951 - acc: 1.0000 - val_loss: 1.1398 - val_acc: 0.6250
Out[ ]:
<keras.callbacks.History at 0x229f5eb8>

In [ ]: