importing require packages


In [1]:
from __future__ import print_function

import json
import os
import numpy as np
import sys
import h5py

from gensim.models import Word2Vec
from gensim.utils import simple_preprocess
from keras.engine import Input
from keras.layers import Embedding, merge
from keras.models import Model
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import LSTM
from keras.preprocessing import sequence
from embeddings import Embeddings
from keras.callbacks import ModelCheckpoint

from nltk.tokenize import word_tokenize
import random


Using TensorFlow backend.

In [2]:
np.mean([1, 2, 3])


Out[2]:
2.0

Instantiate Embeddings


In [3]:
embeddings = Embeddings(100, 4, 1, 4)


Loading the embeddings from the cache

getting data from preprocessing


In [4]:
word2vec_weights = embeddings.get_weights()
word2index, index2word = embeddings.get_vocabulary()
word2vec_model = embeddings.get_model()
tokenized_indexed_sentences = embeddings.get_tokenized_indexed_sentences()

generating training data


In [5]:
window_size = 5
vocab_size = len(word2index)
print(vocab_size)
#sorted(window_size,reverse=True)
#sentence_max_length = max([len(sentence) for sentence in tokenized_indexed_sentence ])


132184

Defining model


In [6]:
model_weights_path = "../weights/LSTM-2-512-Window-5-Batch-128-Epoch-10-Stateful"
if not os.path.exists(model_weights_path):
    os.makedirs(model_weights_path)

In [7]:
seq_in = []
seq_out = []

# generating dataset
for sentence in tokenized_indexed_sentences:
    sentence_seq_in = []
    sentence_seq_out = []
    for i in range(len(sentence)-window_size-1):
        x = sentence[i:i + window_size]
        y = sentence[i + window_size]
        sentence_seq_in.append(x)#[]
        sentence_seq_out.append(word2vec_weights[y])
    seq_in.append(sentence_seq_in)
    seq_out.append(sentence_seq_out)

# converting seq_in and seq_out into numpy array
seq_in = np.array(seq_in)
seq_out = np.array(seq_out)
n_samples = len(seq_in)
print ("Number of samples : ", n_samples)


Number of samples :  18473

In [8]:
subsamples = np.array([len(seq) for seq in seq_in])
print(np.sum(subsamples))


252670

In [9]:
subsamples_in = np.array([s for seq in seq_in for s in seq])
subsamples_out = np.array([s for seq in seq_out for s in seq])

Train Model


In [10]:
np.expand_dims(seq_in[0][0], axis=1)


Out[10]:
array([[535],
       [592],
       [736],
       [  8],
       [ 25]])

In [11]:
total_batches = int(subsamples_in.shape[0] / 256)

In [12]:
batch_len = []
for i in range(total_batches):
    batch_len.append(len(subsamples_in[i::total_batches]))
min_batch_len = min(batch_len)

In [18]:
# Changes to the model to be done here
model = Sequential()
model.add(Embedding(input_dim=word2vec_weights.shape[0], output_dim=word2vec_weights.shape[1], weights=[word2vec_weights], batch_input_shape=(min_batch_len, 5)))
model.add(LSTM(512, return_sequences=True, stateful=True))
model.add(Dropout(0.2))
model.add(LSTM(512, stateful=True))
model.add(Dropout(0.1))
model.add(Dense(word2vec_weights.shape[1], activation='sigmoid'))
model.load_weights("../weights/LSTM-2-512-Window-5-Batch-128-Epoch-10-Stateful/weights-10-0.9673129916191101")
model.compile(loss='mse', optimizer='adam',metrics=['accuracy'])
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
embedding_3 (Embedding)      (256, 5, 100)             13218400  
_________________________________________________________________
lstm_5 (LSTM)                (256, 5, 512)             1255424   
_________________________________________________________________
dropout_5 (Dropout)          (256, 5, 512)             0         
_________________________________________________________________
lstm_6 (LSTM)                (256, 512)                2099200   
_________________________________________________________________
dropout_6 (Dropout)          (256, 512)                0         
_________________________________________________________________
dense_3 (Dense)              (256, 100)                51300     
=================================================================
Total params: 16,624,324
Trainable params: 16,624,324
Non-trainable params: 0
_________________________________________________________________

In [33]:
print("Train")
for epoch in range(15):
    print("Epoch {0}/{1}".format(epoch+1, 15))
    mean_tr_accuracy = []
    mean_tr_loss = []
    for i in range(total_batches):
        # print("Done with {0}/{1} batches".format(i, total_batches))
        train_accuracy, train_loss = model.train_on_batch(subsamples_in[i::total_batches][:min_batch_len], subsamples_out[i::total_batches][:min_batch_len])
        mean_tr_accuracy.append(train_accuracy)
        mean_tr_loss.append(train_loss)
        model.reset_states()
    mean_accuracy = np.mean(mean_tr_accuracy)
    mean_loss = np.mean(mean_tr_loss)
    print("Mean Accuracy", mean_accuracy)
    print("Mean Loss", mean_loss)
    filepath = "../weights/LSTM-2-512-Window-5-Batch-128-Epoch-10-Stateful/weights-{0}-{1}".format(epoch+1, mean_accuracy, mean_loss)
    model.save_weights(filepath)


Train
Epoch 1/15
Done with 0/986 batches
Done with 1/986 batches
Done with 2/986 batches
Done with 3/986 batches
Done with 4/986 batches
Done with 5/986 batches
Done with 6/986 batches
Done with 7/986 batches
Done with 8/986 batches
Done with 9/986 batches
Done with 10/986 batches
Done with 11/986 batches
Done with 12/986 batches
Done with 13/986 batches
Done with 14/986 batches
Done with 15/986 batches
Done with 16/986 batches
Done with 17/986 batches
Done with 18/986 batches
Done with 19/986 batches
Done with 20/986 batches
Done with 21/986 batches
Done with 22/986 batches
Done with 23/986 batches
Done with 24/986 batches
Done with 25/986 batches
Done with 26/986 batches
Done with 27/986 batches
Done with 28/986 batches
Done with 29/986 batches
Done with 30/986 batches
Done with 31/986 batches
Done with 32/986 batches
Done with 33/986 batches
Done with 34/986 batches
Done with 35/986 batches
Done with 36/986 batches
Done with 37/986 batches
Done with 38/986 batches
Done with 39/986 batches
Done with 40/986 batches
Done with 41/986 batches
Done with 42/986 batches
Done with 43/986 batches
Done with 44/986 batches
Done with 45/986 batches
Done with 46/986 batches
Done with 47/986 batches
Done with 48/986 batches
Done with 49/986 batches
Done with 50/986 batches
Done with 51/986 batches
Done with 52/986 batches
Done with 53/986 batches
Done with 54/986 batches
Done with 55/986 batches
Done with 56/986 batches
Done with 57/986 batches
Done with 58/986 batches
Done with 59/986 batches
Done with 60/986 batches
Done with 61/986 batches
Done with 62/986 batches
Done with 63/986 batches
Done with 64/986 batches
Done with 65/986 batches
Done with 66/986 batches
Done with 67/986 batches
Done with 68/986 batches
Done with 69/986 batches
Done with 70/986 batches
Done with 71/986 batches
Done with 72/986 batches
Done with 73/986 batches
Done with 74/986 batches
Done with 75/986 batches
Done with 76/986 batches
Done with 77/986 batches
Done with 78/986 batches
Done with 79/986 batches
Done with 80/986 batches
Done with 81/986 batches
Done with 82/986 batches
Done with 83/986 batches
Done with 84/986 batches
Done with 85/986 batches
Done with 86/986 batches
Done with 87/986 batches
Done with 88/986 batches
Done with 89/986 batches
Done with 90/986 batches
Done with 91/986 batches
Done with 92/986 batches
Done with 93/986 batches
Done with 94/986 batches
Done with 95/986 batches
Done with 96/986 batches
Done with 97/986 batches
Done with 98/986 batches
Done with 99/986 batches
Done with 100/986 batches
Done with 101/986 batches
Done with 102/986 batches
Done with 103/986 batches
Done with 104/986 batches
Done with 105/986 batches
Done with 106/986 batches
Done with 107/986 batches
Done with 108/986 batches
Done with 109/986 batches
Done with 110/986 batches
Done with 111/986 batches
Done with 112/986 batches
Done with 113/986 batches
Done with 114/986 batches
Done with 115/986 batches
Done with 116/986 batches
Done with 117/986 batches
Done with 118/986 batches
Done with 119/986 batches
Done with 120/986 batches
Done with 121/986 batches
Done with 122/986 batches
Done with 123/986 batches
Done with 124/986 batches
Done with 125/986 batches
Done with 126/986 batches
Done with 127/986 batches
Done with 128/986 batches
Done with 129/986 batches
Done with 130/986 batches
Done with 131/986 batches
Done with 132/986 batches
Done with 133/986 batches
Done with 134/986 batches
Done with 135/986 batches
Done with 136/986 batches
Done with 137/986 batches
Done with 138/986 batches
Done with 139/986 batches
Done with 140/986 batches
Done with 141/986 batches
Done with 142/986 batches
Done with 143/986 batches
Done with 144/986 batches
Done with 145/986 batches
Done with 146/986 batches
Done with 147/986 batches
Done with 148/986 batches
Done with 149/986 batches
Done with 150/986 batches
Done with 151/986 batches
Done with 152/986 batches
Done with 153/986 batches
Done with 154/986 batches
Done with 155/986 batches
Done with 156/986 batches
Done with 157/986 batches
Done with 158/986 batches
Done with 159/986 batches
Done with 160/986 batches
Done with 161/986 batches
Done with 162/986 batches
Done with 163/986 batches
Done with 164/986 batches
Done with 165/986 batches
Done with 166/986 batches
Done with 167/986 batches
Done with 168/986 batches
Done with 169/986 batches
Done with 170/986 batches
Done with 171/986 batches
Done with 172/986 batches
Done with 173/986 batches
Done with 174/986 batches
Done with 175/986 batches
Done with 176/986 batches
Done with 177/986 batches
Done with 178/986 batches
Done with 179/986 batches
Done with 180/986 batches
Done with 181/986 batches
Done with 182/986 batches
Done with 183/986 batches
Done with 184/986 batches
Done with 185/986 batches
Done with 186/986 batches
Done with 187/986 batches
Done with 188/986 batches
Done with 189/986 batches
Done with 190/986 batches
Done with 191/986 batches
Done with 192/986 batches
Done with 193/986 batches
Done with 194/986 batches
Done with 195/986 batches
Done with 196/986 batches
Done with 197/986 batches
Done with 198/986 batches
Done with 199/986 batches
Done with 200/986 batches
Done with 201/986 batches
Done with 202/986 batches
Done with 203/986 batches
Done with 204/986 batches
Done with 205/986 batches
Done with 206/986 batches
Done with 207/986 batches
Done with 208/986 batches
Done with 209/986 batches
Done with 210/986 batches
Done with 211/986 batches
Done with 212/986 batches
Done with 213/986 batches
Done with 214/986 batches
Done with 215/986 batches
Done with 216/986 batches
Done with 217/986 batches
Done with 218/986 batches
Done with 219/986 batches
Done with 220/986 batches
Done with 221/986 batches
Done with 222/986 batches
Done with 223/986 batches
Done with 224/986 batches
Done with 225/986 batches
Done with 226/986 batches
Done with 227/986 batches
Done with 228/986 batches
Done with 229/986 batches
Done with 230/986 batches
Done with 231/986 batches
Done with 232/986 batches
Done with 233/986 batches
Done with 234/986 batches
Done with 235/986 batches
Done with 236/986 batches
Done with 237/986 batches
Done with 238/986 batches
Done with 239/986 batches
Done with 240/986 batches
Done with 241/986 batches
Done with 242/986 batches
Done with 243/986 batches
Done with 244/986 batches
Done with 245/986 batches
Done with 246/986 batches
Done with 247/986 batches
Done with 248/986 batches
Done with 249/986 batches
Done with 250/986 batches
Done with 251/986 batches
Done with 252/986 batches
Done with 253/986 batches
Done with 254/986 batches
Done with 255/986 batches
Done with 256/986 batches
Done with 257/986 batches
Done with 258/986 batches
Done with 259/986 batches
Done with 260/986 batches
Done with 261/986 batches
Done with 262/986 batches
Done with 263/986 batches
Done with 264/986 batches
Done with 265/986 batches
Done with 266/986 batches
Done with 267/986 batches
Done with 268/986 batches
Done with 269/986 batches
Done with 270/986 batches
Done with 271/986 batches
Done with 272/986 batches
Done with 273/986 batches
Done with 274/986 batches
Done with 275/986 batches
Done with 276/986 batches
Done with 277/986 batches
Done with 278/986 batches
Done with 279/986 batches
Done with 280/986 batches
Done with 281/986 batches
Done with 282/986 batches
Done with 283/986 batches
Done with 284/986 batches
Done with 285/986 batches
Done with 286/986 batches
Done with 287/986 batches
Done with 288/986 batches
Done with 289/986 batches
Done with 290/986 batches
Done with 291/986 batches
Done with 292/986 batches
Done with 293/986 batches
Done with 294/986 batches
Done with 295/986 batches
Done with 296/986 batches
Done with 297/986 batches
Done with 298/986 batches
Done with 299/986 batches
Done with 300/986 batches
Done with 301/986 batches
Done with 302/986 batches
Done with 303/986 batches
Done with 304/986 batches
Done with 305/986 batches
Done with 306/986 batches
Done with 307/986 batches
Done with 308/986 batches
Done with 309/986 batches
Done with 310/986 batches
Done with 311/986 batches
Done with 312/986 batches
Done with 313/986 batches
Done with 314/986 batches
Done with 315/986 batches
Done with 316/986 batches
Done with 317/986 batches
Done with 318/986 batches
Done with 319/986 batches
Done with 320/986 batches
Done with 321/986 batches
Done with 322/986 batches
Done with 323/986 batches
Done with 324/986 batches
Done with 325/986 batches
Done with 326/986 batches
Done with 327/986 batches
Done with 328/986 batches
Done with 329/986 batches
Done with 330/986 batches
Done with 331/986 batches
Done with 332/986 batches
Done with 333/986 batches
Done with 334/986 batches
Done with 335/986 batches
Done with 336/986 batches
Done with 337/986 batches
Done with 338/986 batches
Done with 339/986 batches
Done with 340/986 batches
Done with 341/986 batches
Done with 342/986 batches
Done with 343/986 batches
Done with 344/986 batches
Done with 345/986 batches
Done with 346/986 batches
Done with 347/986 batches
Done with 348/986 batches
Done with 349/986 batches
Done with 350/986 batches
Done with 351/986 batches
Done with 352/986 batches
Done with 353/986 batches
Done with 354/986 batches
Done with 355/986 batches
Done with 356/986 batches
Done with 357/986 batches
Done with 358/986 batches
Done with 359/986 batches
Done with 360/986 batches
Done with 361/986 batches
Done with 362/986 batches
Done with 363/986 batches
Done with 364/986 batches
Done with 365/986 batches
Done with 366/986 batches
Done with 367/986 batches
Done with 368/986 batches
Done with 369/986 batches
Done with 370/986 batches
Done with 371/986 batches
Done with 372/986 batches
Done with 373/986 batches
Done with 374/986 batches
Done with 375/986 batches
Done with 376/986 batches
Done with 377/986 batches
Done with 378/986 batches
Done with 379/986 batches
Done with 380/986 batches
Done with 381/986 batches
Done with 382/986 batches
Done with 383/986 batches
Done with 384/986 batches
Done with 385/986 batches
Done with 386/986 batches
Done with 387/986 batches
Done with 388/986 batches
Done with 389/986 batches
Done with 390/986 batches
Done with 391/986 batches
Done with 392/986 batches
Done with 393/986 batches
Done with 394/986 batches
Done with 395/986 batches
Done with 396/986 batches
Done with 397/986 batches
Done with 398/986 batches
Done with 399/986 batches
Done with 400/986 batches
Done with 401/986 batches
Done with 402/986 batches
Done with 403/986 batches
Done with 404/986 batches
Done with 405/986 batches
Done with 406/986 batches
Done with 407/986 batches
Done with 408/986 batches
Done with 409/986 batches
Done with 410/986 batches
Done with 411/986 batches
Done with 412/986 batches
Done with 413/986 batches
Done with 414/986 batches
Done with 415/986 batches
Done with 416/986 batches
Done with 417/986 batches
Done with 418/986 batches
Done with 419/986 batches
Done with 420/986 batches
Done with 421/986 batches
Done with 422/986 batches
Done with 423/986 batches
Done with 424/986 batches
Done with 425/986 batches
Done with 426/986 batches
Done with 427/986 batches
Done with 428/986 batches
Done with 429/986 batches
Done with 430/986 batches
Done with 431/986 batches
Done with 432/986 batches
Done with 433/986 batches
Done with 434/986 batches
Done with 435/986 batches
Done with 436/986 batches
Done with 437/986 batches
Done with 438/986 batches
Done with 439/986 batches
Done with 440/986 batches
Done with 441/986 batches
Done with 442/986 batches
Done with 443/986 batches
Done with 444/986 batches
Done with 445/986 batches
Done with 446/986 batches
Done with 447/986 batches
Done with 448/986 batches
Done with 449/986 batches
Done with 450/986 batches
Done with 451/986 batches
Done with 452/986 batches
Done with 453/986 batches
Done with 454/986 batches
Done with 455/986 batches
Done with 456/986 batches
Done with 457/986 batches
Done with 458/986 batches
Done with 459/986 batches
Done with 460/986 batches
Done with 461/986 batches
Done with 462/986 batches
Done with 463/986 batches
Done with 464/986 batches
Done with 465/986 batches
Done with 466/986 batches
Done with 467/986 batches
Done with 468/986 batches
Done with 469/986 batches
Done with 470/986 batches
Done with 471/986 batches
Done with 472/986 batches
Done with 473/986 batches
Done with 474/986 batches
Done with 475/986 batches
Done with 476/986 batches
Done with 477/986 batches
Done with 478/986 batches
Done with 479/986 batches
Done with 480/986 batches
Done with 481/986 batches
Done with 482/986 batches
Done with 483/986 batches
Done with 484/986 batches
Done with 485/986 batches
Done with 486/986 batches
Done with 487/986 batches
Done with 488/986 batches
Done with 489/986 batches
Done with 490/986 batches
Done with 491/986 batches
Done with 492/986 batches
Done with 493/986 batches
Done with 494/986 batches
Done with 495/986 batches
Done with 496/986 batches
Done with 497/986 batches
Done with 498/986 batches
Done with 499/986 batches
Done with 500/986 batches
Done with 501/986 batches
Done with 502/986 batches
Done with 503/986 batches
Done with 504/986 batches
Done with 505/986 batches
Done with 506/986 batches
Done with 507/986 batches
Done with 508/986 batches
Done with 509/986 batches
Done with 510/986 batches
Done with 511/986 batches
Done with 512/986 batches
Done with 513/986 batches
Done with 514/986 batches
Done with 515/986 batches
Done with 516/986 batches
Done with 517/986 batches
Done with 518/986 batches
Done with 519/986 batches
Done with 520/986 batches
Done with 521/986 batches
Done with 522/986 batches
Done with 523/986 batches
Done with 524/986 batches
Done with 525/986 batches
Done with 526/986 batches
Done with 527/986 batches
Done with 528/986 batches
Done with 529/986 batches
Done with 530/986 batches
Done with 531/986 batches
Done with 532/986 batches
Done with 533/986 batches
Done with 534/986 batches
Done with 535/986 batches
Done with 536/986 batches
Done with 537/986 batches
Done with 538/986 batches
Done with 539/986 batches
Done with 540/986 batches
Done with 541/986 batches
Done with 542/986 batches
Done with 543/986 batches
Done with 544/986 batches
Done with 545/986 batches
Done with 546/986 batches
Done with 547/986 batches
Done with 548/986 batches
Done with 549/986 batches
Done with 550/986 batches
Done with 551/986 batches
Done with 552/986 batches
Done with 553/986 batches
Done with 554/986 batches
Done with 555/986 batches
Done with 556/986 batches
Done with 557/986 batches
Done with 558/986 batches
Done with 559/986 batches
Done with 560/986 batches
Done with 561/986 batches
Done with 562/986 batches
Done with 563/986 batches
Done with 564/986 batches
Done with 565/986 batches
Done with 566/986 batches
Done with 567/986 batches
Done with 568/986 batches
Done with 569/986 batches
Done with 570/986 batches
Done with 571/986 batches
Done with 572/986 batches
Done with 573/986 batches
Done with 574/986 batches
Done with 575/986 batches
Done with 576/986 batches
Done with 577/986 batches
Done with 578/986 batches
Done with 579/986 batches
Done with 580/986 batches
Done with 581/986 batches
Done with 582/986 batches
Done with 583/986 batches
Done with 584/986 batches
Done with 585/986 batches
Done with 586/986 batches
Done with 587/986 batches
Done with 588/986 batches
Done with 589/986 batches
Done with 590/986 batches
Done with 591/986 batches
Done with 592/986 batches
Done with 593/986 batches
Done with 594/986 batches
Done with 595/986 batches
Done with 596/986 batches
Done with 597/986 batches
Done with 598/986 batches
Done with 599/986 batches
Done with 600/986 batches
Done with 601/986 batches
Done with 602/986 batches
Done with 603/986 batches
Done with 604/986 batches
Done with 605/986 batches
Done with 606/986 batches
Done with 607/986 batches
Done with 608/986 batches
Done with 609/986 batches
Done with 610/986 batches
Done with 611/986 batches
Done with 612/986 batches
Done with 613/986 batches
Done with 614/986 batches
Done with 615/986 batches
Done with 616/986 batches
Done with 617/986 batches
Done with 618/986 batches
Done with 619/986 batches
Done with 620/986 batches
Done with 621/986 batches
Done with 622/986 batches
Done with 623/986 batches
Done with 624/986 batches
Done with 625/986 batches
Done with 626/986 batches
Done with 627/986 batches
Done with 628/986 batches
Done with 629/986 batches
Done with 630/986 batches
Done with 631/986 batches
Done with 632/986 batches
Done with 633/986 batches
Done with 634/986 batches
Done with 635/986 batches
Done with 636/986 batches
Done with 637/986 batches
Done with 638/986 batches
Done with 639/986 batches
Done with 640/986 batches
Done with 641/986 batches
Done with 642/986 batches
Done with 643/986 batches
Done with 644/986 batches
Done with 645/986 batches
Done with 646/986 batches
Done with 647/986 batches
Done with 648/986 batches
Done with 649/986 batches
Done with 650/986 batches
Done with 651/986 batches
Done with 652/986 batches
Done with 653/986 batches
Done with 654/986 batches
Done with 655/986 batches
Done with 656/986 batches
Done with 657/986 batches
Done with 658/986 batches
Done with 659/986 batches
Done with 660/986 batches
Done with 661/986 batches
Done with 662/986 batches
Done with 663/986 batches
Done with 664/986 batches
Done with 665/986 batches
Done with 666/986 batches
Done with 667/986 batches
Done with 668/986 batches
Done with 669/986 batches
Done with 670/986 batches
Done with 671/986 batches
Done with 672/986 batches
Done with 673/986 batches
Done with 674/986 batches
Done with 675/986 batches
Done with 676/986 batches
Done with 677/986 batches
Done with 678/986 batches
Done with 679/986 batches
Done with 680/986 batches
Done with 681/986 batches
Done with 682/986 batches
Done with 683/986 batches
Done with 684/986 batches
Done with 685/986 batches
Done with 686/986 batches
Done with 687/986 batches
Done with 688/986 batches
Done with 689/986 batches
Done with 690/986 batches
Done with 691/986 batches
Done with 692/986 batches
Done with 693/986 batches
Done with 694/986 batches
Done with 695/986 batches
Done with 696/986 batches
Done with 697/986 batches
Done with 698/986 batches
Done with 699/986 batches
Done with 700/986 batches
Done with 701/986 batches
Done with 702/986 batches
Done with 703/986 batches
Done with 704/986 batches
Done with 705/986 batches
Done with 706/986 batches
Done with 707/986 batches
Done with 708/986 batches
Done with 709/986 batches
Done with 710/986 batches
Done with 711/986 batches
Done with 712/986 batches
Done with 713/986 batches
Done with 714/986 batches
Done with 715/986 batches
Done with 716/986 batches
Done with 717/986 batches
Done with 718/986 batches
Done with 719/986 batches
Done with 720/986 batches
Done with 721/986 batches
Done with 722/986 batches
Done with 723/986 batches
Done with 724/986 batches
Done with 725/986 batches
Done with 726/986 batches
Done with 727/986 batches
Done with 728/986 batches
Done with 729/986 batches
Done with 730/986 batches
Done with 731/986 batches
Done with 732/986 batches
Done with 733/986 batches
Done with 734/986 batches
Done with 735/986 batches
Done with 736/986 batches
Done with 737/986 batches
Done with 738/986 batches
Done with 739/986 batches
Done with 740/986 batches
Done with 741/986 batches
Done with 742/986 batches
Done with 743/986 batches
Done with 744/986 batches
Done with 745/986 batches
Done with 746/986 batches
Done with 747/986 batches
Done with 748/986 batches
Done with 749/986 batches
Done with 750/986 batches
Done with 751/986 batches
Done with 752/986 batches
Done with 753/986 batches
Done with 754/986 batches
Done with 755/986 batches
Done with 756/986 batches
Done with 757/986 batches
Done with 758/986 batches
Done with 759/986 batches
Done with 760/986 batches
Done with 761/986 batches
Done with 762/986 batches
Done with 763/986 batches
Done with 764/986 batches
Done with 765/986 batches
Done with 766/986 batches
Done with 767/986 batches
Done with 768/986 batches
Done with 769/986 batches
Done with 770/986 batches
Done with 771/986 batches
Done with 772/986 batches
Done with 773/986 batches
Done with 774/986 batches
Done with 775/986 batches
Done with 776/986 batches
Done with 777/986 batches
Done with 778/986 batches
Done with 779/986 batches
Done with 780/986 batches
Done with 781/986 batches
Done with 782/986 batches
Done with 783/986 batches
Done with 784/986 batches
Done with 785/986 batches
Done with 786/986 batches
Done with 787/986 batches
Done with 788/986 batches
Done with 789/986 batches
Done with 790/986 batches
Done with 791/986 batches
Done with 792/986 batches
Done with 793/986 batches
Done with 794/986 batches
Done with 795/986 batches
Done with 796/986 batches
Done with 797/986 batches
Done with 798/986 batches
Done with 799/986 batches
Done with 800/986 batches
Done with 801/986 batches
Done with 802/986 batches
Done with 803/986 batches
Done with 804/986 batches
Done with 805/986 batches
Done with 806/986 batches
Done with 807/986 batches
Done with 808/986 batches
Done with 809/986 batches
Done with 810/986 batches
Done with 811/986 batches
Done with 812/986 batches
Done with 813/986 batches
Done with 814/986 batches
Done with 815/986 batches
Done with 816/986 batches
Done with 817/986 batches
Done with 818/986 batches
Done with 819/986 batches
Done with 820/986 batches
Done with 821/986 batches
Done with 822/986 batches
Done with 823/986 batches
Done with 824/986 batches
Done with 825/986 batches
Done with 826/986 batches
Done with 827/986 batches
Done with 828/986 batches
Done with 829/986 batches
Done with 830/986 batches
Done with 831/986 batches
Done with 832/986 batches
Done with 833/986 batches
Done with 834/986 batches
Done with 835/986 batches
Done with 836/986 batches
Done with 837/986 batches
Done with 838/986 batches
Done with 839/986 batches
Done with 840/986 batches
Done with 841/986 batches
Done with 842/986 batches
Done with 843/986 batches
Done with 844/986 batches
Done with 845/986 batches
Done with 846/986 batches
Done with 847/986 batches
Done with 848/986 batches
Done with 849/986 batches
Done with 850/986 batches
Done with 851/986 batches
Done with 852/986 batches
Done with 853/986 batches
Done with 854/986 batches
Done with 855/986 batches
Done with 856/986 batches
Done with 857/986 batches
Done with 858/986 batches
Done with 859/986 batches
Done with 860/986 batches
Done with 861/986 batches
Done with 862/986 batches
Done with 863/986 batches
Done with 864/986 batches
Done with 865/986 batches
Done with 866/986 batches
Done with 867/986 batches
Done with 868/986 batches
Done with 869/986 batches
Done with 870/986 batches
Done with 871/986 batches
Done with 872/986 batches
Done with 873/986 batches
Done with 874/986 batches
Done with 875/986 batches
Done with 876/986 batches
Done with 877/986 batches
Done with 878/986 batches
Done with 879/986 batches
Done with 880/986 batches
Done with 881/986 batches
Done with 882/986 batches
Done with 883/986 batches
Done with 884/986 batches
Done with 885/986 batches
Done with 886/986 batches
Done with 887/986 batches
Done with 888/986 batches
Done with 889/986 batches
Done with 890/986 batches
Done with 891/986 batches
Done with 892/986 batches
Done with 893/986 batches
Done with 894/986 batches
Done with 895/986 batches
Done with 896/986 batches
Done with 897/986 batches
Done with 898/986 batches
Done with 899/986 batches
Done with 900/986 batches
Done with 901/986 batches
Done with 902/986 batches
Done with 903/986 batches
Done with 904/986 batches
Done with 905/986 batches
Done with 906/986 batches
Done with 907/986 batches
Done with 908/986 batches
Done with 909/986 batches
Done with 910/986 batches
Done with 911/986 batches
Done with 912/986 batches
Done with 913/986 batches
Done with 914/986 batches
Done with 915/986 batches
Done with 916/986 batches
Done with 917/986 batches
Done with 918/986 batches
Done with 919/986 batches
Done with 920/986 batches
Done with 921/986 batches
Done with 922/986 batches
Done with 923/986 batches
Done with 924/986 batches
Done with 925/986 batches
Done with 926/986 batches
Done with 927/986 batches
Done with 928/986 batches
Done with 929/986 batches
Done with 930/986 batches
Done with 931/986 batches
Done with 932/986 batches
Done with 933/986 batches
Done with 934/986 batches
Done with 935/986 batches
Done with 936/986 batches
Done with 937/986 batches
Done with 938/986 batches
Done with 939/986 batches
Done with 940/986 batches
Done with 941/986 batches
Done with 942/986 batches
Done with 943/986 batches
Done with 944/986 batches
Done with 945/986 batches
Done with 946/986 batches
Done with 947/986 batches
Done with 948/986 batches
Done with 949/986 batches
Done with 950/986 batches
Done with 951/986 batches
Done with 952/986 batches
Done with 953/986 batches
Done with 954/986 batches
Done with 955/986 batches
Done with 956/986 batches
Done with 957/986 batches
Done with 958/986 batches
Done with 959/986 batches
Done with 960/986 batches
Done with 961/986 batches
Done with 962/986 batches
Done with 963/986 batches
Done with 964/986 batches
Done with 965/986 batches
Done with 966/986 batches
Done with 967/986 batches
Done with 968/986 batches
Done with 969/986 batches
Done with 970/986 batches
Done with 971/986 batches
Done with 972/986 batches
Done with 973/986 batches
Done with 974/986 batches
Done with 975/986 batches
Done with 976/986 batches
Done with 977/986 batches
Done with 978/986 batches
Done with 979/986 batches
Done with 980/986 batches
Done with 981/986 batches
Done with 982/986 batches
Done with 983/986 batches
Done with 984/986 batches
Done with 985/986 batches
Mean Accuracy [1.1103017, 1.0904403, 1.1443708, 1.0977936, 1.0995524, 1.0935338, 1.0097511, 0.97735041, 1.0771585, 1.1029634, 1.2259653, 1.0201443, 1.1160424, 1.1046675, 1.0485839, 1.1082163, 1.0706687, 0.96200639, 1.0665212, 1.1315149, 1.0305886, 1.1536031, 1.043407, 1.1243076, 1.0206476, 1.1766616, 1.1152487, 1.0780991, 1.0990124, 1.0298845, 1.1511924, 1.1040194, 1.1190172, 1.1240332, 1.076997, 1.1144539, 1.0727763, 1.1649069, 1.1563742, 0.993819, 1.1745353, 1.0985744, 1.0193784, 1.0425407, 1.0968447, 1.1456587, 1.1167411, 1.0468462, 1.1353345, 1.0784574, 1.0480379, 1.0540149, 1.1292493, 1.0949686, 1.1804171, 0.9714939, 1.1270266, 1.162307, 1.035313, 1.0267942, 1.1427001, 1.0485729, 1.0714788, 1.0085521, 1.1011953, 1.110276, 1.1382055, 1.0974526, 1.1679349, 1.0716193, 1.0840117, 1.0282903, 1.1548729, 1.103302, 1.0995862, 1.1347579, 1.1467283, 1.0292888, 1.1220324, 1.0905012, 1.0911238, 1.0702164, 1.0770786, 1.132566, 1.1002724, 1.1206338, 1.100811, 1.0243317, 1.1107531, 1.0978322, 1.0867922, 0.99957085, 1.0948203, 1.1509471, 1.0826905, 1.1172023, 1.0510283, 1.0450692, 1.0478331, 1.0679129, 1.07026, 1.1191425, 1.0443568, 1.1000085, 1.0056753, 1.0933229, 1.0885046, 1.0498338, 1.1512685, 1.0092224, 1.116678, 1.0881746, 0.99505293, 1.0938622, 1.110093, 1.0564135, 1.0684135, 1.1173227, 0.99129832, 1.127938, 1.0424409, 1.0229175, 1.0738523, 1.0351546, 1.0350189, 1.0614367, 1.2593725, 1.1615106, 1.2101798, 0.99011379, 1.1598405, 1.1440736, 1.056018, 1.0501058, 1.1140678, 0.96256059, 1.1807988, 1.0730569, 1.0863326, 1.0720332, 1.0252407, 1.1291511, 0.95084161, 1.0419992, 1.0932224, 1.0941883, 1.0622134, 1.0976321, 1.180124, 1.1745059, 1.1624045, 1.1172627, 1.0266931, 1.1100852, 1.1725646, 1.0793881, 1.0593761, 1.0374014, 1.1630261, 1.1514668, 1.0254663, 0.91694987, 1.0656309, 1.1028512, 1.1082755, 1.1025273, 1.0860653, 1.1408066, 1.0602145, 1.1887639, 1.2155291, 1.1807339, 1.0860296, 1.1410657, 1.0396469, 1.066366, 0.95606625, 1.0524874, 1.1082549, 1.0622725, 1.019974, 1.1921109, 1.2279283, 0.98841155, 1.0791924, 1.0308704, 1.0873079, 1.0828059, 1.1328784, 1.0056374, 1.0512856, 1.027055, 1.0662427, 1.0544949, 1.0525879, 1.0354267, 0.99784207, 1.1391286, 1.0346754, 1.1394432, 1.1466231, 1.087054, 1.0378435, 1.0576613, 1.0548072, 1.040448, 1.0455055, 0.98849618, 1.0609384, 1.125235, 1.0894624, 1.1314431, 1.0271045, 1.0771199, 1.0991153, 1.1489863, 1.1286246, 1.1185671, 1.0799842, 1.0034522, 1.0862094, 0.98389196, 1.090976, 1.109472, 1.1185247, 1.102263, 1.0513165, 1.0145941, 1.0851017, 1.0977523, 1.0641234, 1.0685756, 0.9778676, 1.1506584, 1.06779, 1.0677882, 1.145826, 1.0720024, 1.1283857, 1.0319567, 0.93339729, 1.1126715, 1.0106306, 1.0473735, 1.0015466, 1.081542, 1.0639391, 1.0514035, 1.1512377, 1.003633, 1.1262991, 1.0178947, 1.1035249, 1.0813015, 1.0830033, 1.1584837, 1.1323349, 1.0400474, 1.0431592, 1.1471001, 1.0315915, 1.0071952, 1.1088831, 1.0679176, 1.0501763, 1.1602689, 1.1207685, 1.1337481, 1.0316312, 1.0872523, 1.1409055, 1.0815187, 1.2684617, 1.0042572, 1.0172875, 1.1028814, 1.062283, 1.0064054, 0.98368359, 1.0871847, 1.0966153, 1.1154677, 1.0822533, 1.1081817, 1.0704699, 1.0309837, 1.0736616, 1.1094096, 1.0339247, 1.1283185, 1.1619077, 1.1252346, 1.0819144, 1.0331686, 0.98666686, 1.0282604, 1.1300653, 1.1215012, 1.0332965, 1.0880301, 1.1296123, 1.1281043, 1.0436777, 1.0648134, 1.0366955, 1.0836157, 1.0966758, 1.0904012, 1.0427284, 1.1439919, 1.1225458, 1.0140052, 1.0533123, 1.0407305, 1.1396204, 1.1439799, 1.09039, 1.0870929, 1.1384034, 1.0028362, 1.1173246, 1.1041319, 1.0857878, 1.0683144, 1.0975888, 1.0701706, 1.0177786, 1.0261692, 1.2019004, 1.2033395, 1.0312332, 1.0570027, 1.1105795, 1.1657805, 1.0186617, 1.2286558, 0.95507812, 0.98800105, 1.1599388, 1.0564213, 1.0683227, 1.050787, 1.0832391, 1.1620603, 1.0511445, 1.0736865, 1.0568891, 1.0169175, 1.0739987, 1.0448304, 1.0409367, 1.1529026, 1.1783769, 1.1186838, 1.1339047, 1.0417905, 1.0724268, 1.1032438, 1.0270531, 1.1112926, 1.0661883, 1.0702491, 1.1244802, 1.1375623, 1.0253674, 1.0884249, 1.0643259, 1.0202274, 1.0698537, 0.99765021, 1.0443397, 1.025305, 1.0432451, 1.1266255, 1.0217812, 0.99942011, 1.1095054, 1.1042974, 1.070256, 1.1441725, 1.0208576, 1.1632383, 1.0465057, 1.1556576, 1.028442, 1.1793435, 1.0619907, 1.1184254, 0.99419844, 1.0611072, 1.1689137, 1.0977323, 1.1216731, 1.0672233, 1.1337564, 0.99444723, 1.0653251, 0.98864222, 1.082994, 1.1499904, 1.0520396, 1.0764451, 0.95332712, 1.1085532, 1.1232256, 1.1776015, 1.0728517, 1.0774217, 1.1270549, 1.0134618, 1.0683262, 1.023182, 1.0622832, 1.1048594, 1.0705515, 1.0866899, 1.1025875, 1.1244876, 1.0459155, 1.1765281, 1.0278025, 0.94923371, 1.0838962, 1.0973305, 1.0193124, 1.1392617, 1.0314941, 1.0756313, 1.1057751, 1.1484714, 1.1270614, 1.1149334, 1.0623622, 0.99420214, 1.0296016, 1.0109632, 1.0619636, 1.0473616, 1.1963043, 1.0829167, 1.0600563, 1.1150639, 1.0887063, 1.0317365, 1.0406013, 1.0586025, 1.0664476, 1.1246258, 1.0826863, 0.9593119, 0.96347755, 0.94897121, 1.1452649, 1.165446, 1.0300428, 1.007021, 1.0602434, 1.1463687, 1.1302732, 0.98616123, 1.0035591, 0.96545255, 1.1612713, 0.98743695, 1.1431336, 1.059931, 1.0748886, 1.0748521, 1.0454673, 1.0702882, 1.1135771, 1.0912826, 1.0623438, 0.99666989, 1.1152999, 1.1879454, 1.0412076, 1.0346136, 1.035486, 1.1601257, 1.1702633, 1.0226133, 1.1193333, 1.1360681, 1.0360991, 1.0832001, 1.0516422, 1.0915624, 1.057117, 1.0665298, 1.118153, 1.0388682, 1.0854046, 1.0728185, 1.0892886, 0.98735517, 1.0794575, 1.0713899, 1.1870847, 1.1066616, 1.0217438, 1.0647728, 1.130239, 1.0505936, 1.0869795, 0.99318457, 1.1279689, 0.99396992, 1.0798683, 0.96546668, 0.99601829, 1.0706724, 1.0067374, 1.064865, 1.0109771, 1.1724598, 1.0992078, 1.091292, 1.0128968, 1.0264616, 1.0758173, 1.149931, 1.161409, 1.089149, 1.1217875, 1.0203583, 1.0170388, 1.0526018, 1.1274321, 1.1365473, 1.0603993, 0.96102083, 1.0318861, 1.1166198, 1.0830426, 1.0915968, 1.0676947, 1.065515, 1.0393217, 1.0628707, 1.0882871, 1.0708122, 1.0322695, 1.0085702, 1.176929, 1.0044522, 1.151276, 1.10559, 1.1342309, 1.1229296, 1.0913301, 1.0238286, 1.0297599, 1.04594, 1.1010317, 1.1032305, 0.96343511, 1.0569265, 1.0451005, 1.117869, 1.0532944, 1.0682024, 1.0728683, 1.1371486, 0.99105775, 1.0047748, 1.0881593, 1.0247837, 1.0963264, 1.0401496, 0.98066103, 1.1549909, 1.1041994, 1.0763363, 0.97478789, 1.0392835, 1.0033487, 1.0851457, 1.0794461, 1.1288135, 1.0005772, 0.9314307, 1.0825489, 1.1554005, 1.0969837, 1.0323558, 1.1351953, 0.97963321, 1.0796173, 1.1135619, 1.0966355, 0.96455467, 1.0070938, 1.10048, 1.0853801, 0.94474816, 1.1108749, 0.98505759, 1.1655459, 1.0233712, 1.0983993, 1.0035825, 1.0516355, 1.0157468, 1.1174722, 1.1010485, 0.97888863, 1.0745964, 1.0243071, 1.1347494, 1.12168, 1.0449709, 0.98425967, 1.0926883, 1.0431857, 1.0974472, 0.98181707, 0.91154069, 0.99682748, 1.0279149, 1.1065656, 1.1391711, 1.0160317, 1.0462151, 1.0098293, 1.1723733, 0.9934265, 1.0652227, 1.1599486, 1.0536191, 1.0527493, 1.085758, 1.0840542, 1.2008159, 1.0889395, 1.1704242, 1.0520267, 1.126471, 1.0237174, 0.99628186, 1.1709695, 1.0855982, 1.0139661, 1.1008396, 1.047601, 1.0148726, 1.0050865, 1.1315289, 1.066258, 1.005996, 0.98711383, 1.0725267, 1.0031803, 0.95407426, 1.0784039, 1.0994344, 1.0534981, 1.0515714, 1.0490345, 1.0220594, 1.0783249, 1.0588255, 1.0467515, 1.0880375, 1.0054519, 1.049983, 1.0766637, 1.0181701, 1.1290832, 1.0645685, 1.0320199, 1.1006728, 0.97852933, 1.0203165, 1.0102553, 1.0111167, 1.0383562, 1.1015084, 1.0747156, 1.1210455, 1.0063809, 1.0665774, 1.1280503, 1.0059001, 1.0291848, 0.96252477, 1.0840261, 1.1184919, 1.1213472, 1.074141, 1.0175076, 1.0096436, 1.0471597, 1.0056032, 1.1112518, 1.0714575, 1.0123783, 1.0707375, 1.019753, 1.0342569, 1.002121, 1.1220585, 1.0785465, 1.0492694, 1.0113623, 1.05689, 1.0516253, 1.1279308, 1.0422809, 1.1223613, 1.1220033, 1.1335064, 1.0635519, 1.0070688, 1.126471, 1.0184127, 1.1945505, 1.0823498, 0.98215413, 1.0498099, 1.1645153, 0.96441925, 1.0644528, 1.1112642, 1.0106812, 1.054353, 1.0319445, 0.9783029, 1.0488162, 1.0428387, 1.1197174, 1.1189761, 1.0133764, 1.0698044, 1.0223224, 1.0072882, 1.0050654, 1.0885563, 1.1057591, 1.0787807, 1.0230505, 1.0511084, 1.1346785, 1.108115, 1.056317, 1.1619067, 1.0826857, 1.1007063, 1.0434558, 1.0407863, 1.1627142, 1.0437281, 0.91762829, 1.2576793, 1.0344976, 1.0266771, 1.1155735, 1.0930848, 0.98707473, 1.0368745, 1.0782754, 1.0965266, 1.0896711, 1.1068555, 1.1234695, 0.98588669, 1.0306855, 1.0139863, 1.0296953, 1.1394148, 0.99019974, 1.0774028, 1.0470486, 0.98520923, 1.0699756, 1.1037414, 1.1345868, 1.0197793, 1.0367229, 1.0131289, 1.0878233, 1.0702804, 0.98573291, 1.0783992, 0.95966434, 1.1293402, 1.133687, 1.0704149, 1.0316072, 1.0724014, 1.0192771, 0.98389041, 1.0909228, 1.0606183, 1.2276183, 1.0768467, 1.115495, 1.0778208, 1.0617177, 1.0378673, 1.1182061, 1.0343446, 1.0390819, 1.0755684, 1.1315546, 1.0512323, 0.98615032, 1.1223105, 1.1763475, 1.0125685, 1.0977644, 0.96031934, 1.0543759, 0.94614047, 1.0129502, 1.0778441, 1.0904779, 1.0682405, 1.038434, 1.0194393, 1.0450504, 1.1128974, 1.0671818, 1.1082487, 1.0906031, 1.0408192, 1.0396819, 0.98060584, 1.0006086, 1.074771, 1.0738297, 1.0055243, 1.0654401, 1.0782259, 0.99630308, 1.0926456, 1.1021028, 1.0682067, 1.092437, 0.99487793, 1.0128431, 1.0506917, 1.0373983, 1.1169059, 1.0222991, 0.96713138, 1.1133425, 1.0532444, 1.1400371, 1.062781, 1.0719085, 1.0258403, 1.0677741, 1.1444643, 1.0269883, 1.0787647, 1.0148213, 1.054831, 1.0541043, 0.99700904, 1.0081894, 1.0507309, 1.1301994, 1.1320699, 1.0313846, 0.95552027, 0.95288831, 1.1331165, 1.0507782, 1.0603143, 1.1298902, 1.0275085, 1.1159379, 1.0267086, 1.0611749, 1.0021839, 1.0405446, 1.1114872, 1.0048295, 1.044021, 1.0354761, 0.99572414, 1.0993409, 1.0028015, 1.1500306, 1.0075483, 1.0454562, 1.0763528, 1.0086937, 1.1520329, 1.0848315, 1.0899494, 1.0580423, 1.0981793, 1.0579381, 1.0474265, 1.0644627, 1.0523723, 1.0951954, 0.99854147, 1.0560796, 1.025532, 0.98442316, 1.106953, 1.009764, 1.0003412, 1.0799724, 1.0673587, 0.97990894, 0.98186016, 1.1364028, 1.1172551, 0.97802341, 1.1456492, 1.1095798, 1.0993735, 1.0371717, 1.1290804, 0.98918575, 1.0506568, 1.0938321, 1.0285732, 1.0666987, 0.94455141, 1.0712336, 1.1058445, 0.98265439, 1.1341732, 1.073061, 0.98201251, 1.0583994, 1.073607, 1.0237703, 1.099339, 1.0504768, 1.0570939, 1.0797005, 1.1178825, 1.1028943, 1.0236132, 1.0008876, 1.0244575, 1.1001855, 1.1376653, 1.1172135, 0.9767493, 0.98138231, 1.0689179, 1.1236608, 0.98819906, 0.97387409, 1.0014546, 1.0783875, 1.0179967, 1.1506593, 1.0881889, 1.1545552, 1.0295439, 1.1569765, 1.0758801, 1.0367153, 1.0397949, 1.1149013, 1.1526458, 1.1661124, 1.0950879, 1.0267202, 1.1557652, 1.0727806, 1.0415475, 1.0661039, 1.0569067, 1.0688188, 1.0510166, 1.0605356, 1.0797691, 1.05601, 1.1844459, 1.0910351, 1.0252194, 1.072268, 1.0405824, 1.0974205, 1.053334, 1.0643623, 1.0066621]
Mean Loss [0.07421875, 0.0859375, 0.0625, 0.09765625, 0.08203125, 0.06640625, 0.05859375, 0.07421875, 0.0703125, 0.0546875, 0.0546875, 0.08203125, 0.0703125, 0.07421875, 0.046875, 0.0625, 0.0625, 0.078125, 0.078125, 0.05859375, 0.06640625, 0.07421875, 0.10546875, 0.078125, 0.05078125, 0.08984375, 0.0703125, 0.05859375, 0.0625, 0.09375, 0.0703125, 0.0625, 0.06640625, 0.08203125, 0.07421875, 0.0859375, 0.12109375, 0.09375, 0.12890625, 0.08984375, 0.12109375, 0.09765625, 0.09375, 0.09765625, 0.10546875, 0.05859375, 0.09375, 0.06640625, 0.0859375, 0.1015625, 0.05078125, 0.0625, 0.046875, 0.04296875, 0.05859375, 0.109375, 0.06640625, 0.08203125, 0.0625, 0.078125, 0.06640625, 0.078125, 0.0546875, 0.08203125, 0.08203125, 0.07421875, 0.078125, 0.08984375, 0.09765625, 0.06640625, 0.12109375, 0.08203125, 0.078125, 0.06640625, 0.08203125, 0.06640625, 0.07421875, 0.1171875, 0.0546875, 0.05078125, 0.06640625, 0.07421875, 0.0625, 0.04296875, 0.06640625, 0.05078125, 0.05859375, 0.078125, 0.03515625, 0.0546875, 0.09375, 0.07421875, 0.0625, 0.09765625, 0.05859375, 0.0546875, 0.125, 0.109375, 0.046875, 0.09375, 0.09765625, 0.07421875, 0.04296875, 0.06640625, 0.0390625, 0.03125, 0.06640625, 0.05078125, 0.06640625, 0.06640625, 0.08984375, 0.07421875, 0.07421875, 0.06640625, 0.0859375, 0.08203125, 0.05859375, 0.06640625, 0.0859375, 0.1015625, 0.09375, 0.08203125, 0.078125, 0.0703125, 0.05859375, 0.08984375, 0.07421875, 0.08203125, 0.078125, 0.07421875, 0.08203125, 0.12890625, 0.07421875, 0.10546875, 0.09375, 0.09765625, 0.09375, 0.13671875, 0.07421875, 0.06640625, 0.0703125, 0.08203125, 0.078125, 0.1171875, 0.078125, 0.09375, 0.08984375, 0.08203125, 0.10546875, 0.08984375, 0.078125, 0.09765625, 0.140625, 0.0703125, 0.05078125, 0.1015625, 0.0859375, 0.07421875, 0.0625, 0.08203125, 0.109375, 0.125, 0.08203125, 0.06640625, 0.10546875, 0.1015625, 0.1171875, 0.0859375, 0.1171875, 0.13671875, 0.0859375, 0.09765625, 0.11328125, 0.0703125, 0.08203125, 0.08203125, 0.12109375, 0.10546875, 0.10546875, 0.078125, 0.08203125, 0.11328125, 0.09375, 0.06640625, 0.078125, 0.06640625, 0.12890625, 0.09765625, 0.109375, 0.08984375, 0.0625, 0.09765625, 0.109375, 0.078125, 0.1015625, 0.078125, 0.07421875, 0.08203125, 0.0625, 0.0703125, 0.10546875, 0.10546875, 0.10546875, 0.06640625, 0.09375, 0.078125, 0.08203125, 0.08203125, 0.05078125, 0.0703125, 0.07421875, 0.078125, 0.109375, 0.0390625, 0.0703125, 0.0859375, 0.05859375, 0.09375, 0.078125, 0.0859375, 0.07421875, 0.07421875, 0.0546875, 0.0859375, 0.078125, 0.09375, 0.1015625, 0.08203125, 0.08984375, 0.0625, 0.08984375, 0.0703125, 0.0703125, 0.078125, 0.1015625, 0.0859375, 0.08203125, 0.06640625, 0.08203125, 0.09765625, 0.08984375, 0.07421875, 0.09375, 0.0703125, 0.09765625, 0.0859375, 0.078125, 0.06640625, 0.0859375, 0.078125, 0.08203125, 0.09375, 0.08203125, 0.06640625, 0.07421875, 0.06640625, 0.05859375, 0.0703125, 0.0703125, 0.046875, 0.06640625, 0.07421875, 0.05859375, 0.1015625, 0.09375, 0.08984375, 0.07421875, 0.08203125, 0.078125, 0.078125, 0.09375, 0.08984375, 0.109375, 0.07421875, 0.078125, 0.08203125, 0.08984375, 0.10546875, 0.12109375, 0.109375, 0.09765625, 0.078125, 0.078125, 0.06640625, 0.09375, 0.125, 0.08203125, 0.11328125, 0.078125, 0.07421875, 0.0859375, 0.07421875, 0.08203125, 0.08984375, 0.04296875, 0.0625, 0.02734375, 0.04296875, 0.046875, 0.0703125, 0.0390625, 0.06640625, 0.05859375, 0.0546875, 0.05078125, 0.06640625, 0.109375, 0.0859375, 0.14453125, 0.09765625, 0.09375, 0.08203125, 0.1015625, 0.0625, 0.09765625, 0.0546875, 0.08203125, 0.07421875, 0.08984375, 0.0859375, 0.11328125, 0.09765625, 0.05859375, 0.09765625, 0.08984375, 0.06640625, 0.09765625, 0.109375, 0.10546875, 0.1015625, 0.09375, 0.109375, 0.09765625, 0.10546875, 0.1015625, 0.06640625, 0.09765625, 0.109375, 0.07421875, 0.08984375, 0.06640625, 0.0390625, 0.078125, 0.10546875, 0.08984375, 0.0625, 0.09765625, 0.09765625, 0.078125, 0.0625, 0.109375, 0.09375, 0.09375, 0.078125, 0.07421875, 0.05859375, 0.0703125, 0.06640625, 0.078125, 0.06640625, 0.06640625, 0.10546875, 0.05078125, 0.0703125, 0.0703125, 0.08203125, 0.1171875, 0.109375, 0.078125, 0.08203125, 0.12890625, 0.0703125, 0.12109375, 0.10546875, 0.06640625, 0.109375, 0.09375, 0.09375, 0.09765625, 0.10546875, 0.1015625, 0.09765625, 0.0703125, 0.07421875, 0.09765625, 0.09375, 0.09765625, 0.08203125, 0.09765625, 0.10546875, 0.0859375, 0.0859375, 0.10546875, 0.07421875, 0.09375, 0.08203125, 0.046875, 0.08984375, 0.12109375, 0.05859375, 0.08203125, 0.08203125, 0.08203125, 0.125, 0.11328125, 0.0859375, 0.125, 0.1171875, 0.10546875, 0.11328125, 0.0859375, 0.1015625, 0.07421875, 0.0859375, 0.046875, 0.09375, 0.0625, 0.05078125, 0.05859375, 0.08984375, 0.0390625, 0.03125, 0.06640625, 0.06640625, 0.05078125, 0.08203125, 0.07421875, 0.06640625, 0.05859375, 0.0859375, 0.1015625, 0.0625, 0.08203125, 0.11328125, 0.109375, 0.0859375, 0.09375, 0.0859375, 0.09765625, 0.06640625, 0.1015625, 0.10546875, 0.1015625, 0.08984375, 0.078125, 0.09375, 0.09375, 0.10546875, 0.08984375, 0.12109375, 0.09375, 0.140625, 0.09765625, 0.08984375, 0.09765625, 0.11328125, 0.109375, 0.06640625, 0.1328125, 0.09375, 0.13671875, 0.12109375, 0.125, 0.10546875, 0.11328125, 0.12109375, 0.15234375, 0.08203125, 0.14453125, 0.07421875, 0.0703125, 0.09375, 0.11328125, 0.08984375, 0.08203125, 0.0859375, 0.1328125, 0.11328125, 0.08203125, 0.1015625, 0.10546875, 0.13671875, 0.06640625, 0.09765625, 0.10546875, 0.1015625, 0.08203125, 0.08203125, 0.09375, 0.078125, 0.09375, 0.08984375, 0.09375, 0.078125, 0.10546875, 0.09375, 0.109375, 0.09375, 0.078125, 0.109375, 0.12890625, 0.09765625, 0.08203125, 0.09375, 0.1171875, 0.10546875, 0.125, 0.12109375, 0.09375, 0.09765625, 0.0703125, 0.08984375, 0.08203125, 0.05859375, 0.07421875, 0.08203125, 0.0859375, 0.078125, 0.09375, 0.09375, 0.11328125, 0.08203125, 0.0859375, 0.109375, 0.0625, 0.09765625, 0.125, 0.12890625, 0.125, 0.1171875, 0.12109375, 0.08203125, 0.1328125, 0.12109375, 0.11328125, 0.12109375, 0.11328125, 0.1171875, 0.140625, 0.12109375, 0.1484375, 0.12890625, 0.125, 0.07421875, 0.09765625, 0.12890625, 0.07421875, 0.078125, 0.078125, 0.0625, 0.09375, 0.08984375, 0.07421875, 0.07421875, 0.1015625, 0.07421875, 0.09375, 0.13671875, 0.09765625, 0.13671875, 0.14453125, 0.1015625, 0.1328125, 0.12109375, 0.10546875, 0.09765625, 0.0859375, 0.109375, 0.11328125, 0.09375, 0.11328125, 0.0703125, 0.08203125, 0.13671875, 0.1015625, 0.0859375, 0.1015625, 0.10546875, 0.05859375, 0.08984375, 0.125, 0.10546875, 0.109375, 0.109375, 0.10546875, 0.11328125, 0.1015625, 0.109375, 0.08984375, 0.1328125, 0.1015625, 0.11328125, 0.10546875, 0.12109375, 0.1328125, 0.09765625, 0.09375, 0.08203125, 0.08203125, 0.0625, 0.1015625, 0.13671875, 0.09765625, 0.10546875, 0.08984375, 0.15625, 0.1328125, 0.11328125, 0.12890625, 0.1171875, 0.12890625, 0.10546875, 0.10546875, 0.08203125, 0.09375, 0.0859375, 0.078125, 0.0625, 0.109375, 0.0546875, 0.0859375, 0.11328125, 0.10546875, 0.09375, 0.08203125, 0.0859375, 0.1640625, 0.08984375, 0.11328125, 0.12890625, 0.12890625, 0.109375, 0.1015625, 0.140625, 0.13671875, 0.1015625, 0.0859375, 0.0859375, 0.1015625, 0.08984375, 0.06640625, 0.09375, 0.12890625, 0.10546875, 0.12109375, 0.0703125, 0.1015625, 0.1015625, 0.109375, 0.10546875, 0.1953125, 0.16015625, 0.1015625, 0.12109375, 0.12109375, 0.140625, 0.08203125, 0.07421875, 0.09765625, 0.08203125, 0.078125, 0.1015625, 0.09765625, 0.1171875, 0.0859375, 0.09765625, 0.1171875, 0.09765625, 0.078125, 0.09375, 0.09375, 0.0859375, 0.10546875, 0.09375, 0.10546875, 0.0859375, 0.1015625, 0.08984375, 0.10546875, 0.11328125, 0.1015625, 0.12109375, 0.1171875, 0.11328125, 0.078125, 0.09765625, 0.1171875, 0.078125, 0.078125, 0.10546875, 0.1171875, 0.1015625, 0.08203125, 0.12890625, 0.1015625, 0.09375, 0.08984375, 0.1015625, 0.14453125, 0.12109375, 0.11328125, 0.1015625, 0.140625, 0.0859375, 0.12890625, 0.09765625, 0.109375, 0.09765625, 0.06640625, 0.09375, 0.140625, 0.078125, 0.1171875, 0.0859375, 0.0859375, 0.1015625, 0.109375, 0.12109375, 0.12890625, 0.09375, 0.08984375, 0.1171875, 0.08984375, 0.0703125, 0.13671875, 0.109375, 0.1015625, 0.10546875, 0.125, 0.125, 0.109375, 0.1171875, 0.13671875, 0.10546875, 0.109375, 0.0703125, 0.1171875, 0.1015625, 0.16015625, 0.10546875, 0.13671875, 0.09765625, 0.10546875, 0.1328125, 0.1015625, 0.1171875, 0.13671875, 0.10546875, 0.078125, 0.12109375, 0.08984375, 0.11328125, 0.109375, 0.09375, 0.140625, 0.09765625, 0.109375, 0.125, 0.10546875, 0.08203125, 0.1015625, 0.11328125, 0.08984375, 0.09375, 0.0703125, 0.10546875, 0.078125, 0.1171875, 0.09375, 0.10546875, 0.1171875, 0.0859375, 0.12890625, 0.125, 0.09765625, 0.09765625, 0.09765625, 0.09765625, 0.1015625, 0.11328125, 0.09375, 0.09375, 0.10546875, 0.11328125, 0.13671875, 0.1171875, 0.109375, 0.1328125, 0.08203125, 0.08984375, 0.109375, 0.11328125, 0.078125, 0.1015625, 0.07421875, 0.12109375, 0.15234375, 0.07421875, 0.16015625, 0.13671875, 0.08203125, 0.125, 0.1015625, 0.078125, 0.1015625, 0.0625, 0.078125, 0.0703125, 0.125, 0.12890625, 0.109375, 0.08203125, 0.08984375, 0.0859375, 0.1171875, 0.0859375, 0.109375, 0.078125, 0.109375, 0.125, 0.10546875, 0.09375, 0.10546875, 0.109375, 0.11328125, 0.09765625, 0.08984375, 0.09765625, 0.125, 0.08984375, 0.109375, 0.13671875, 0.08203125, 0.1171875, 0.10546875, 0.13671875, 0.1171875, 0.11328125, 0.0859375, 0.1640625, 0.08984375, 0.10546875, 0.1171875, 0.12109375, 0.125, 0.11328125, 0.1015625, 0.11328125, 0.08203125, 0.109375, 0.1015625, 0.125, 0.1015625, 0.13671875, 0.11328125, 0.10546875, 0.0859375, 0.06640625, 0.06640625, 0.12890625, 0.12109375, 0.10546875, 0.078125, 0.08203125, 0.12890625, 0.12109375, 0.15234375, 0.140625, 0.1171875, 0.1328125, 0.12109375, 0.1015625, 0.1328125, 0.125, 0.10546875, 0.08984375, 0.09765625, 0.08984375, 0.1171875, 0.09375, 0.140625, 0.09765625, 0.12109375, 0.125, 0.09765625, 0.09765625, 0.12109375, 0.1640625, 0.15625, 0.09765625, 0.11328125, 0.15234375, 0.12109375, 0.1015625, 0.11328125, 0.0859375, 0.08984375, 0.07421875, 0.0859375, 0.0859375, 0.10546875, 0.0859375, 0.12890625, 0.0859375, 0.125, 0.10546875, 0.10546875, 0.16015625, 0.1328125, 0.1171875, 0.09765625, 0.1015625, 0.09765625, 0.09375, 0.1015625, 0.09765625, 0.109375, 0.09375, 0.10546875, 0.109375, 0.08984375, 0.125, 0.12109375, 0.09765625, 0.125, 0.08984375, 0.10546875, 0.0859375, 0.125, 0.0859375, 0.13671875, 0.125, 0.13671875, 0.11328125, 0.1015625, 0.15625, 0.1328125, 0.109375, 0.140625, 0.13671875, 0.0859375, 0.1328125, 0.0859375, 0.12890625, 0.09375, 0.14453125, 0.09375, 0.12890625, 0.09765625, 0.140625, 0.125, 0.109375, 0.13671875, 0.1171875, 0.11328125, 0.13671875, 0.1328125, 0.11328125, 0.08984375, 0.13671875, 0.19921875, 0.12109375, 0.0859375, 0.109375, 0.0703125, 0.109375, 0.12890625, 0.07421875, 0.12890625, 0.0859375, 0.09765625, 0.08984375, 0.10546875, 0.08984375, 0.09765625, 0.09375, 0.09375, 0.11328125, 0.09765625, 0.12109375, 0.11328125, 0.1171875, 0.11328125, 0.1171875, 0.1171875, 0.12109375]

model predict


In [19]:
start = 20
samples = subsamples_in[start::total_batches][:min_batch_len]
predictions = model.predict_on_batch(samples)
for index, prediction in enumerate(predictions):
    print(' '.join(index2word[index] for index in samples[index]))
    pred_word = word2vec_model.similar_by_vector(prediction)[0][0]
    sys.stdout.write("*"+pred_word+" \n")


afc champion denver broncos defeated
*the 
in the pro bowl thomas
*farewell 
of the turf collapsed under
*presidency 
and post-game coverage while martin
*friend 
super bowl record 61-yard return
*in 
freely and royal carps in
*the 
host a permanent exhibition of
*the 
the prince of płock bolesław
*tito 
the plain vistula terraces flooded
*antenna 
a registration number depends on
*this 
the rivers of france evolved
*through 
force of franks into the
*marshalcy 
of arguments as to whether
*it 
did not have the rich
*nco 
his earlier illnessin 1875 tesla
*armyduring 
tesla 's patent would probably
*be 
of the tesla coil the
*performance 
the letter s dotdotdot in
*this 
their former star inventor was
*delayed 
said that he had been
*delayed 
invented or envisioned by tesla
*is 
not intended as a practical
*deity 
which are defined using quantum
*database 
would be a major breakthrough
*in 
be responsible for some or
*individual 
demanding university entrance examinations receive
*compass-m1 
line of work including occupational
*mash 
a standard bachelor 's degree
*complete 
the holy roman emperor charles
*tsar 
soul from purgatory also attested
*as 
assembly of the estates of
*the 
the murderous thieving hordes of
*marshalcy 
and marriage servicesluther and his
*kaidu 
speratus this and seven other
*baars 
to us in his word
*enclosure 
and bucer citing as a
*backup 
that ignored other contributory factors
*in 
for the greater southern california
*oat 
a heavily developed urban environment
*oat 
home to los angeles international
*airport 
rename its skydrive cloud storage
*exit 
can get carriage on a
*antenna 
settlement the area now constituting
*pcf-94 
schools and just over 311800
*in 
arrived on 6 april 1652
*to 
the origin of the name
*redemption 
exodus huguenots remained in large
*nesfamicom 
in 1530 william farel was
*this 
most cases was used for
*partly 
in the case of model
*complete 
with reduction gearing although the
*database 
1606 in 1698 thomas savery
*deafness 
and submarines either use a
*antenna 
were monatomic and that the
*beer 
of welding and cutting metal
*oxidant 
the decay of these organisms
*illafter 
materials in the synthesis of
*antenna 
for arms purchases that exacerbated
*database 
the saudis were forced to
*carry 
including avionics telecommunications and computersthe
*styx 
him on the condition that
*she 
descent propellant surface stay consumables
*and 
with the lunar module eliminating
*matter 
walking distance of the surveyor
*kickback 
in lunar orbit the degree
*given 
to join because of lack
*of 
its work ' but it
*has 
justice has the final say
*mind 
second a citizen or company
*therefore 
rights as general principle of
*nco 
that a free trade area
*can 
36 or as a mandatory
*enclosure 
hours a week in germany
*anathema 
fake laser gun services from
*every 
277 million tons 15 of
*35 
that the rainforest could be
*observed 
has long been debated and
*this 
of the epidermis and have
*238u 
their body size than adults
*given 
beroids have cydippid-like larvae it
*can 
the tower district were developed
*in 
san joaquin river parkway 's
*geographia 
in cities along sr 99
*and 
but still has the dlci
*complete 
to a pad or by
*jail 
the mid 14th-century epidemic as
*this 
by over 100 as no
*error 
more than 125 million deaths
*sited 
to date sections of rock
*are 
of these experiments horizontal layers
*therefore 
elizabeth in 1589 a 25-foot
*unaccountably 
market itself was opened in
*march 
canny a versatile word meaning
*always 
another gangster film the 1988
*dayin 
joining up routes that are
*pressing 
the museum is a non-departmental
*job 
the refreshment rooms reinstated as
*the 
in the art library in
*the 
examples of ceramics especially iznik
*transformation 
and cataloged must be audited
*to 
received the talbot hughes collection
*compass-m1 
church of castello at fontignano
*grovespublic 
sculptors such as dalou who
*deafness 
the entire nbc blue network
*shares 
the morning news program good
*job 
with its first broadcast running
*delayed 
september 2005 rumors circulated that
*she 
a war where the battlefield
*was 
the weekly budget for abc
*'s 
and all of mca 's
*licensing 
program on the network 's
*competitor 
roone arledge which featured a
*redemption 
by a development fund for
*straightness 
a third of the revenue
*complete 
that the abc network became
*itself 
secret history of the mongols
*15391600 
turned over to temüjin by
*his 
remnants of the khwarezmid empire
*governorship 
khan decided to give the
*proposal 
fighting in central asia and
*itself 
and historical figures such as
*redemption 
towards the use of trained
*database 
medications in 2013 being specialty
*database 
innate immune system provides an
*error 
not require activation in order
*to 
some autoimmune diseases such as
*unaccountably 
calcidiol into the steroid hormone
*cyanobacterium 
mainly on the observation that
*photoelectrons 
the majority may be powerful
*object 
very man i have to
*referee 
breaking the law for self-gratification
*as 
the design team a number
*of 
average contractor employed fewer than
*60 
to operate outside of government
*database 
who wish to pursue collegetechnical
*individual 
was initially called new college
*job 
when charles william eliot a
*presidency 
program has been continuously among
*this 
populous city proper in florida
*enclosure 
film production center ended the
*afl 
households had children under the
*pcf-94 
growthanother cause is the rate
*at 
an example income inequality did
*question 
run-up in consumption inequality has
*been 
notarize transaction documents or having
*nco 
life process of time lords
*holding 
a television programme while remaining
*archosaurs 
of regeneration to permit the
*database 
mary tamm and lalla ward
*this 
the theme was released in
*march 
before it aired on fox
*in 
doctor who executive producer russell
*bradshaw 
the fourth doctor in the
*super 
of freedom of inquiry and
*the 
faculty and visiting scholars to
*carry 
hunt to model un in
*redemption 
liberalism defender friedrich hayek meteorologist
*ajax 
modeling his government on the
*ode 
and continue much of the
*conspicuous 
tibetan buddhism flourished although taoism
*self-constructed 
damaging to the mongol nation
*15391600 
used woodblocks to print paper
*effective 
north-west ethiopia to the north
*and 
signified the ultimate defeat of
*the 
inquiry the waki commission commended
*its 
on kenya 's capital fm
*shares 
comprising eight years in primary
*universities 
climate change its potential impacts
*and 
group ii said that what
*was 
lockstep situation of the ipcc
*is 
live in shallow water have
*238u 
approximately 150000 base pair genome
*of 
in terms of function the
*database 
only protein complex needed for
*antenna 
other types of plastids which
*are 
's list of primes up
*to 
chebyshev which states that there
*can 
for these insectsthe concept of
*database 
from flowing into the open
*database 
of the second east-west shipping
*enclosure 
the rhone and danube drained
*the 
depended on whether a state
*decisions 
the terms of the scotland
*proposal 
recorded in text form in
*revenue-sharing 
including borrowing powers and some
*individual 
the constituency and the member
*states 
the detriment of progressive moderates
*redemption 
as semi-legal and was the
*prayer 
and in 2007 it drove
*the 
and 1969 in jordan and
*1777 
area it is not unusual
*complete 
imperialism was largely focused on
*15476 
europe in the middle period
*of 
manifest destiny through policies such
*as 
the baltimore christmas conference of
*styx 
liturgical and charismatic and between
*dying 
senior pastor 's right to
*check 
the council also determines whether
*it 
course of study at an
*incident 
and the main effort by
*revenue-sharing 
november 1749 it went up
*to 
had sent a company of
*selman 
so he ordered an attack
*on 
of the seven years '
*presidency 
of constant velocity unless acted
*on 
a parallelogram gives an equivalent
*redemption 
the feynman diagram represents any
*database 
the sources of the fields
*exclusively 
way that the direction and
*matter 
the spring meetings of the
*1875 
oldest quarterback to play in
*itself 
the 50 given to the
*nfl 
denver score at the end
*of 
the fighters of the warsaw
*governorship 
offer tesla to redesign a
*audience 
kind of memory was tesla
*'s 
used to convey the continuum
*of 
type of accountant other than
*every 
english translation of the bible
*'s 
hymn from depths of woe
*tito 
region of california is palm
*avenue 
uk limited is formerly known
*as 
what was required of huguenot
*healthschwarzenegger 
engines became popular for power
*in 
in what year did lavoisier
*pcf-94 
from the us became 5
*month 
crew members were required to
*offer 
defendant in the case of
*itself 
member state nationals by the
*proposal 
live plants were found to
*be 
where is the neighborhood of
*palermo 
who did internet2 partner with
*the 
to precisely date rocks within
*the 
newcastle provides the majority of
*launchalways 
the va library 's collection
*of 
what is the former name
*of 
night football premiered to which
*bell 
the merger between abc and
*1997 
did jochi try to protect
*redemption 
conflict of interest involving doctors
*and 
of infection involves inserting a
*cyanobacterium 
thoreau was not a well
*deity 
is an example of an
*error 
did harvard stadium become the
*overalldetroit 
is the united states at
*the 
the vast disparities in wealth
*to 
the main reason for the
*cyanobacterium 
in the fall quarter of
*1951 
decided not to come visit
*matter 
what do ftsz1 and ftsz2
*guangxi 
and 1 what would be
*active 
when did europe slowly begin
*to 
four years are the ordinary
*appointment 
the ottoman caliphate is believed
*to 
who were two of the
*backup 
did king louis xv respond
*to 

Accuracy


In [20]:
def accuracy():
    start = 27
    count = 0
    correct = 0
    predictions = model.predict_on_batch(subsamples_in[start::total_batches][:min_batch_len])
    ytrue = subsamples_out[start::total_batches][:min_batch_len]
    for index, prediction in enumerate(predictions):
        pred_word = word2vec_model.similar_by_vector(prediction)[0][0]
        true_word = word2vec_model.similar_by_vector(ytrue[index])[0][0]
        sim = word2vec_model.similarity(pred_word, true_word)
        if (sim >= 0.85):
            correct += 1
        count += 1
    accur = float(correct/(count))
    print('accuracy = ', float(accur))

In [21]:
# n = no. of predictions
print(accuracy())


accuracy =  0.1484375
None

In [ ]: