In [ ]:

import numpy as np import pandas as pd import tensorflow as tf import random import re import time

Config


In [2]:
vocab_size = 400000
embedding_dim = 50
hidden_layer_size = 80
num_rows = 1000
num_steps = 3
batch_size = 100
num_batches = num_rows/batch_size

Preprocessing


In [3]:
def read_glove(filename):
    file = open(filename)    
    embedding = np.ndarray([vocab_size, embedding_dim])
    word_id_dict = {}
    id_word_dict = {}
    id = 0
    for line in file:
        items = line.split(' ')
        word_id_dict[items[0]] = id
        id_word_dict[id] = items[0]
        embedding[id,:] = np.array([float(i) for i in items[1:]])
        id += 1
    file.close()
    return(embedding, word_id_dict, id_word_dict)

embedding, word_id_dict, id_word_dict = read_glove("../../../datasets/glove_6b/glove.6B.50d.txt")

def read_data(filename):
    file = open(filename)
    chapter_input = []
    data = []
    for line in file:
        items = re.sub('[?.]', '', line).lower().split()
        if items[0] == '1':
            chapter_input = items[1:] + ['.']
        elif items[-1].isdigit():
            data.append({'I': chapter_input,
                         'Q': items[1:-2],
                         'A': [items[-2]]})
        else:
            chapter_input = chapter_input + items[1:] + ['.']
    file.close()
    return(data)

def max_len(data, iqa):
    max_len = 0
    for i in data:
        max_len = max(max_len, len(i[iqa]))
    return(max_len)

def embed_and_pad(data):
    inputs = np.zeros([len(data), max_len(data, 'I'), embedding_dim])
    questions = np.zeros([len(data), max_len(data, 'Q'), embedding_dim])
    for index, row in enumerate(data):
        inputs[index,0:len(row['I']),:] = embedding[[word_id_dict[token] for token in row['I']]]
        questions[index,0:len(row['Q']),:] = embedding[[word_id_dict[token] for token in row['Q']]]
    return((inputs, questions))

def get_answer_index(data):
    answers = np.zeros(num_rows)
    for index, row in enumerate(data):
        answers[index] = word_id_dict[row['A'][0]]
    return(answers)

def get_input_sequence_lengths(data):
    input_sequence_lengths = []
    for i in data:
        input_sequence_lengths.append(len(i['I']))
    return(input_sequence_lengths)

def get_input_period_boolean(data):
    input_period_boolean = np.zeros((num_rows, max_input_len), dtype=bool)
    for index, row in enumerate(data):
        input_period_boolean[index, [i for i, j in enumerate(row['I']) if j=='.']] = True
    return(input_period_boolean)

def get_max_facts(input_period_boolean):
    max_facts = max([sum(i) for i in input_period_boolean])
    return(max_facts)

In [4]:
data = read_data("../../../datasets/facebook_babi/tasks_1-20_v1-2/en/qa1_single-supporting-fact_train.txt")
max_input_len = max_len(data, 'I')
max_question_len = max_len(data, 'Q')
max_answer_len = max_len(data, 'A')
data_inputs, data_questions = embed_and_pad(data)
data_answers = get_answer_index(data)
input_sequence_lengths = get_input_sequence_lengths(data)
input_period_boolean = get_input_period_boolean(data)
max_facts = get_max_facts(input_period_boolean)

DMN Implementation


In [5]:
## Placeholders
inputs = tf.placeholder(tf.float32, shape=[batch_size, max_input_len, embedding_dim])
questions = tf.placeholder(tf.float32, shape=[batch_size, max_question_len, embedding_dim])
answers = tf.placeholder(tf.int32, shape=[batch_size])
periods = tf.placeholder(tf.bool, shape=[batch_size, max_input_len])
input_sequence_lengths_placeholder = tf.placeholder(tf.int32, shape=[batch_size])

In [8]:
# with tf.variable_scope('trial7') as scope:
# #     gru_cell_2 = tf.contrib.rnn.GRUCell(hidden_layer_size)
#     trial_data = tf.constant(np.random.rand(10, 1000, 320), dtype = tf.float32)
    
#     w1 = tf.get_variable("w1", [4*hidden_layer_size, hidden_layer_size],
#             initializer=tf.random_normal_initializer())
# #     layer1 = tf.nn.tanh(tf.matmul(z, w1))
# #     scope.reuse_variables()        
#     w2 = tf.get_variable("weights2", [hidden_layer_size, hidden_layer_size],
#             initializer=tf.random_normal_initializer())
# #     g_t = tf.nn.sigmoid(tf.matmul(layer1, w2))
    
#     for i in range(3):
#         for j in range(10):
#             inp = trial_data[j, :, :]
#             layer1 = tf.nn.tanh(tf.matmul(inp, w1))
#             g_t = tf.nn.sigmoid(tf.matmul(layer1, w2))
# #     m = q
# #     for i in range(10):
# #         inp = trial_data[0, :, :]
# #         m = gru_cell_2(inp, m)[1]

In [9]:
# with tf.variable_scope('trial5') as scope:
#     gru_cell_2 = tf.contrib.rnn.GRUCell(hidden_layer_size)
#     trial_data = tf.constant(np.random.rand(10, 1000, 80), dtype = tf.float32)
#     m = q
#     for i in range(10):
#         inp = trial_data[0, :, :]
#         m = gru_cell_2(inp, m)[1]

In [10]:
# ## Question module
# with tf.variable_scope('question_module_1') as scope:
#     _, q = tf.nn.dynamic_rnn(gru_cell,
#                                   questions,
#                                   dtype=tf.float64)

# ## Input module
# with tf.variable_scope('input_module_1') as scope:
#     i_output, _ = tf.nn.dynamic_rnn(gru_cell,
#                                           inputs,
#                                           dtype=tf.float32,
#                                           sequence_length=input_sequence_lengths)
#     c = []
#     for index in range(num_rows):
#         states_at_periods = tf.boolean_mask(i_output[index,:,:], periods[index,:])
#         padding = tf.zeros([max_facts - tf.shape(states_at_periods)[0], hidden_layer_size])
#         c.append(tf.concat([states_at_periods, padding], 0))
#     c = tf.unstack(tf.transpose(tf.stack(c), perm=[1,0,2]), num = max_facts)
#     c_stacked = tf.transpose(tf.stack(c), perm = [1, 0, 2])

In [6]:
## Question and Input module
with tf.variable_scope('question_and_input_module') as scope:
    input_gru_cell = tf.contrib.rnn.GRUCell(hidden_layer_size)

    _, q = tf.nn.dynamic_rnn(input_gru_cell,
                                  questions,
                                  dtype=tf.float32)
    scope.reuse_variables()

    i_output, _ = tf.nn.dynamic_rnn(input_gru_cell,
                                          inputs,
                                          dtype=tf.float32,
                                          sequence_length=input_sequence_lengths_placeholder)
    
    c = []
    for index in range(batch_size):
        states_at_periods = tf.boolean_mask(i_output[index,:,:], periods[index,:])
        padding = tf.zeros([max_facts - tf.shape(states_at_periods)[0], hidden_layer_size])
        c.append(tf.concat([states_at_periods, padding], 0))
    c = tf.unstack(tf.transpose(tf.stack(c), perm=[1,0,2]), num = max_facts)
    c_stacked = tf.transpose(tf.stack(c), perm = [1, 0, 2])

In [7]:
## Episodic Memory module
with tf.variable_scope('episodic_memory_module'):
    episodic_gru_cell = tf.contrib.rnn.GRUCell(hidden_layer_size)
    w1 = tf.get_variable("weight_1", [4*hidden_layer_size, hidden_layer_size],
                        initializer=tf.random_normal_initializer())
    w2 = tf.get_variable("weight_2", [hidden_layer_size, hidden_layer_size],
                        initializer=tf.random_normal_initializer())
    
    m_i = q
    for step in range(num_steps):
#         h_t = tf.zeros_like(c[0])
        e_i = tf.zeros_like(c[0])
        g = []
        for c_t in c:
            # calculate g
            z = tf.concat([tf.multiply(c_t, q), 
                           tf.multiply(c_t, m_i),
                           tf.abs(tf.subtract(c_t, q)),
                           tf.abs(tf.subtract(c_t, m_i))], 1) # [N x 4d]
#             with tf.variable_scope("layers", reuse = True) as scope:
# #                 w1 = tf.get_variable("weights1", [4*hidden_layer_size, hidden_layer_size],
# #                         initializer=tf.random_normal_initializer())
#                 layer1 = tf.nn.tanh(tf.matmul(z, w1))
#                 scope.reuse_variables()        
# #                 w2 = tf.get_variable("weights2", [hidden_layer_size, hidden_layer_size],
# #                         initializer=tf.random_normal_initializer())
#                 g_t = tf.nn.sigmoid(tf.matmul(layer1, w2))
# #                 scope.reuse_variables()
# #             layer1 = tf.contrib.layers.fully_connected(inputs = z,
# #                                                       num_outputs = hidden_layer_size,
# #                                                       activation_fn = tf.nn.tanh,
# #                                                       reuse = None,
# #                                                       scope = "layer_1")
# # #             with tf.variable_scope("layer_22"):
# #             g_t = tf.contrib.layers.fully_connected(inputs = layer1,
# #                                                       num_outputs = 1,
# #                                                       activation_fn = tf.nn.sigmoid,
# #                                                    reuse = None,
# #                                                    scope = "layer_2")
            layer1 = tf.nn.tanh(tf.matmul(z, w1))
            g_t = tf.nn.sigmoid(tf.matmul(layer1, w2))
            g.append(g_t)
        g = tf.transpose(tf.stack(g), perm = [1, 0, 2])
        g_softmax = tf.nn.softmax(g, dim = 1)
        e_i = tf.reduce_sum(tf.multiply(g_softmax, c_stacked), axis = 1)

#             # compute episode for pass i
#             h_t = tf.multiply(g, gru_cell(c_t, h_t)[1]) + tf.multiply(1 - g, h_t)
#             scope.reuse_variables()
#         # episode is the last hidden state
#         e_i = h_t

        m_i = episodic_gru_cell(e_i, m_i)[1]
#         scope.reuse_variables()

In [8]:
## Answer module
with tf.variable_scope('answer_module') as scope:
    logits = tf.contrib.layers.fully_connected(inputs = m_i,
                                              num_outputs = vocab_size,
                                              activation_fn = None)
    
    ## Loss and metrics
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits = logits, labels = answers)
    loss = tf.reduce_mean(cross_entropy)
    optimizer = tf.train.AdamOptimizer(0.01).minimize(loss)
    
    prediction = tf.cast(tf.argmax(logits, 1), 'int32')
    num_correct = tf.reduce_sum(tf.cast(tf.equal(prediction, answers), tf.int32))
    accuracy = tf.reduce_mean(tf.cast(tf.equal(prediction, answers), tf.float32))

Training


In [9]:
sess = tf.Session()
sess.run(tf.global_variables_initializer())

In [10]:
def get_batch(batch_number):
    return {inputs: data_inputs[batch_number*batch_size: (batch_number+1)*batch_size],
            questions: data_questions[batch_number*batch_size: (batch_number+1)*batch_size],
            answers: data_answers[batch_number*batch_size: (batch_number+1)*batch_size],
            periods: input_period_boolean[batch_number*batch_size: (batch_number+1)*batch_size],
            input_sequence_lengths_placeholder: input_sequence_lengths[batch_number*batch_size: (batch_number+1)*batch_size]
           }

In [12]:
def run_model(sess, num_epochs):
    start_time = time.time()
    for epoch in range(num_epochs):
        epoch_loss = epoch_num_correct =0
        for batch_idx in range(num_batches):
            batch_loss, _, batch_num_correct, batch_accuracy, res_optimizer = sess.run((loss, optimizer, num_correct, accuracy, optimizer), 
                                                                        feed_dict=get_batch(batch_idx))
            epoch_loss += batch_loss
            epoch_num_correct += batch_num_correct
#             epoch_accuracy += batch_accuracy
        print("Epoch %d: %.2f%% complete, %d mins, Loss: %.9f, Num correct: %d" % (epoch, 
                                                                               epoch*100/num_epochs,
                                                                                (time.time() - start_time)/60,
                                                                               epoch_loss, 
                                                                               epoch_num_correct))
    end_time = time.time()
    print("Duration: %d mins" % int((end_time - start_time)/60))

In [13]:
run_model(sess, 50)


Epoch 0: 0.00% complete, 0 mins, Loss: 83.210313797, Num correct: 152, Accuracy: 152.00%
Epoch 1: 0.00% complete, 0 mins, Loss: 18.377319932, Num correct: 158, Accuracy: 158.00%
Epoch 2: 0.00% complete, 0 mins, Loss: 18.358591676, Num correct: 142, Accuracy: 142.00%
Epoch 3: 0.00% complete, 0 mins, Loss: 18.104773045, Num correct: 133, Accuracy: 133.00%
Epoch 4: 0.00% complete, 0 mins, Loss: 18.138466835, Num correct: 141, Accuracy: 141.00%
Epoch 5: 0.00% complete, 0 mins, Loss: 18.227027893, Num correct: 145, Accuracy: 145.00%
Epoch 6: 0.00% complete, 0 mins, Loss: 18.191586494, Num correct: 144, Accuracy: 144.00%
Epoch 7: 0.00% complete, 0 mins, Loss: 18.203004122, Num correct: 144, Accuracy: 144.00%
Epoch 8: 0.00% complete, 0 mins, Loss: 18.227482438, Num correct: 145, Accuracy: 145.00%
Epoch 9: 0.00% complete, 0 mins, Loss: 18.230713606, Num correct: 144, Accuracy: 144.00%
Epoch 10: 0.00% complete, 0 mins, Loss: 18.236656547, Num correct: 144, Accuracy: 144.00%
Epoch 11: 0.00% complete, 0 mins, Loss: 18.243286729, Num correct: 144, Accuracy: 144.00%
Epoch 12: 0.00% complete, 0 mins, Loss: 18.247909307, Num correct: 144, Accuracy: 144.00%
Epoch 13: 0.00% complete, 0 mins, Loss: 18.252863169, Num correct: 144, Accuracy: 144.00%
Epoch 14: 0.00% complete, 0 mins, Loss: 18.256379604, Num correct: 144, Accuracy: 144.00%
Epoch 15: 0.00% complete, 0 mins, Loss: 18.258986592, Num correct: 144, Accuracy: 144.00%
Epoch 16: 0.00% complete, 0 mins, Loss: 18.261907458, Num correct: 144, Accuracy: 144.00%
Epoch 17: 0.00% complete, 0 mins, Loss: 18.264410973, Num correct: 144, Accuracy: 144.00%
Epoch 18: 0.00% complete, 0 mins, Loss: 18.266320229, Num correct: 144, Accuracy: 144.00%
Epoch 19: 0.00% complete, 0 mins, Loss: 18.268107891, Num correct: 144, Accuracy: 144.00%
Epoch 20: 0.00% complete, 0 mins, Loss: 18.269756794, Num correct: 144, Accuracy: 144.00%
Epoch 21: 0.00% complete, 0 mins, Loss: 18.271162868, Num correct: 144, Accuracy: 144.00%
Epoch 22: 0.00% complete, 0 mins, Loss: 18.272403598, Num correct: 144, Accuracy: 144.00%
Epoch 23: 0.00% complete, 0 mins, Loss: 18.273530722, Num correct: 144, Accuracy: 144.00%
Epoch 24: 0.00% complete, 0 mins, Loss: 18.274538040, Num correct: 144, Accuracy: 144.00%
Epoch 25: 0.00% complete, 0 mins, Loss: 18.275440693, Num correct: 144, Accuracy: 144.00%
Epoch 26: 0.00% complete, 0 mins, Loss: 18.276257157, Num correct: 144, Accuracy: 144.00%
Epoch 27: 0.00% complete, 0 mins, Loss: 18.276996136, Num correct: 144, Accuracy: 144.00%
Epoch 28: 0.00% complete, 0 mins, Loss: 18.277667761, Num correct: 144, Accuracy: 144.00%
Epoch 29: 0.00% complete, 0 mins, Loss: 18.278278351, Num correct: 144, Accuracy: 144.00%
Epoch 30: 0.00% complete, 0 mins, Loss: 18.278833628, Num correct: 144, Accuracy: 144.00%
Epoch 31: 0.00% complete, 0 mins, Loss: 18.279342651, Num correct: 144, Accuracy: 144.00%
Epoch 32: 0.00% complete, 0 mins, Loss: 18.279813409, Num correct: 144, Accuracy: 144.00%
Epoch 33: 0.00% complete, 0 mins, Loss: 18.280241132, Num correct: 144, Accuracy: 144.00%
Epoch 34: 0.00% complete, 0 mins, Loss: 18.280634999, Num correct: 144, Accuracy: 144.00%
Epoch 35: 0.00% complete, 0 mins, Loss: 18.281001210, Num correct: 144, Accuracy: 144.00%
Epoch 36: 0.00% complete, 0 mins, Loss: 18.281338811, Num correct: 144, Accuracy: 144.00%
Epoch 37: 0.00% complete, 0 mins, Loss: 18.281652927, Num correct: 144, Accuracy: 144.00%
Epoch 38: 0.00% complete, 0 mins, Loss: 18.281942844, Num correct: 144, Accuracy: 144.00%
Epoch 39: 0.00% complete, 0 mins, Loss: 18.282213330, Num correct: 144, Accuracy: 144.00%
Epoch 40: 0.00% complete, 0 mins, Loss: 18.282465458, Num correct: 144, Accuracy: 144.00%
Epoch 41: 0.00% complete, 0 mins, Loss: 18.282702923, Num correct: 144, Accuracy: 144.00%
Epoch 42: 0.00% complete, 0 mins, Loss: 18.282920599, Num correct: 144, Accuracy: 144.00%
Epoch 43: 0.00% complete, 0 mins, Loss: 18.283125401, Num correct: 144, Accuracy: 144.00%
Epoch 44: 0.00% complete, 0 mins, Loss: 18.283312201, Num correct: 144, Accuracy: 144.00%
Epoch 45: 0.00% complete, 0 mins, Loss: 18.283497095, Num correct: 144, Accuracy: 144.00%
Epoch 46: 0.00% complete, 0 mins, Loss: 18.283668995, Num correct: 144, Accuracy: 144.00%
Epoch 47: 0.00% complete, 0 mins, Loss: 18.283825994, Num correct: 144, Accuracy: 144.00%
Epoch 48: 0.00% complete, 0 mins, Loss: 18.283975005, Num correct: 144, Accuracy: 144.00%
Epoch 49: 0.00% complete, 0 mins, Loss: 18.284123659, Num correct: 144, Accuracy: 144.00%
Duration: 0 mins

In [14]:
run_model(sess, 1000)


Epoch 0: 0.00% complete, 0 mins, Loss: 18.284256458, Num correct: 144, Accuracy: 144.00%
Epoch 1: 0.00% complete, 0 mins, Loss: 18.284379840, Num correct: 144, Accuracy: 144.00%
Epoch 2: 0.00% complete, 0 mins, Loss: 18.284500837, Num correct: 144, Accuracy: 144.00%
Epoch 3: 0.00% complete, 0 mins, Loss: 18.284611702, Num correct: 144, Accuracy: 144.00%
Epoch 4: 0.00% complete, 0 mins, Loss: 18.284720302, Num correct: 144, Accuracy: 144.00%
Epoch 5: 0.00% complete, 0 mins, Loss: 18.284821749, Num correct: 144, Accuracy: 144.00%
Epoch 6: 0.00% complete, 0 mins, Loss: 18.284917235, Num correct: 144, Accuracy: 144.00%
Epoch 7: 0.00% complete, 0 mins, Loss: 18.285007954, Num correct: 144, Accuracy: 144.00%
Epoch 8: 0.00% complete, 0 mins, Loss: 18.285097480, Num correct: 144, Accuracy: 144.00%
Epoch 9: 0.00% complete, 0 mins, Loss: 18.285177469, Num correct: 144, Accuracy: 144.00%
Epoch 10: 0.00% complete, 0 mins, Loss: 18.285256028, Num correct: 144, Accuracy: 144.00%
Epoch 11: 0.00% complete, 0 mins, Loss: 18.285336256, Num correct: 144, Accuracy: 144.00%
Epoch 12: 0.00% complete, 0 mins, Loss: 18.285403848, Num correct: 144, Accuracy: 144.00%
Epoch 13: 0.00% complete, 0 mins, Loss: 18.285475731, Num correct: 144, Accuracy: 144.00%
Epoch 14: 0.00% complete, 0 mins, Loss: 18.285539746, Num correct: 144, Accuracy: 144.00%
Epoch 15: 0.00% complete, 0 mins, Loss: 18.285602450, Num correct: 144, Accuracy: 144.00%
Epoch 16: 0.00% complete, 0 mins, Loss: 18.285660386, Num correct: 144, Accuracy: 144.00%
Epoch 17: 0.00% complete, 0 mins, Loss: 18.285717010, Num correct: 144, Accuracy: 144.00%
Epoch 18: 0.00% complete, 0 mins, Loss: 18.285771728, Num correct: 144, Accuracy: 144.00%
Epoch 19: 0.00% complete, 0 mins, Loss: 18.285822153, Num correct: 144, Accuracy: 144.00%
Epoch 20: 0.00% complete, 0 mins, Loss: 18.285874486, Num correct: 144, Accuracy: 144.00%
Epoch 21: 0.00% complete, 0 mins, Loss: 18.285923123, Num correct: 144, Accuracy: 144.00%
Epoch 22: 0.00% complete, 0 mins, Loss: 18.285969615, Num correct: 144, Accuracy: 144.00%
Epoch 23: 0.00% complete, 0 mins, Loss: 18.286013365, Num correct: 144, Accuracy: 144.00%
Epoch 24: 0.00% complete, 0 mins, Loss: 18.286054611, Num correct: 144, Accuracy: 144.00%
Epoch 25: 0.00% complete, 0 mins, Loss: 18.286094904, Num correct: 144, Accuracy: 144.00%
Epoch 26: 0.00% complete, 0 mins, Loss: 18.286134601, Num correct: 144, Accuracy: 144.00%
Epoch 27: 0.00% complete, 0 mins, Loss: 18.286174655, Num correct: 144, Accuracy: 144.00%
Epoch 28: 0.00% complete, 0 mins, Loss: 18.286209226, Num correct: 144, Accuracy: 144.00%
Epoch 29: 0.00% complete, 0 mins, Loss: 18.286242962, Num correct: 144, Accuracy: 144.00%
Epoch 30: 0.00% complete, 0 mins, Loss: 18.286281466, Num correct: 144, Accuracy: 144.00%
Epoch 31: 0.00% complete, 0 mins, Loss: 18.286312342, Num correct: 144, Accuracy: 144.00%
Epoch 32: 0.00% complete, 0 mins, Loss: 18.286344767, Num correct: 144, Accuracy: 144.00%
Epoch 33: 0.00% complete, 0 mins, Loss: 18.286371946, Num correct: 144, Accuracy: 144.00%
Epoch 34: 0.00% complete, 0 mins, Loss: 18.286399722, Num correct: 144, Accuracy: 144.00%
Epoch 35: 0.00% complete, 0 mins, Loss: 18.286431909, Num correct: 144, Accuracy: 144.00%
Epoch 36: 0.00% complete, 0 mins, Loss: 18.286457777, Num correct: 144, Accuracy: 144.00%
Epoch 37: 0.00% complete, 0 mins, Loss: 18.286481261, Num correct: 144, Accuracy: 144.00%
Epoch 38: 0.00% complete, 0 mins, Loss: 18.286508679, Num correct: 144, Accuracy: 144.00%
Epoch 39: 0.00% complete, 0 mins, Loss: 18.286535740, Num correct: 144, Accuracy: 144.00%
Epoch 40: 0.00% complete, 0 mins, Loss: 18.286555171, Num correct: 144, Accuracy: 144.00%
Epoch 41: 0.00% complete, 0 mins, Loss: 18.286579013, Num correct: 144, Accuracy: 144.00%
Epoch 42: 0.00% complete, 0 mins, Loss: 18.286599398, Num correct: 144, Accuracy: 144.00%
Epoch 43: 0.00% complete, 0 mins, Loss: 18.286623836, Num correct: 144, Accuracy: 144.00%
Epoch 44: 0.00% complete, 0 mins, Loss: 18.286642790, Num correct: 144, Accuracy: 144.00%
Epoch 45: 0.00% complete, 0 mins, Loss: 18.286662817, Num correct: 144, Accuracy: 144.00%
Epoch 46: 0.00% complete, 0 mins, Loss: 18.286678791, Num correct: 144, Accuracy: 144.00%
Epoch 47: 0.00% complete, 0 mins, Loss: 18.286700249, Num correct: 144, Accuracy: 144.00%
Epoch 48: 0.00% complete, 0 mins, Loss: 18.286719203, Num correct: 144, Accuracy: 144.00%
Epoch 49: 0.00% complete, 0 mins, Loss: 18.286739349, Num correct: 144, Accuracy: 144.00%
Epoch 50: 0.00% complete, 0 mins, Loss: 18.286754847, Num correct: 144, Accuracy: 144.00%
Epoch 51: 0.00% complete, 0 mins, Loss: 18.286771774, Num correct: 144, Accuracy: 144.00%
Epoch 52: 0.00% complete, 0 mins, Loss: 18.286788106, Num correct: 144, Accuracy: 144.00%
Epoch 53: 0.00% complete, 0 mins, Loss: 18.286800623, Num correct: 144, Accuracy: 144.00%
Epoch 54: 0.00% complete, 1 mins, Loss: 18.286816359, Num correct: 144, Accuracy: 144.00%
Epoch 55: 0.00% complete, 1 mins, Loss: 18.286830783, Num correct: 144, Accuracy: 144.00%
Epoch 56: 0.00% complete, 1 mins, Loss: 18.286845565, Num correct: 144, Accuracy: 144.00%
Epoch 57: 0.00% complete, 1 mins, Loss: 18.286860228, Num correct: 144, Accuracy: 144.00%
Epoch 58: 0.00% complete, 1 mins, Loss: 18.286868811, Num correct: 144, Accuracy: 144.00%
Epoch 59: 0.00% complete, 1 mins, Loss: 18.286887765, Num correct: 144, Accuracy: 144.00%
Epoch 60: 0.00% complete, 1 mins, Loss: 18.286897421, Num correct: 144, Accuracy: 144.00%
Epoch 61: 0.00% complete, 1 mins, Loss: 18.286910653, Num correct: 144, Accuracy: 144.00%
Epoch 62: 0.00% complete, 1 mins, Loss: 18.286922336, Num correct: 144, Accuracy: 144.00%
Epoch 63: 0.00% complete, 1 mins, Loss: 18.286933422, Num correct: 144, Accuracy: 144.00%
Epoch 64: 0.00% complete, 1 mins, Loss: 18.286945105, Num correct: 144, Accuracy: 144.00%
Epoch 65: 0.00% complete, 1 mins, Loss: 18.286955357, Num correct: 144, Accuracy: 144.00%
Epoch 66: 0.00% complete, 1 mins, Loss: 18.286965847, Num correct: 144, Accuracy: 144.00%
Epoch 67: 0.00% complete, 1 mins, Loss: 18.286976457, Num correct: 144, Accuracy: 144.00%
Epoch 68: 0.00% complete, 1 mins, Loss: 18.286987543, Num correct: 144, Accuracy: 144.00%
Epoch 69: 0.00% complete, 1 mins, Loss: 18.286994815, Num correct: 144, Accuracy: 144.00%
Epoch 70: 0.00% complete, 1 mins, Loss: 18.287004113, Num correct: 144, Accuracy: 144.00%
Epoch 71: 0.00% complete, 1 mins, Loss: 18.287012935, Num correct: 144, Accuracy: 144.00%
Epoch 72: 0.00% complete, 1 mins, Loss: 18.287024140, Num correct: 144, Accuracy: 144.00%
Epoch 73: 0.00% complete, 1 mins, Loss: 18.287031889, Num correct: 144, Accuracy: 144.00%
Epoch 74: 0.00% complete, 1 mins, Loss: 18.287041306, Num correct: 144, Accuracy: 144.00%
Epoch 75: 0.00% complete, 1 mins, Loss: 18.287049651, Num correct: 144, Accuracy: 144.00%
Epoch 76: 0.00% complete, 1 mins, Loss: 18.287056446, Num correct: 144, Accuracy: 144.00%
Epoch 77: 0.00% complete, 1 mins, Loss: 18.287064910, Num correct: 144, Accuracy: 144.00%
Epoch 78: 0.00% complete, 1 mins, Loss: 18.287075639, Num correct: 144, Accuracy: 144.00%
Epoch 79: 0.00% complete, 1 mins, Loss: 18.287080407, Num correct: 144, Accuracy: 144.00%
Epoch 80: 0.00% complete, 1 mins, Loss: 18.287086606, Num correct: 144, Accuracy: 144.00%
Epoch 81: 0.00% complete, 1 mins, Loss: 18.287097454, Num correct: 144, Accuracy: 144.00%
Epoch 82: 0.00% complete, 1 mins, Loss: 18.287102103, Num correct: 144, Accuracy: 144.00%
Epoch 83: 0.00% complete, 1 mins, Loss: 18.287109375, Num correct: 144, Accuracy: 144.00%
Epoch 84: 0.00% complete, 1 mins, Loss: 18.287117481, Num correct: 144, Accuracy: 144.00%
Epoch 85: 0.00% complete, 1 mins, Loss: 18.287121058, Num correct: 144, Accuracy: 144.00%
Epoch 86: 0.00% complete, 1 mins, Loss: 18.287127733, Num correct: 144, Accuracy: 144.00%
Epoch 87: 0.00% complete, 1 mins, Loss: 18.287136197, Num correct: 144, Accuracy: 144.00%
Epoch 88: 0.00% complete, 1 mins, Loss: 18.287138700, Num correct: 144, Accuracy: 144.00%
Epoch 89: 0.00% complete, 1 mins, Loss: 18.287144661, Num correct: 144, Accuracy: 144.00%
Epoch 90: 0.00% complete, 1 mins, Loss: 18.287150025, Num correct: 144, Accuracy: 144.00%
Epoch 91: 0.00% complete, 1 mins, Loss: 18.287157416, Num correct: 144, Accuracy: 144.00%
Epoch 92: 0.00% complete, 1 mins, Loss: 18.287165642, Num correct: 144, Accuracy: 144.00%
Epoch 93: 0.00% complete, 1 mins, Loss: 18.287169099, Num correct: 144, Accuracy: 144.00%
Epoch 94: 0.00% complete, 1 mins, Loss: 18.287173867, Num correct: 144, Accuracy: 144.00%
Epoch 95: 0.00% complete, 1 mins, Loss: 18.287177563, Num correct: 144, Accuracy: 144.00%
Epoch 96: 0.00% complete, 1 mins, Loss: 18.287181616, Num correct: 144, Accuracy: 144.00%
Epoch 97: 0.00% complete, 1 mins, Loss: 18.287186623, Num correct: 144, Accuracy: 144.00%
Epoch 98: 0.00% complete, 1 mins, Loss: 18.287192702, Num correct: 144, Accuracy: 144.00%
Epoch 99: 0.00% complete, 1 mins, Loss: 18.287196875, Num correct: 144, Accuracy: 144.00%
Epoch 100: 0.00% complete, 1 mins, Loss: 18.287204146, Num correct: 144, Accuracy: 144.00%
Epoch 101: 0.00% complete, 1 mins, Loss: 18.287206054, Num correct: 144, Accuracy: 144.00%
Epoch 102: 0.00% complete, 1 mins, Loss: 18.287210464, Num correct: 144, Accuracy: 144.00%
Epoch 103: 0.00% complete, 1 mins, Loss: 18.287214756, Num correct: 144, Accuracy: 144.00%
Epoch 104: 0.00% complete, 1 mins, Loss: 18.287218094, Num correct: 144, Accuracy: 144.00%
Epoch 105: 0.00% complete, 1 mins, Loss: 18.287224650, Num correct: 144, Accuracy: 144.00%
Epoch 106: 0.00% complete, 2 mins, Loss: 18.287227631, Num correct: 144, Accuracy: 144.00%
Epoch 107: 0.00% complete, 2 mins, Loss: 18.287229061, Num correct: 144, Accuracy: 144.00%
Epoch 108: 0.00% complete, 2 mins, Loss: 18.287235141, Num correct: 144, Accuracy: 144.00%
Epoch 109: 0.00% complete, 2 mins, Loss: 18.287239313, Num correct: 144, Accuracy: 144.00%
Epoch 110: 0.00% complete, 2 mins, Loss: 18.287242293, Num correct: 144, Accuracy: 144.00%
Epoch 111: 0.00% complete, 2 mins, Loss: 18.287245035, Num correct: 144, Accuracy: 144.00%
Epoch 112: 0.00% complete, 2 mins, Loss: 18.287247419, Num correct: 144, Accuracy: 144.00%
Epoch 113: 0.00% complete, 2 mins, Loss: 18.287252188, Num correct: 144, Accuracy: 144.00%
Epoch 114: 0.00% complete, 2 mins, Loss: 18.287255287, Num correct: 144, Accuracy: 144.00%
Epoch 115: 0.00% complete, 2 mins, Loss: 18.287257791, Num correct: 144, Accuracy: 144.00%
Epoch 116: 0.00% complete, 2 mins, Loss: 18.287264109, Num correct: 144, Accuracy: 144.00%
Epoch 117: 0.00% complete, 2 mins, Loss: 18.287266254, Num correct: 144, Accuracy: 144.00%
Epoch 118: 0.00% complete, 2 mins, Loss: 18.287270904, Num correct: 144, Accuracy: 144.00%
Epoch 119: 0.00% complete, 2 mins, Loss: 18.287272692, Num correct: 144, Accuracy: 144.00%
Epoch 120: 0.00% complete, 2 mins, Loss: 18.287272692, Num correct: 144, Accuracy: 144.00%
Epoch 121: 0.00% complete, 2 mins, Loss: 18.287275672, Num correct: 144, Accuracy: 144.00%
Epoch 122: 0.00% complete, 2 mins, Loss: 18.287279010, Num correct: 144, Accuracy: 144.00%
Epoch 123: 0.00% complete, 2 mins, Loss: 18.287285089, Num correct: 144, Accuracy: 144.00%
Epoch 124: 0.00% complete, 2 mins, Loss: 18.287287712, Num correct: 144, Accuracy: 144.00%
Epoch 125: 0.00% complete, 2 mins, Loss: 18.287290573, Num correct: 144, Accuracy: 144.00%
Epoch 126: 0.00% complete, 2 mins, Loss: 18.287288785, Num correct: 144, Accuracy: 144.00%
Epoch 127: 0.00% complete, 2 mins, Loss: 18.287290335, Num correct: 144, Accuracy: 144.00%
Epoch 128: 0.00% complete, 2 mins, Loss: 18.287292719, Num correct: 144, Accuracy: 144.00%
Epoch 129: 0.00% complete, 2 mins, Loss: 18.287294388, Num correct: 144, Accuracy: 144.00%
Epoch 130: 0.00% complete, 2 mins, Loss: 18.287301660, Num correct: 144, Accuracy: 144.00%
Epoch 131: 0.00% complete, 2 mins, Loss: 18.287306309, Num correct: 144, Accuracy: 144.00%
Epoch 132: 0.00% complete, 2 mins, Loss: 18.287309527, Num correct: 144, Accuracy: 144.00%
Epoch 133: 0.00% complete, 2 mins, Loss: 18.287310362, Num correct: 144, Accuracy: 144.00%
Epoch 134: 0.00% complete, 2 mins, Loss: 18.287308812, Num correct: 144, Accuracy: 144.00%
Epoch 135: 0.00% complete, 2 mins, Loss: 18.287307024, Num correct: 144, Accuracy: 144.00%
Epoch 136: 0.00% complete, 2 mins, Loss: 18.287310958, Num correct: 144, Accuracy: 144.00%
Epoch 137: 0.00% complete, 2 mins, Loss: 18.287313342, Num correct: 144, Accuracy: 144.00%
Epoch 138: 0.00% complete, 2 mins, Loss: 18.287315488, Num correct: 144, Accuracy: 144.00%
Epoch 139: 0.00% complete, 2 mins, Loss: 18.287317753, Num correct: 144, Accuracy: 144.00%
Epoch 140: 0.00% complete, 2 mins, Loss: 18.287320256, Num correct: 144, Accuracy: 144.00%
Epoch 141: 0.00% complete, 2 mins, Loss: 18.287323236, Num correct: 144, Accuracy: 144.00%
Epoch 142: 0.00% complete, 2 mins, Loss: 18.287326694, Num correct: 144, Accuracy: 144.00%
Epoch 143: 0.00% complete, 2 mins, Loss: 18.287329674, Num correct: 144, Accuracy: 144.00%
Epoch 144: 0.00% complete, 2 mins, Loss: 18.287328362, Num correct: 144, Accuracy: 144.00%
Epoch 145: 0.00% complete, 2 mins, Loss: 18.287331104, Num correct: 144, Accuracy: 144.00%
Epoch 146: 0.00% complete, 2 mins, Loss: 18.287332416, Num correct: 144, Accuracy: 144.00%
Epoch 147: 0.00% complete, 2 mins, Loss: 18.287331581, Num correct: 144, Accuracy: 144.00%
Epoch 148: 0.00% complete, 2 mins, Loss: 18.287335515, Num correct: 144, Accuracy: 144.00%
Epoch 149: 0.00% complete, 2 mins, Loss: 18.287333727, Num correct: 144, Accuracy: 144.00%
Epoch 150: 0.00% complete, 2 mins, Loss: 18.287338495, Num correct: 144, Accuracy: 144.00%
Epoch 151: 0.00% complete, 2 mins, Loss: 18.287339687, Num correct: 144, Accuracy: 144.00%
Epoch 152: 0.00% complete, 2 mins, Loss: 18.287339211, Num correct: 144, Accuracy: 144.00%
Epoch 153: 0.00% complete, 2 mins, Loss: 18.287342072, Num correct: 144, Accuracy: 144.00%
Epoch 154: 0.00% complete, 2 mins, Loss: 18.287345529, Num correct: 144, Accuracy: 144.00%
Epoch 155: 0.00% complete, 2 mins, Loss: 18.287349224, Num correct: 144, Accuracy: 144.00%
Epoch 156: 0.00% complete, 2 mins, Loss: 18.287348509, Num correct: 144, Accuracy: 144.00%
Epoch 157: 0.00% complete, 2 mins, Loss: 18.287349582, Num correct: 144, Accuracy: 144.00%
Epoch 158: 0.00% complete, 2 mins, Loss: 18.287350893, Num correct: 144, Accuracy: 144.00%
Epoch 159: 0.00% complete, 2 mins, Loss: 18.287353277, Num correct: 144, Accuracy: 144.00%
Epoch 160: 0.00% complete, 2 mins, Loss: 18.287354827, Num correct: 144, Accuracy: 144.00%
Epoch 161: 0.00% complete, 3 mins, Loss: 18.287353873, Num correct: 144, Accuracy: 144.00%
Epoch 162: 0.00% complete, 3 mins, Loss: 18.287356973, Num correct: 144, Accuracy: 144.00%
Epoch 163: 0.00% complete, 3 mins, Loss: 18.287356615, Num correct: 144, Accuracy: 144.00%
Epoch 164: 0.00% complete, 3 mins, Loss: 18.287357092, Num correct: 144, Accuracy: 144.00%
Epoch 165: 0.00% complete, 3 mins, Loss: 18.287359953, Num correct: 144, Accuracy: 144.00%
Epoch 166: 0.00% complete, 3 mins, Loss: 18.287360668, Num correct: 144, Accuracy: 144.00%
Epoch 167: 0.00% complete, 3 mins, Loss: 18.287362099, Num correct: 144, Accuracy: 144.00%
Epoch 168: 0.00% complete, 3 mins, Loss: 18.287362933, Num correct: 144, Accuracy: 144.00%
Epoch 169: 0.00% complete, 3 mins, Loss: 18.287363172, Num correct: 144, Accuracy: 144.00%
Epoch 170: 0.00% complete, 3 mins, Loss: 18.287364602, Num correct: 144, Accuracy: 144.00%
Epoch 171: 0.00% complete, 3 mins, Loss: 18.287364960, Num correct: 144, Accuracy: 144.00%
Epoch 172: 0.00% complete, 3 mins, Loss: 18.287363887, Num correct: 144, Accuracy: 144.00%
Epoch 173: 0.00% complete, 3 mins, Loss: 18.287366986, Num correct: 144, Accuracy: 144.00%
Epoch 174: 0.00% complete, 3 mins, Loss: 18.287369251, Num correct: 144, Accuracy: 144.00%
Epoch 175: 0.00% complete, 3 mins, Loss: 18.287368774, Num correct: 144, Accuracy: 144.00%
Epoch 176: 0.00% complete, 3 mins, Loss: 18.287370563, Num correct: 144, Accuracy: 144.00%
Epoch 177: 0.00% complete, 3 mins, Loss: 18.287372470, Num correct: 144, Accuracy: 144.00%
Epoch 178: 0.00% complete, 3 mins, Loss: 18.287372708, Num correct: 144, Accuracy: 144.00%
Epoch 179: 0.00% complete, 3 mins, Loss: 18.287373900, Num correct: 144, Accuracy: 144.00%
Epoch 180: 0.00% complete, 3 mins, Loss: 18.287375212, Num correct: 144, Accuracy: 144.00%
Epoch 181: 0.00% complete, 3 mins, Loss: 18.287376165, Num correct: 144, Accuracy: 144.00%
Epoch 182: 0.00% complete, 3 mins, Loss: 18.287376642, Num correct: 144, Accuracy: 144.00%
Epoch 183: 0.00% complete, 3 mins, Loss: 18.287377000, Num correct: 144, Accuracy: 144.00%
Epoch 184: 0.00% complete, 3 mins, Loss: 18.287377715, Num correct: 144, Accuracy: 144.00%
Epoch 185: 0.00% complete, 3 mins, Loss: 18.287379622, Num correct: 144, Accuracy: 144.00%
Epoch 186: 0.00% complete, 3 mins, Loss: 18.287380815, Num correct: 144, Accuracy: 144.00%
Epoch 187: 0.00% complete, 3 mins, Loss: 18.287381291, Num correct: 144, Accuracy: 144.00%
Epoch 188: 0.00% complete, 3 mins, Loss: 18.287384033, Num correct: 144, Accuracy: 144.00%
Epoch 189: 0.00% complete, 3 mins, Loss: 18.287383914, Num correct: 144, Accuracy: 144.00%
Epoch 190: 0.00% complete, 3 mins, Loss: 18.287384987, Num correct: 144, Accuracy: 144.00%
Epoch 191: 0.00% complete, 3 mins, Loss: 18.287384033, Num correct: 144, Accuracy: 144.00%
Epoch 192: 0.00% complete, 3 mins, Loss: 18.287385225, Num correct: 144, Accuracy: 144.00%
Epoch 193: 0.00% complete, 3 mins, Loss: 18.287386656, Num correct: 144, Accuracy: 144.00%
Epoch 194: 0.00% complete, 3 mins, Loss: 18.287385702, Num correct: 144, Accuracy: 144.00%
Epoch 195: 0.00% complete, 3 mins, Loss: 18.287385106, Num correct: 144, Accuracy: 144.00%
Epoch 196: 0.00% complete, 3 mins, Loss: 18.287387967, Num correct: 144, Accuracy: 144.00%
Epoch 197: 0.00% complete, 3 mins, Loss: 18.287388444, Num correct: 144, Accuracy: 144.00%
Epoch 198: 0.00% complete, 3 mins, Loss: 18.287387848, Num correct: 144, Accuracy: 144.00%
Epoch 199: 0.00% complete, 3 mins, Loss: 18.287388444, Num correct: 144, Accuracy: 144.00%
Epoch 200: 0.00% complete, 3 mins, Loss: 18.287390232, Num correct: 144, Accuracy: 144.00%
Epoch 201: 0.00% complete, 3 mins, Loss: 18.287391305, Num correct: 144, Accuracy: 144.00%
Epoch 202: 0.00% complete, 3 mins, Loss: 18.287392139, Num correct: 144, Accuracy: 144.00%
Epoch 203: 0.00% complete, 3 mins, Loss: 18.287393928, Num correct: 144, Accuracy: 144.00%
Epoch 204: 0.00% complete, 3 mins, Loss: 18.287394524, Num correct: 144, Accuracy: 144.00%
Epoch 205: 0.00% complete, 3 mins, Loss: 18.287393689, Num correct: 144, Accuracy: 144.00%
Epoch 206: 0.00% complete, 3 mins, Loss: 18.287395000, Num correct: 144, Accuracy: 144.00%
Epoch 207: 0.00% complete, 3 mins, Loss: 18.287395000, Num correct: 144, Accuracy: 144.00%
Epoch 208: 0.00% complete, 3 mins, Loss: 18.287393689, Num correct: 144, Accuracy: 144.00%
Epoch 209: 0.00% complete, 3 mins, Loss: 18.287394166, Num correct: 144, Accuracy: 144.00%
Epoch 210: 0.00% complete, 3 mins, Loss: 18.287395239, Num correct: 144, Accuracy: 144.00%
Epoch 211: 0.00% complete, 3 mins, Loss: 18.287393689, Num correct: 144, Accuracy: 144.00%
Epoch 212: 0.00% complete, 3 mins, Loss: 18.287393928, Num correct: 144, Accuracy: 144.00%
Epoch 213: 0.00% complete, 3 mins, Loss: 18.287397742, Num correct: 144, Accuracy: 144.00%
Epoch 214: 0.00% complete, 3 mins, Loss: 18.287398219, Num correct: 144, Accuracy: 144.00%
Epoch 215: 0.00% complete, 3 mins, Loss: 18.287399411, Num correct: 144, Accuracy: 144.00%
Epoch 216: 0.00% complete, 4 mins, Loss: 18.287400126, Num correct: 144, Accuracy: 144.00%
Epoch 217: 0.00% complete, 4 mins, Loss: 18.287398577, Num correct: 144, Accuracy: 144.00%
Epoch 218: 0.00% complete, 4 mins, Loss: 18.287398934, Num correct: 144, Accuracy: 144.00%
Epoch 219: 0.00% complete, 4 mins, Loss: 18.287398338, Num correct: 144, Accuracy: 144.00%
Epoch 220: 0.00% complete, 4 mins, Loss: 18.287398696, Num correct: 144, Accuracy: 144.00%
Epoch 221: 0.00% complete, 4 mins, Loss: 18.287400365, Num correct: 144, Accuracy: 144.00%
Epoch 222: 0.00% complete, 4 mins, Loss: 18.287399650, Num correct: 144, Accuracy: 144.00%
Epoch 223: 0.00% complete, 4 mins, Loss: 18.287400603, Num correct: 144, Accuracy: 144.00%
Epoch 224: 0.00% complete, 4 mins, Loss: 18.287402272, Num correct: 144, Accuracy: 144.00%
Epoch 225: 0.00% complete, 4 mins, Loss: 18.287402153, Num correct: 144, Accuracy: 144.00%
Epoch 226: 0.00% complete, 4 mins, Loss: 18.287402153, Num correct: 144, Accuracy: 144.00%
Epoch 227: 0.00% complete, 4 mins, Loss: 18.287402153, Num correct: 144, Accuracy: 144.00%
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-14-fb866bde130b> in <module>()
----> 1 run_model(sess, 1000)

<ipython-input-12-b9e5a9f5c1cd> in run_model(sess, num_epochs)
      5         for batch_idx in range(num_batches):
      6             batch_loss, _, batch_num_correct, batch_accuracy, res_optimizer = sess.run((loss, optimizer, num_correct, accuracy, optimizer), 
----> 7                                                                         feed_dict=get_batch(batch_idx))
      8             epoch_loss += batch_loss
      9             epoch_num_correct += batch_num_correct

/home/amolmane1/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    893     try:
    894       result = self._run(None, fetches, feed_dict, options_ptr,
--> 895                          run_metadata_ptr)
    896       if run_metadata:
    897         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/home/amolmane1/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
   1122     if final_fetches or final_targets or (handle and feed_dict_tensor):
   1123       results = self._do_run(handle, final_targets, final_fetches,
-> 1124                              feed_dict_tensor, options, run_metadata)
   1125     else:
   1126       results = []

/home/amolmane1/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1319     if handle is None:
   1320       return self._do_call(_run_fn, self._session, feeds, fetches, targets,
-> 1321                            options, run_metadata)
   1322     else:
   1323       return self._do_call(_prun_fn, self._session, handle, feeds, fetches)

/home/amolmane1/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_call(self, fn, *args)
   1325   def _do_call(self, fn, *args):
   1326     try:
-> 1327       return fn(*args)
   1328     except errors.OpError as e:
   1329       message = compat.as_text(e.message)

/home/amolmane1/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1304           return tf_session.TF_Run(session, options,
   1305                                    feed_dict, fetch_list, target_list,
-> 1306                                    status, run_metadata)
   1307 
   1308     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [14]:
res_pred = sess.run(prediction, feed_dict=get_batch(9))
res_pred


Out[14]:
array([2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637, 2637,
       2637], dtype=int32)

In [15]:
id_word_dict[2637]


Out[15]:
'garden'

Testing


In [ ]:
# data = read_data("../../datasets/facebook_babi/tasks_1-20_v1-2/en/qa1_single-supporting-fact_test.txt")
# max_input_len = max_len(data, 'I')
# max_question_len = max_len(data, 'Q')
# max_answer_len = max_len(data, 'A')
# data_inputs, data_questions = embed_and_pad(data)
# data_answers = get_answer_index(data)
# input_sequence_lengths = get_input_sequence_lengths(data)
# input_period_boolean = get_input_period_boolean(data)
# max_facts = get_max_facts(input_period_boolean)

In [ ]:
# test_loss, test_prediction, test_num_correct, test_accuracy = sess.run((loss, prediction, num_correct, accuracy), feed_dict={inputs: data_inputs, 
#                                                         questions: data_questions, 
#                                                         answers: data_answers, 
#                                                         periods: input_period_boolean})