In [1]:
# -*- coding: utf-8 -*-
# Import Packages
import numpy as np
import tensorflow as tf
import collections
import argparse
import time
import os
from six.moves import cPickle
from TextLoader import *
from Hangulpy import *
print ("Packages Imported")
Packages Imported
In [2]:
corpus_name = "invisible_dragon" # "nine_dreams"
data_dir = "data/" + corpus_name
batch_size = 10
seq_length = 100
data_loader = TextLoader(data_dir, batch_size, seq_length)
reading text file
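TextLoader chops the jamo-level corpus into batch_size x seq_length chunks. The number of batches per epoch can be inspected directly; judging by the training log below (325,000 total steps over 5,000 epochs) it comes out to 65 here, i.e. 65 x 10 x 100 = 65,000 characters per epoch. A quick check, using only attributes that are referenced later in this notebook:

# Sanity check on the batch layout (num_batches is used again in the training loop below).
print ("num_batches per epoch: %d" % data_loader.num_batches)
print ("characters per epoch : %d" % (data_loader.num_batches * batch_size * seq_length))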
In [3]:
vocab_size = data_loader.vocab_size
vocab = data_loader.vocab
chars = data_loader.chars
print ( "type of 'data_loader.vocab' is %s, length is %d"
% (type(data_loader.vocab), len(data_loader.vocab)) )
print ( "type of 'data_loader.chars' is %s, length is %d"
% (type(data_loader.chars), len(data_loader.chars)) )
type of 'data_loader.vocab' is <type 'dict'>, length is 84
type of 'data_loader.chars' is <type 'tuple'>, length is 84
In [4]:
print (data_loader.vocab)
{u'_': 81, u'6': 63, u'|': 83, u'\n': 6, u'\r': 7, u',': 74, u'x': 75, u';': 73, u'[': 71, u'\u3144': 60, u'!': 28, u' ': 2, u'#': 68, u'"': 34, u'\u1d25': 0, u"'": 65, u')': 50, u'(': 51, u'+': 76, u'*': 82, u']': 72, u'\u3133': 58, u'/': 45, u'.': 24, u'\u3131': 5, u'0': 27, u'3': 54, u'2': 36, u'5': 61, u'\u3134': 4, u'\u3137': 11, u'\u3136': 49, u'\u3139': 8, u'\u3138': 31, u'\u3156': 52, u':': 40, u'\u313c': 67, u'?': 44, u'4': 59, u'\u3141': 14, u'\u3140': 77, u'\u3143': 57, u'\u3142': 21, u'\u3145': 15, u'7': 47, u'\u3147': 1, u'\u3146': 22, u'\u3149': 38, u'\u3148': 16, u'\u314b': 26, u'\u314a': 30, u'\u314d': 33, u'\u314c': 25, u'\u314f': 3, u'\u314e': 19, u'\u3151': 32, u'\u3150': 18, u'\u3153': 13, u'\u3152': 69, u'\u3155': 20, u'\u3154': 23, u'\u3157': 12, u'8': 46, u'\u3159': 62, u'\u3158': 41, u'\u315b': 29, u'\u315a': 53, u'\u315d': 48, u'\u315c': 17, u'\u315f': 39, u'^': 64, u'\u3161': 10, u'\u3160': 55, u'\u3163': 9, u'\u3162': 43, u'k': 78, u'9': 35, u'\u313a': 70, u'1': 56, u'\u3132': 42, u'%': 66, u'}': 80, u'<': 79, u'~': 37}
In [5]:
print (data_loader.chars)
# USAGE
print (data_loader.chars[0])
(u'\u1d25', u'\u3147', u' ', u'\u314f', u'\u3134', u'\u3131', u'\n', u'\r', u'\u3139', u'\u3163', u'\u3161', u'\u3137', u'\u3157', u'\u3153', u'\u3141', u'\u3145', u'\u3148', u'\u315c', u'\u3150', u'\u314e', u'\u3155', u'\u3142', u'\u3146', u'\u3154', u'.', u'\u314c', u'\u314b', u'0', u'!', u'\u315b', u'\u314a', u'\u3138', u'\u3151', u'\u314d', u'"', u'9', u'2', u'~', u'\u3149', u'\u315f', u':', u'\u3158', u'\u3132', u'\u3162', u'?', u'/', u'8', u'7', u'\u315d', u'\u3136', u')', u'(', u'\u3156', u'\u315a', u'3', u'\u3160', u'1', u'\u3143', u'\u3133', u'4', u'\u3144', u'5', u'\u3159', u'6', u'^', u"'", u'%', u'\u313c', u'#', u'\u3152', u'\u313a', u'[', u']', u';', u',', u'x', u'+', u'\u3140', u'k', u'<', u'}', u'_', u'*', u'|')
ᴥ
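As the dumps above show, data_loader.chars is the index-to-jamo table and data_loader.vocab is its inverse, so an encoded index sequence can be decoded back into jamo. A minimal round-trip sketch using only these two objects (the example indices are read off the vocab dump above and spell the jamo of 한글):

# chars[i] is the symbol with index i; vocab maps the symbol back to i.
assert all(vocab[c] == i for i, c in enumerate(chars))

def decode(indices):
    """Turn a list of vocabulary indices back into a jamo string."""
    return u''.join(chars[i] for i in indices)

print (decode([19, 3, 4, 5, 10, 8]))  # -> u'ㅎㅏㄴㄱㅡㄹ' (jamo of 한글)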
In [6]:
x, y = data_loader.next_batch()
print ("Type of 'x' is %s. Shape is %s" % (type(x), x.shape,))
print ("x looks like \n%s" % (x))
print
print ("Type of 'y' is %s. Shape is %s" % (type(y), y.shape,))
print ("y looks like \n%s" % (y))
Type of 'x' is <type 'numpy.ndarray'>. Shape is (10, 100)
x looks like
[[ 7 6 1 20 0 8 13 0 21 10 4 0 2 16 18 0 5 3 0 2 11 10 0 11
9 0 1 13 0 2 5 10 8 0 1 10 8 0 2 22 10 14 0 4 9 0 11 3
0 37 7 6 14 3 4 0 1 9 0 2 21 41 0 16 17 0 15 18 0 1 29 0
37 7 6 4 18 0 1 29 1 0 1 10 4 0 2 30 53 0 5 3 1 0 1 43
0 2 11 10]
[23 0 34 7 6 7 6 2 7 6 7 6 5 10 0 8 13 0 16 3 0 2 16 13
0 5 10 8 0 8 9 1 0 5 41 0 19 9 0 11 10 0 8 3 0 11 10 8
0 1 10 4 0 2 11 17 0 8 20 0 1 17 14 0 1 23 0 14 12 14 0 1
10 8 0 2 31 13 8 0 1 13 22 0 11 3 0 7 6 5 10 4 0 11 23 0
1 3 4 0]
[ 1 13 21 0 15 13 22 0 11 3 0 7 6 25 17 0 14 20 1 0 19 18 0 15
13 0 1 3 4 0 21 12 0 1 20 0 15 13 0 2 5 10 0 8 18 0 15 13
0 2 11 13 0 14 17 0 15 13 0 1 48 0 15 13 0 16 12 8 0 8 3 0
2 14 12 14 0 1 10 8 0 2 14 3 5 0 2 14 17 0 15 13 0 1 48 0
15 13 0 7]
[ 5 18 0 21 3 8 0 33 9 8 0 15 3 8 0 5 9 0 1 9 4 0 2 16
3 0 33 12 5 0 33 9 0 19 3 0 5 9 0 1 20 15 0 11 3 0 2 19
3 0 16 9 0 14 3 4 0 1 9 0 5 13 4 0 11 3 4 0 16 13 14 0
1 9 0 2 1 9 22 0 1 13 15 0 11 3 0 7 6 5 10 0 5 18 0 14
48 0 4 32]
[ 0 2 5 13 15 0 1 9 0 1 13 15 0 11 3 0 7 6 19 3 0 16 9 0
14 3 4 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 1 9
0 2 5 10 0 31 18 0 11 39 0 26 10 0 8 10 8 0 21 12 0 5 12 0
2 15 3 8 0 8 20 0 11 17 8 0 8 9 0 5 3 0 2 1 13 21 0 1
13 15 0 11]
[ 0 1 3 8 0 1 10 8 0 2 42 18 0 5 12 0 4 3 0 1 41 15 0 4
10 4 0 11 23 0 2 1 13 14 0 14 3 0 19 3 0 5 12 0 1 3 0 57
3 0 5 3 0 1 13 21 0 15 13 15 0 11 3 0 7 6 5 10 0 8 18 0
15 13 0 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 1 10 4 0
2 15 10 8]
[55 5 0 21 18 5 0 14 20 1 0 1 10 4 0 15 17 4 0 15 9 5 0 5
3 4 0 1 23 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0
1 43 0 2 16 17 0 1 39 0 8 10 8 0 7 6 11 17 8 0 8 13 0 22
3 22 0 5 12 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0
1 10 4 0]
[ 5 12 0 2 1 9 0 2 1 3 4 0 21 12 0 1 9 0 4 10 4 0 2 25
17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 11 12 0 2 5 10 0 8
13 19 0 5 12 0 7 6 11 3 0 2 21 12 5 0 15 17 0 19 3 0 8 13
0 2 31 13 0 4 3 4 0 11 3 0 5 12 0 19 3 0 5 12 0 24 24 24
7 6 7 6]
[ 7 6 7 6 5 10 0 31 18 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0
5 12 4 0 1 10 4 0 2 16 9 4 0 38 3 0 2 16 12 8 0 8 3 0
2 38 3 1 0 2 14 13 15 0 1 9 22 0 5 23 0 2 19 3 4 0 14 3
0 11 9 0 8 10 8 0 2 4 18 0 21 18 25 0 1 13 22 0 11 3 0 28
7 6 7 6]
[17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 11 12 0 2 1 9 0 16
23 0 2 16 17 5 0 1 13 0 5 3 0 5 12 0 2 1 9 22 0 1 13 22
0 11 3 0 24 24 24 24 24 24 24 24 7 6 1 13 21 0 5 10 0 8 23 0
1 9 0 15 20 4 0 2 16 3 0 33 12 5 0 33 9 0 19 3 0 5 9 0
8 12 0 2]]
Type of 'y' is <type 'numpy.ndarray'>. Shape is (10, 100)
y looks like
[[ 6 1 20 0 8 13 0 21 10 4 0 2 16 18 0 5 3 0 2 11 10 0 11 9
0 1 13 0 2 5 10 8 0 1 10 8 0 2 22 10 14 0 4 9 0 11 3 0
37 7 6 14 3 4 0 1 9 0 2 21 41 0 16 17 0 15 18 0 1 29 0 37
7 6 4 18 0 1 29 1 0 1 10 4 0 2 30 53 0 5 3 1 0 1 43 0
2 11 10 0]
[ 0 34 7 6 7 6 2 7 6 7 6 5 10 0 8 13 0 16 3 0 2 16 13 0
5 10 8 0 8 9 1 0 5 41 0 19 9 0 11 10 0 8 3 0 11 10 8 0
1 10 4 0 2 11 17 0 8 20 0 1 17 14 0 1 23 0 14 12 14 0 1 10
8 0 2 31 13 8 0 1 13 22 0 11 3 0 7 6 5 10 4 0 11 23 0 1
3 4 0 21]
[13 21 0 15 13 22 0 11 3 0 7 6 25 17 0 14 20 1 0 19 18 0 15 13
0 1 3 4 0 21 12 0 1 20 0 15 13 0 2 5 10 0 8 18 0 15 13 0
2 11 13 0 14 17 0 15 13 0 1 48 0 15 13 0 16 12 8 0 8 3 0 2
14 12 14 0 1 10 8 0 2 14 3 5 0 2 14 17 0 15 13 0 1 48 0 15
13 0 7 6]
[18 0 21 3 8 0 33 9 8 0 15 3 8 0 5 9 0 1 9 4 0 2 16 3
0 33 12 5 0 33 9 0 19 3 0 5 9 0 1 20 15 0 11 3 0 2 19 3
0 16 9 0 14 3 4 0 1 9 0 5 13 4 0 11 3 4 0 16 13 14 0 1
9 0 2 1 9 22 0 1 13 15 0 11 3 0 7 6 5 10 0 5 18 0 14 48
0 4 32 0]
[ 2 5 13 15 0 1 9 0 1 13 15 0 11 3 0 7 6 19 3 0 16 9 0 14
3 4 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 1 9 0
2 5 10 0 31 18 0 11 39 0 26 10 0 8 10 8 0 21 12 0 5 12 0 2
15 3 8 0 8 20 0 11 17 8 0 8 9 0 5 3 0 2 1 13 21 0 1 13
15 0 11 3]
[ 1 3 8 0 1 10 8 0 2 42 18 0 5 12 0 4 3 0 1 41 15 0 4 10
4 0 11 23 0 2 1 13 14 0 14 3 0 19 3 0 5 12 0 1 3 0 57 3
0 5 3 0 1 13 21 0 15 13 15 0 11 3 0 7 6 5 10 0 8 18 0 15
13 0 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 1 10 4 0 2
15 10 8 0]
[ 5 0 21 18 5 0 14 20 1 0 1 10 4 0 15 17 4 0 15 9 5 0 5 3
4 0 1 23 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 1
43 0 2 16 17 0 1 39 0 8 10 8 0 7 6 11 17 8 0 8 13 0 22 3
22 0 5 12 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 1
10 4 0 2]
[12 0 2 1 9 0 2 1 3 4 0 21 12 0 1 9 0 4 10 4 0 2 25 17
0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 11 12 0 2 5 10 0 8 13
19 0 5 12 0 7 6 11 3 0 2 21 12 5 0 15 17 0 19 3 0 8 13 0
2 31 13 0 4 3 4 0 11 3 0 5 12 0 19 3 0 5 12 0 24 24 24 7
6 7 6 2]
[ 6 7 6 5 10 0 31 18 0 2 25 17 0 14 20 1 0 11 10 0 8 18 0 5
12 4 0 1 10 4 0 2 16 9 4 0 38 3 0 2 16 12 8 0 8 3 0 2
38 3 1 0 2 14 13 15 0 1 9 22 0 5 23 0 2 19 3 4 0 14 3 0
11 9 0 8 10 8 0 2 4 18 0 21 18 25 0 1 13 22 0 11 3 0 28 7
6 7 6 2]
[ 0 14 20 1 0 11 10 0 8 18 0 5 12 4 0 11 12 0 2 1 9 0 16 23
0 2 16 17 5 0 1 13 0 5 3 0 5 12 0 2 1 9 22 0 1 13 22 0
11 3 0 24 24 24 24 24 24 24 24 7 6 1 13 21 0 5 10 0 8 23 0 1
9 0 15 20 4 0 2 16 3 0 33 12 5 0 33 9 0 19 3 0 5 9 0 8
12 0 2 19]]
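Note that each target row is simply the corresponding input row shifted left by one position: at every time step the network is trained to predict the next jamo. This can be verified directly on the batch above:

# y is x shifted by one time step (next-character prediction targets).
print (np.array_equal(x[:, 1:], y[:, :-1]))  # True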
In [7]:
rnn_size = 128
num_layers = 2
grad_clip = 5. # <= GRADIENT CLIPPING (PRACTICALLY IMPORTANT)
vocab_size = data_loader.vocab_size
# SELECT RNN CELL (MULTI LAYER LSTM)
def unit_cell():
    return tf.contrib.rnn.BasicLSTMCell(rnn_size, state_is_tuple=True,
                                        reuse=tf.get_variable_scope().reuse)
cell = tf.contrib.rnn.MultiRNNCell([unit_cell() for _ in range(num_layers)])
# Placeholders for the input and target index sequences
input_data = tf.placeholder(tf.int32, [batch_size, seq_length])
targets = tf.placeholder(tf.int32, [batch_size, seq_length])
initial_state = cell.zero_state(batch_size, tf.float32)
# Set network parameters (character embedding and softmax projection)
with tf.variable_scope('rnnlm'):
    softmax_w = tf.get_variable("softmax_w", [rnn_size, vocab_size])
    softmax_b = tf.get_variable("softmax_b", [vocab_size])
    with tf.device("/cpu:0"):
        embedding = tf.get_variable("embedding", [vocab_size, rnn_size])
        inputs = tf.split(tf.nn.embedding_lookup(embedding, input_data), seq_length, 1)
        inputs = [tf.squeeze(input_, [1]) for input_ in inputs]
print ("Network ready")
Network ready
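After the embedding lookup, split and squeeze, inputs is a Python list of seq_length tensors of shape [batch_size, rnn_size], which is exactly the list format static_rnn expects in the next cell. A quick shape check:

# inputs: a list of 100 tensors, each of shape (10, 128)
print (len(inputs), inputs[0].get_shape())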
In [8]:
# Output of RNN
outputs, last_state = tf.contrib.rnn.static_rnn(cell, inputs, initial_state,
                                                scope='rnnlm')
output = tf.reshape(tf.concat(outputs, 1), [-1, rnn_size])
logits = tf.nn.xw_plus_b(output, softmax_w, softmax_b)
# Next-character probability
probs = tf.nn.softmax(logits)
print ("FUNCTIONS READY")
FUNCTIONS READY
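The per-step outputs are concatenated and flattened, so logits and probs both have shape [batch_size * seq_length, vocab_size] = [1000, 84]; each row of probs is a softmax distribution over the next jamo/symbol. A quick check:

# One distribution over the 84-symbol vocabulary per (batch, time) position.
print (logits.get_shape(), probs.get_shape())  # (1000, 84) (1000, 84)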
In [9]:
loss = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
    [logits],                              # Input
    [tf.reshape(targets, [-1])],           # Target
    [tf.ones([batch_size * seq_length])],  # Weight
    vocab_size)
print ("LOSS FUNCTION")
LOSS FUNCTION
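With the all-ones weight vector, sequence_loss_by_example reduces to the per-position cross-entropy between the softmax of logits and the integer targets. An equivalent formulation, shown only as a sketch (the notebook itself keeps the legacy_seq2seq helper above):

# Sketch: per-character cross-entropy equivalent to the legacy_seq2seq call above.
xent = tf.nn.sparse_softmax_cross_entropy_with_logits(
    labels=tf.reshape(targets, [-1]), logits=logits)
# Like 'loss', 'xent' holds one value per (batch, time) position.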
In [10]:
cost = tf.reduce_sum(loss) / batch_size / seq_length
# GRADIENT CLIPPING !
lr = tf.Variable(0.0, trainable=False) # <= LEARNING RATE
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars), grad_clip)
_optm = tf.train.AdamOptimizer(lr)
optm = _optm.apply_gradients(zip(grads, tvars))
final_state = last_state
print ("NETWORK READY")
NETWORK READY
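tf.clip_by_global_norm rescales all gradients jointly: with global_norm = sqrt(sum_i ||g_i||^2), every gradient is multiplied by grad_clip / max(global_norm, grad_clip), so nothing changes unless the global norm exceeds grad_clip. A small numpy illustration with made-up gradient values (not taken from the graph above):

# Illustration of global-norm clipping with hypothetical gradients.
fake_grads = [np.array([3.0, 4.0]), np.array([12.0])]      # global norm = sqrt(9+16+144) = 13
gnorm = np.sqrt(sum(np.sum(g ** 2) for g in fake_grads))
scale = grad_clip / max(gnorm, grad_clip)                   # 5 / 13 here
print (gnorm, [(g * scale).tolist() for g in fake_grads])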
In [11]:
num_epochs = 5000
save_every = 2000
learning_rate = 0.001
decay_rate = 0.999
save_dir = 'data/' + corpus_name
sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())
WARNING:tensorflow:From /home/yj/.virtualenvs/lecture/local/lib/python2.7/site-packages/tensorflow/python/util/tf_should_use.py:170: initialize_all_variables (from tensorflow.python.ops.variables) is deprecated and will be removed after 2017-03-02.
Instructions for updating:
Use `tf.global_variables_initializer` instead.
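The warning above only concerns the deprecated initializer name; the replacement it suggests behaves the same on this TensorFlow version:

# Non-deprecated equivalent of the call above, per the warning message.
sess.run(tf.global_variables_initializer())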
In [12]:
summary_writer = tf.summary.FileWriter(save_dir, graph=sess.graph)
saver = tf.train.Saver(tf.all_variables())
for e in range(num_epochs): # for all epochs
    # LEARNING RATE SCHEDULING
    sess.run(tf.assign(lr, learning_rate * (decay_rate ** e)))
    data_loader.reset_batch_pointer()
    state = sess.run(initial_state)
    for b in range(data_loader.num_batches):
        start = time.time()
        x, y = data_loader.next_batch()
        feed = {input_data: x, targets: y, initial_state: state}
        # Train!
        train_loss, state, _ = sess.run([cost, final_state, optm], feed)
        end = time.time()
        # PRINT
        if b % 100 == 0:
            print ("%d/%d (epoch: %d), loss: %.3f, time/batch: %.3f"
                   % (e * data_loader.num_batches + b,
                      num_epochs * data_loader.num_batches,
                      e, train_loss, end - start))
        # SAVE MODEL
        if (e * data_loader.num_batches + b) % save_every == 0:
            checkpoint_path = os.path.join(save_dir, 'model.ckpt')
            saver.save(sess, checkpoint_path,
                       global_step=e * data_loader.num_batches + b)
            print("model saved to {}".format(checkpoint_path))
WARNING:tensorflow:From <ipython-input-12-36cc09519909>:4: all_variables (from tensorflow.python.ops.variables) is deprecated and will be removed after 2017-03-02.
Instructions for updating:
Please use tf.global_variables instead.
0/325000 (epoch: 0), loss: 4.437, time/batch: 0.968
model saved to data/invisible_dragon/model.ckpt
65/325000 (epoch: 1), loss: 3.019, time/batch: 0.074
130/325000 (epoch: 2), loss: 2.194, time/batch: 0.073
195/325000 (epoch: 3), loss: 1.977, time/batch: 0.072
260/325000 (epoch: 4), loss: 1.880, time/batch: 0.087
325/325000 (epoch: 5), loss: 1.791, time/batch: 0.073
390/325000 (epoch: 6), loss: 1.725, time/batch: 0.071
455/325000 (epoch: 7), loss: 1.669, time/batch: 0.071
520/325000 (epoch: 8), loss: 1.621, time/batch: 0.072
585/325000 (epoch: 9), loss: 1.576, time/batch: 0.093
650/325000 (epoch: 10), loss: 1.538, time/batch: 0.085
715/325000 (epoch: 11), loss: 1.502, time/batch: 0.081
780/325000 (epoch: 12), loss: 1.469, time/batch: 0.071
845/325000 (epoch: 13), loss: 1.443, time/batch: 0.084
910/325000 (epoch: 14), loss: 1.420, time/batch: 0.072
975/325000 (epoch: 15), loss: 1.398, time/batch: 0.091
1040/325000 (epoch: 16), loss: 1.379, time/batch: 0.073
1105/325000 (epoch: 17), loss: 1.362, time/batch: 0.079
1170/325000 (epoch: 18), loss: 1.346, time/batch: 0.079
1235/325000 (epoch: 19), loss: 1.330, time/batch: 0.087
1300/325000 (epoch: 20), loss: 1.315, time/batch: 0.161
1365/325000 (epoch: 21), loss: 1.298, time/batch: 0.081
1430/325000 (epoch: 22), loss: 1.282, time/batch: 0.084
1495/325000 (epoch: 23), loss: 1.267, time/batch: 0.077
1560/325000 (epoch: 24), loss: 1.253, time/batch: 0.085
1625/325000 (epoch: 25), loss: 1.239, time/batch: 0.071
1690/325000 (epoch: 26), loss: 1.225, time/batch: 0.077
1755/325000 (epoch: 27), loss: 1.211, time/batch: 0.075
1820/325000 (epoch: 28), loss: 1.195, time/batch: 0.073
1885/325000 (epoch: 29), loss: 1.181, time/batch: 0.081
1950/325000 (epoch: 30), loss: 1.166, time/batch: 0.072
model saved to data/invisible_dragon/model.ckpt
2015/325000 (epoch: 31), loss: 1.151, time/batch: 0.071
2080/325000 (epoch: 32), loss: 1.136, time/batch: 0.076
2145/325000 (epoch: 33), loss: 1.123, time/batch: 0.101
2210/325000 (epoch: 34), loss: 1.110, time/batch: 0.072
2275/325000 (epoch: 35), loss: 1.096, time/batch: 0.075
2340/325000 (epoch: 36), loss: 1.082, time/batch: 0.073
2405/325000 (epoch: 37), loss: 1.070, time/batch: 0.072
2470/325000 (epoch: 38), loss: 1.057, time/batch: 0.078
2535/325000 (epoch: 39), loss: 1.048, time/batch: 0.106
2600/325000 (epoch: 40), loss: 1.032, time/batch: 0.075
2665/325000 (epoch: 41), loss: 1.021, time/batch: 0.076
2730/325000 (epoch: 42), loss: 1.012, time/batch: 0.076
2795/325000 (epoch: 43), loss: 0.999, time/batch: 0.075
2860/325000 (epoch: 44), loss: 0.985, time/batch: 0.081
2925/325000 (epoch: 45), loss: 0.975, time/batch: 0.071
2990/325000 (epoch: 46), loss: 0.964, time/batch: 0.080
3055/325000 (epoch: 47), loss: 0.958, time/batch: 0.079
3120/325000 (epoch: 48), loss: 0.947, time/batch: 0.071
3185/325000 (epoch: 49), loss: 0.936, time/batch: 0.078
3250/325000 (epoch: 50), loss: 0.924, time/batch: 0.073
3315/325000 (epoch: 51), loss: 0.909, time/batch: 0.070
3380/325000 (epoch: 52), loss: 0.897, time/batch: 0.075
3445/325000 (epoch: 53), loss: 0.882, time/batch: 0.077
3510/325000 (epoch: 54), loss: 0.875, time/batch: 0.076
3575/325000 (epoch: 55), loss: 0.865, time/batch: 0.076
3640/325000 (epoch: 56), loss: 0.857, time/batch: 0.106
3705/325000 (epoch: 57), loss: 0.844, time/batch: 0.079
3770/325000 (epoch: 58), loss: 0.835, time/batch: 0.073
3835/325000 (epoch: 59), loss: 0.823, time/batch: 0.076
3900/325000 (epoch: 60), loss: 0.813, time/batch: 0.071
3965/325000 (epoch: 61), loss: 0.803, time/batch: 0.071
model saved to data/invisible_dragon/model.ckpt
4030/325000 (epoch: 62), loss: 0.792, time/batch: 0.073
4095/325000 (epoch: 63), loss: 0.783, time/batch: 0.073
4160/325000 (epoch: 64), loss: 0.775, time/batch: 0.102
4225/325000 (epoch: 65), loss: 0.775, time/batch: 0.088
4290/325000 (epoch: 66), loss: 0.763, time/batch: 0.070
4355/325000 (epoch: 67), loss: 0.755, time/batch: 0.075
4420/325000 (epoch: 68), loss: 0.745, time/batch: 0.071
4485/325000 (epoch: 69), loss: 0.743, time/batch: 0.103
4550/325000 (epoch: 70), loss: 0.737, time/batch: 0.086
4615/325000 (epoch: 71), loss: 0.731, time/batch: 0.072
4680/325000 (epoch: 72), loss: 0.722, time/batch: 0.075
4745/325000 (epoch: 73), loss: 0.709, time/batch: 0.075
4810/325000 (epoch: 74), loss: 0.704, time/batch: 0.078
4875/325000 (epoch: 75), loss: 0.706, time/batch: 0.076
4940/325000 (epoch: 76), loss: 0.717, time/batch: 0.074
5005/325000 (epoch: 77), loss: 0.711, time/batch: 0.075
5070/325000 (epoch: 78), loss: 0.697, time/batch: 0.070
5135/325000 (epoch: 79), loss: 0.686, time/batch: 0.104
5200/325000 (epoch: 80), loss: 0.685, time/batch: 0.086
5265/325000 (epoch: 81), loss: 0.681, time/batch: 0.070
5330/325000 (epoch: 82), loss: 0.673, time/batch: 0.076
5395/325000 (epoch: 83), loss: 0.663, time/batch: 0.073
5460/325000 (epoch: 84), loss: 0.661, time/batch: 0.072
5525/325000 (epoch: 85), loss: 0.662, time/batch: 0.074
5590/325000 (epoch: 86), loss: 0.641, time/batch: 0.072
5655/325000 (epoch: 87), loss: 0.630, time/batch: 0.077
5720/325000 (epoch: 88), loss: 0.618, time/batch: 0.077
5785/325000 (epoch: 89), loss: 0.606, time/batch: 0.071
5850/325000 (epoch: 90), loss: 0.599, time/batch: 0.085
5915/325000 (epoch: 91), loss: 0.595, time/batch: 0.071
5980/325000 (epoch: 92), loss: 0.586, time/batch: 0.072
model saved to data/invisible_dragon/model.ckpt
6045/325000 (epoch: 93), loss: 0.581, time/batch: 0.074
6110/325000 (epoch: 94), loss: 0.573, time/batch: 0.083
6175/325000 (epoch: 95), loss: 0.566, time/batch: 0.081
6240/325000 (epoch: 96), loss: 0.560, time/batch: 0.073
6305/325000 (epoch: 97), loss: 0.551, time/batch: 0.072
6370/325000 (epoch: 98), loss: 0.541, time/batch: 0.076
6435/325000 (epoch: 99), loss: 0.536, time/batch: 0.073
6500/325000 (epoch: 100), loss: 0.529, time/batch: 0.072
6565/325000 (epoch: 101), loss: 0.517, time/batch: 0.079
6630/325000 (epoch: 102), loss: 0.510, time/batch: 0.073
6695/325000 (epoch: 103), loss: 0.505, time/batch: 0.081
6760/325000 (epoch: 104), loss: 0.502, time/batch: 0.076
6825/325000 (epoch: 105), loss: 0.492, time/batch: 0.102
6890/325000 (epoch: 106), loss: 0.493, time/batch: 0.081
6955/325000 (epoch: 107), loss: 0.488, time/batch: 0.072
7020/325000 (epoch: 108), loss: 0.488, time/batch: 0.077
7085/325000 (epoch: 109), loss: 0.484, time/batch: 0.071
7150/325000 (epoch: 110), loss: 0.488, time/batch: 0.072
7215/325000 (epoch: 111), loss: 0.477, time/batch: 0.077
7280/325000 (epoch: 112), loss: 0.469, time/batch: 0.075
7345/325000 (epoch: 113), loss: 0.470, time/batch: 0.074
7410/325000 (epoch: 114), loss: 0.476, time/batch: 0.077
7475/325000 (epoch: 115), loss: 0.482, time/batch: 0.088
7540/325000 (epoch: 116), loss: 0.486, time/batch: 0.086
7605/325000 (epoch: 117), loss: 0.480, time/batch: 0.071
7670/325000 (epoch: 118), loss: 0.463, time/batch: 0.072
7735/325000 (epoch: 119), loss: 0.446, time/batch: 0.080
7800/325000 (epoch: 120), loss: 0.449, time/batch: 0.074
7865/325000 (epoch: 121), loss: 0.441, time/batch: 0.075
7930/325000 (epoch: 122), loss: 0.433, time/batch: 0.079
7995/325000 (epoch: 123), loss: 0.415, time/batch: 0.074
model saved to data/invisible_dragon/model.ckpt
8060/325000 (epoch: 124), loss: 0.405, time/batch: 0.076
8125/325000 (epoch: 125), loss: 0.397, time/batch: 0.077
8190/325000 (epoch: 126), loss: 0.395, time/batch: 0.102
8255/325000 (epoch: 127), loss: 0.386, time/batch: 0.079
8320/325000 (epoch: 128), loss: 0.391, time/batch: 0.074
8385/325000 (epoch: 129), loss: 0.386, time/batch: 0.077
8450/325000 (epoch: 130), loss: 0.388, time/batch: 0.076
8515/325000 (epoch: 131), loss: 0.381, time/batch: 0.103
8580/325000 (epoch: 132), loss: 0.379, time/batch: 0.075
8645/325000 (epoch: 133), loss: 0.381, time/batch: 0.072
8710/325000 (epoch: 134), loss: 0.389, time/batch: 0.078
8775/325000 (epoch: 135), loss: 0.384, time/batch: 0.073
8840/325000 (epoch: 136), loss: 0.382, time/batch: 0.073
8905/325000 (epoch: 137), loss: 0.378, time/batch: 0.089
8970/325000 (epoch: 138), loss: 0.369, time/batch: 0.073
9035/325000 (epoch: 139), loss: 0.364, time/batch: 0.075
9100/325000 (epoch: 140), loss: 0.371, time/batch: 0.077
9165/325000 (epoch: 141), loss: 0.382, time/batch: 0.074
9230/325000 (epoch: 142), loss: 0.369, time/batch: 0.073
9295/325000 (epoch: 143), loss: 0.367, time/batch: 0.076
9360/325000 (epoch: 144), loss: 0.364, time/batch: 0.072
9425/325000 (epoch: 145), loss: 0.350, time/batch: 0.075
9490/325000 (epoch: 146), loss: 0.346, time/batch: 0.079
9555/325000 (epoch: 147), loss: 0.331, time/batch: 0.073
9620/325000 (epoch: 148), loss: 0.325, time/batch: 0.077
9685/325000 (epoch: 149), loss: 0.317, time/batch: 0.073
9750/325000 (epoch: 150), loss: 0.319, time/batch: 0.074
9815/325000 (epoch: 151), loss: 0.321, time/batch: 0.082
9880/325000 (epoch: 152), loss: 0.312, time/batch: 0.074
9945/325000 (epoch: 153), loss: 0.323, time/batch: 0.100
model saved to data/invisible_dragon/model.ckpt
10010/325000 (epoch: 154), loss: 0.323, time/batch: 0.078
10075/325000 (epoch: 155), loss: 0.324, time/batch: 0.075
10140/325000 (epoch: 156), loss: 0.321, time/batch: 0.078
10205/325000 (epoch: 157), loss: 0.314, time/batch: 0.071
10270/325000 (epoch: 158), loss: 0.311, time/batch: 0.073
10335/325000 (epoch: 159), loss: 0.320, time/batch: 0.077
10400/325000 (epoch: 160), loss: 0.318, time/batch: 0.073
10465/325000 (epoch: 161), loss: 0.313, time/batch: 0.072
10530/325000 (epoch: 162), loss: 0.322, time/batch: 0.077
10595/325000 (epoch: 163), loss: 0.311, time/batch: 0.075
10660/325000 (epoch: 164), loss: 0.324, time/batch: 0.074
10725/325000 (epoch: 165), loss: 0.304, time/batch: 0.080
10790/325000 (epoch: 166), loss: 0.296, time/batch: 0.074
10855/325000 (epoch: 167), loss: 0.294, time/batch: 0.078
10920/325000 (epoch: 168), loss: 0.298, time/batch: 0.075
10985/325000 (epoch: 169), loss: 0.280, time/batch: 0.074
11050/325000 (epoch: 170), loss: 0.276, time/batch: 0.079
11115/325000 (epoch: 171), loss: 0.271, time/batch: 0.079
11180/325000 (epoch: 172), loss: 0.273, time/batch: 0.077
11245/325000 (epoch: 173), loss: 0.272, time/batch: 0.078
11310/325000 (epoch: 174), loss: 0.269, time/batch: 0.073
11375/325000 (epoch: 175), loss: 0.262, time/batch: 0.073
11440/325000 (epoch: 176), loss: 0.260, time/batch: 0.081
11505/325000 (epoch: 177), loss: 0.260, time/batch: 0.074
11570/325000 (epoch: 178), loss: 0.257, time/batch: 0.106
11635/325000 (epoch: 179), loss: 0.257, time/batch: 0.087
11700/325000 (epoch: 180), loss: 0.264, time/batch: 0.072
11765/325000 (epoch: 181), loss: 0.281, time/batch: 0.077
11830/325000 (epoch: 182), loss: 0.269, time/batch: 0.074
11895/325000 (epoch: 183), loss: 0.277, time/batch: 0.072
11960/325000 (epoch: 184), loss: 0.282, time/batch: 0.077
model saved to data/invisible_dragon/model.ckpt
12025/325000 (epoch: 185), loss: 0.289, time/batch: 0.078
12090/325000 (epoch: 186), loss: 0.272, time/batch: 0.073
12155/325000 (epoch: 187), loss: 0.275, time/batch: 0.077
12220/325000 (epoch: 188), loss: 0.262, time/batch: 0.082
12285/325000 (epoch: 189), loss: 0.269, time/batch: 0.072
12350/325000 (epoch: 190), loss: 0.276, time/batch: 0.077
12415/325000 (epoch: 191), loss: 0.262, time/batch: 0.076
12480/325000 (epoch: 192), loss: 0.246, time/batch: 0.072
12545/325000 (epoch: 193), loss: 0.242, time/batch: 0.078
12610/325000 (epoch: 194), loss: 0.239, time/batch: 0.072
12675/325000 (epoch: 195), loss: 0.242, time/batch: 0.082
12740/325000 (epoch: 196), loss: 0.228, time/batch: 0.078
12805/325000 (epoch: 197), loss: 0.219, time/batch: 0.071
12870/325000 (epoch: 198), loss: 0.215, time/batch: 0.075
12935/325000 (epoch: 199), loss: 0.210, time/batch: 0.076
13000/325000 (epoch: 200), loss: 0.202, time/batch: 0.103
13065/325000 (epoch: 201), loss: 0.195, time/batch: 0.082
13130/325000 (epoch: 202), loss: 0.199, time/batch: 0.075
13195/325000 (epoch: 203), loss: 0.192, time/batch: 0.082
13260/325000 (epoch: 204), loss: 0.185, time/batch: 0.085
13325/325000 (epoch: 205), loss: 0.180, time/batch: 0.071
13390/325000 (epoch: 206), loss: 0.180, time/batch: 0.081
13455/325000 (epoch: 207), loss: 0.180, time/batch: 0.074
13520/325000 (epoch: 208), loss: 0.177, time/batch: 0.072
13585/325000 (epoch: 209), loss: 0.172, time/batch: 0.077
13650/325000 (epoch: 210), loss: 0.185, time/batch: 0.078
13715/325000 (epoch: 211), loss: 0.181, time/batch: 0.072
13780/325000 (epoch: 212), loss: 0.175, time/batch: 0.078
13845/325000 (epoch: 213), loss: 0.174, time/batch: 0.096
13910/325000 (epoch: 214), loss: 0.179, time/batch: 0.080
13975/325000 (epoch: 215), loss: 0.183, time/batch: 0.090
model saved to data/invisible_dragon/model.ckpt
14040/325000 (epoch: 216), loss: 0.187, time/batch: 0.075
14105/325000 (epoch: 217), loss: 0.188, time/batch: 0.076
14170/325000 (epoch: 218), loss: 0.177, time/batch: 0.071
14235/325000 (epoch: 219), loss: 0.173, time/batch: 0.072
14300/325000 (epoch: 220), loss: 0.171, time/batch: 0.076
14365/325000 (epoch: 221), loss: 0.163, time/batch: 0.071
14430/325000 (epoch: 222), loss: 0.151, time/batch: 0.081
14495/325000 (epoch: 223), loss: 0.151, time/batch: 0.080
14560/325000 (epoch: 224), loss: 0.156, time/batch: 0.105
14625/325000 (epoch: 225), loss: 0.158, time/batch: 0.082
14690/325000 (epoch: 226), loss: 0.161, time/batch: 0.072
14755/325000 (epoch: 227), loss: 0.152, time/batch: 0.077
14820/325000 (epoch: 228), loss: 0.162, time/batch: 0.079
14885/325000 (epoch: 229), loss: 0.152, time/batch: 0.073
14950/325000 (epoch: 230), loss: 0.161, time/batch: 0.078
15015/325000 (epoch: 231), loss: 0.156, time/batch: 0.084
15080/325000 (epoch: 232), loss: 0.152, time/batch: 0.078
15145/325000 (epoch: 233), loss: 0.164, time/batch: 0.098
15210/325000 (epoch: 234), loss: 0.163, time/batch: 0.105
15275/325000 (epoch: 235), loss: 0.154, time/batch: 0.102
15340/325000 (epoch: 236), loss: 0.159, time/batch: 0.097
15405/325000 (epoch: 237), loss: 0.171, time/batch: 0.100
15470/325000 (epoch: 238), loss: 0.157, time/batch: 0.088
15535/325000 (epoch: 239), loss: 0.152, time/batch: 0.093
15600/325000 (epoch: 240), loss: 0.143, time/batch: 0.119
15665/325000 (epoch: 241), loss: 0.147, time/batch: 0.088
15730/325000 (epoch: 242), loss: 0.147, time/batch: 0.095
15795/325000 (epoch: 243), loss: 0.154, time/batch: 0.085
15860/325000 (epoch: 244), loss: 0.156, time/batch: 0.110
15925/325000 (epoch: 245), loss: 0.160, time/batch: 0.090
15990/325000 (epoch: 246), loss: 0.149, time/batch: 0.073
model saved to data/invisible_dragon/model.ckpt
16055/325000 (epoch: 247), loss: 0.158, time/batch: 0.074
16120/325000 (epoch: 248), loss: 0.153, time/batch: 0.085
16185/325000 (epoch: 249), loss: 0.156, time/batch: 0.081
16250/325000 (epoch: 250), loss: 0.147, time/batch: 0.080
16315/325000 (epoch: 251), loss: 0.140, time/batch: 0.084
16380/325000 (epoch: 252), loss: 0.122, time/batch: 0.105
16445/325000 (epoch: 253), loss: 0.116, time/batch: 0.092
16510/325000 (epoch: 254), loss: 0.121, time/batch: 0.095
16575/325000 (epoch: 255), loss: 0.119, time/batch: 0.076
16640/325000 (epoch: 256), loss: 0.110, time/batch: 0.084
16705/325000 (epoch: 257), loss: 0.122, time/batch: 0.082
16770/325000 (epoch: 258), loss: 0.121, time/batch: 0.076
16835/325000 (epoch: 259), loss: 0.141, time/batch: 0.075
16900/325000 (epoch: 260), loss: 0.129, time/batch: 0.073
16965/325000 (epoch: 261), loss: 0.137, time/batch: 0.078
17030/325000 (epoch: 262), loss: 0.138, time/batch: 0.072
17095/325000 (epoch: 263), loss: 0.118, time/batch: 0.077
17160/325000 (epoch: 264), loss: 0.109, time/batch: 0.077
17225/325000 (epoch: 265), loss: 0.108, time/batch: 0.097
17290/325000 (epoch: 266), loss: 0.115, time/batch: 0.109
17355/325000 (epoch: 267), loss: 0.107, time/batch: 0.092
17420/325000 (epoch: 268), loss: 0.118, time/batch: 0.076
17485/325000 (epoch: 269), loss: 0.128, time/batch: 0.080
17550/325000 (epoch: 270), loss: 0.141, time/batch: 0.078
17615/325000 (epoch: 271), loss: 0.115, time/batch: 0.070
17680/325000 (epoch: 272), loss: 0.106, time/batch: 0.080
17745/325000 (epoch: 273), loss: 0.116, time/batch: 0.078
17810/325000 (epoch: 274), loss: 0.115, time/batch: 0.129
17875/325000 (epoch: 275), loss: 0.108, time/batch: 0.078
17940/325000 (epoch: 276), loss: 0.124, time/batch: 0.080
model saved to data/invisible_dragon/model.ckpt
18005/325000 (epoch: 277), loss: 0.123, time/batch: 0.094
18070/325000 (epoch: 278), loss: 0.104, time/batch: 0.092
18135/325000 (epoch: 279), loss: 0.109, time/batch: 0.078
18200/325000 (epoch: 280), loss: 0.093, time/batch: 0.105
18265/325000 (epoch: 281), loss: 0.091, time/batch: 0.077
18330/325000 (epoch: 282), loss: 0.098, time/batch: 0.071
18395/325000 (epoch: 283), loss: 0.105, time/batch: 0.103
18460/325000 (epoch: 284), loss: 0.092, time/batch: 0.080
18525/325000 (epoch: 285), loss: 0.094, time/batch: 0.105
18590/325000 (epoch: 286), loss: 0.095, time/batch: 0.078
18655/325000 (epoch: 287), loss: 0.102, time/batch: 0.073
18720/325000 (epoch: 288), loss: 0.105, time/batch: 0.078
18785/325000 (epoch: 289), loss: 0.105, time/batch: 0.076
18850/325000 (epoch: 290), loss: 0.089, time/batch: 0.072
18915/325000 (epoch: 291), loss: 0.091, time/batch: 0.083
18980/325000 (epoch: 292), loss: 0.091, time/batch: 0.075
19045/325000 (epoch: 293), loss: 0.097, time/batch: 0.074
19110/325000 (epoch: 294), loss: 0.087, time/batch: 0.079
19175/325000 (epoch: 295), loss: 0.092, time/batch: 0.077
19240/325000 (epoch: 296), loss: 0.090, time/batch: 0.104
19305/325000 (epoch: 297), loss: 0.089, time/batch: 0.083
19370/325000 (epoch: 298), loss: 0.093, time/batch: 0.073
19435/325000 (epoch: 299), loss: 0.096, time/batch: 0.078
19500/325000 (epoch: 300), loss: 0.092, time/batch: 0.073
19565/325000 (epoch: 301), loss: 0.085, time/batch: 0.070
19630/325000 (epoch: 302), loss: 0.082, time/batch: 0.079
19695/325000 (epoch: 303), loss: 0.082, time/batch: 0.086
19760/325000 (epoch: 304), loss: 0.091, time/batch: 0.079
19825/325000 (epoch: 305), loss: 0.087, time/batch: 0.073
19890/325000 (epoch: 306), loss: 0.079, time/batch: 0.073
19955/325000 (epoch: 307), loss: 0.079, time/batch: 0.077
model saved to data/invisible_dragon/model.ckpt
20020/325000 (epoch: 308), loss: 0.078, time/batch: 0.072
20085/325000 (epoch: 309), loss: 0.088, time/batch: 0.074
20150/325000 (epoch: 310), loss: 0.079, time/batch: 0.078
20215/325000 (epoch: 311), loss: 0.081, time/batch: 0.070
20280/325000 (epoch: 312), loss: 0.078, time/batch: 0.074
20345/325000 (epoch: 313), loss: 0.073, time/batch: 0.077
20410/325000 (epoch: 314), loss: 0.070, time/batch: 0.071
20475/325000 (epoch: 315), loss: 0.070, time/batch: 0.076
20540/325000 (epoch: 316), loss: 0.072, time/batch: 0.075
20605/325000 (epoch: 317), loss: 0.068, time/batch: 0.072
20670/325000 (epoch: 318), loss: 0.079, time/batch: 0.082
20735/325000 (epoch: 319), loss: 0.073, time/batch: 0.081
20800/325000 (epoch: 320), loss: 0.075, time/batch: 0.077
20865/325000 (epoch: 321), loss: 0.076, time/batch: 0.076
20930/325000 (epoch: 322), loss: 0.068, time/batch: 0.076
20995/325000 (epoch: 323), loss: 0.069, time/batch: 0.100
21060/325000 (epoch: 324), loss: 0.064, time/batch: 0.085
21125/325000 (epoch: 325), loss: 0.064, time/batch: 0.076
21190/325000 (epoch: 326), loss: 0.074, time/batch: 0.079
21255/325000 (epoch: 327), loss: 0.072, time/batch: 0.078
21320/325000 (epoch: 328), loss: 0.069, time/batch: 0.089
21385/325000 (epoch: 329), loss: 0.069, time/batch: 0.091
21450/325000 (epoch: 330), loss: 0.074, time/batch: 0.082
21515/325000 (epoch: 331), loss: 0.073, time/batch: 0.073
21580/325000 (epoch: 332), loss: 0.070, time/batch: 0.113
21645/325000 (epoch: 333), loss: 0.072, time/batch: 0.079
21710/325000 (epoch: 334), loss: 0.067, time/batch: 0.099
21775/325000 (epoch: 335), loss: 0.067, time/batch: 0.094
21840/325000 (epoch: 336), loss: 0.065, time/batch: 0.071
21905/325000 (epoch: 337), loss: 0.063, time/batch: 0.075
21970/325000 (epoch: 338), loss: 0.060, time/batch: 0.078
model saved to data/invisible_dragon/model.ckpt
22035/325000 (epoch: 339), loss: 0.065, time/batch: 0.077
22100/325000 (epoch: 340), loss: 0.065, time/batch: 0.074
22165/325000 (epoch: 341), loss: 0.068, time/batch: 0.078
22230/325000 (epoch: 342), loss: 0.060, time/batch: 0.082
22295/325000 (epoch: 343), loss: 0.057, time/batch: 0.100
22360/325000 (epoch: 344), loss: 0.054, time/batch: 0.078
22425/325000 (epoch: 345), loss: 0.058, time/batch: 0.075
22490/325000 (epoch: 346), loss: 0.062, time/batch: 0.078
22555/325000 (epoch: 347), loss: 0.057, time/batch: 0.076
22620/325000 (epoch: 348), loss: 0.051, time/batch: 0.079
22685/325000 (epoch: 349), loss: 0.046, time/batch: 0.077
22750/325000 (epoch: 350), loss: 0.047, time/batch: 0.071
22815/325000 (epoch: 351), loss: 0.051, time/batch: 0.110
22880/325000 (epoch: 352), loss: 0.049, time/batch: 0.090
22945/325000 (epoch: 353), loss: 0.054, time/batch: 0.072
23010/325000 (epoch: 354), loss: 0.045, time/batch: 0.077
23075/325000 (epoch: 355), loss: 0.048, time/batch: 0.073
23140/325000 (epoch: 356), loss: 0.041, time/batch: 0.074
23205/325000 (epoch: 357), loss: 0.043, time/batch: 0.082
23270/325000 (epoch: 358), loss: 0.044, time/batch: 0.085
23335/325000 (epoch: 359), loss: 0.040, time/batch: 0.083
23400/325000 (epoch: 360), loss: 0.042, time/batch: 0.075
23465/325000 (epoch: 361), loss: 0.043, time/batch: 0.073
23530/325000 (epoch: 362), loss: 0.043, time/batch: 0.078
23595/325000 (epoch: 363), loss: 0.038, time/batch: 0.072
23660/325000 (epoch: 364), loss: 0.042, time/batch: 0.074
23725/325000 (epoch: 365), loss: 0.041, time/batch: 0.077
23790/325000 (epoch: 366), loss: 0.039, time/batch: 0.076
23855/325000 (epoch: 367), loss: 0.043, time/batch: 0.084
23920/325000 (epoch: 368), loss: 0.036, time/batch: 0.083
23985/325000 (epoch: 369), loss: 0.038, time/batch: 0.079
model saved to data/invisible_dragon/model.ckpt
24050/325000 (epoch: 370), loss: 0.042, time/batch: 0.076
24115/325000 (epoch: 371), loss: 0.046, time/batch: 0.078
24180/325000 (epoch: 372), loss: 0.040, time/batch: 0.077
24245/325000 (epoch: 373), loss: 0.040, time/batch: 0.109
24310/325000 (epoch: 374), loss: 0.038, time/batch: 0.085
---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
<ipython-input-12-36cc09519909> in <module>()
14 feed = {input_data: x, targets: y, initial_state: state}
15 # Train!
---> 16 train_loss, state, _ = sess.run([cost, final_state, optm], feed)
17 end = time.time()
18 # PRINT
/home/yj/.virtualenvs/lecture/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
787 try:
788 result = self._run(None, fetches, feed_dict, options_ptr,
--> 789 run_metadata_ptr)
790 if run_metadata:
791 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
/home/yj/.virtualenvs/lecture/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
995 if final_fetches or final_targets:
996 results = self._do_run(handle, final_targets, final_fetches,
--> 997 feed_dict_string, options, run_metadata)
998 else:
999 results = []
/home/yj/.virtualenvs/lecture/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1130 if handle is None:
1131 return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1132 target_list, options, run_metadata)
1133 else:
1134 return self._do_call(_prun_fn, self._session, handle, feed_dict,
/home/yj/.virtualenvs/lecture/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_call(self, fn, *args)
1137 def _do_call(self, fn, *args):
1138 try:
-> 1139 return fn(*args)
1140 except errors.OpError as e:
1141 message = compat.as_text(e.message)
/home/yj/.virtualenvs/lecture/local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
1119 return tf_session.TF_Run(session, options,
1120 feed_dict, fetch_list, target_list,
-> 1121 status, run_metadata)
1122
1123 def _prun_fn(session, handle, feed_dict, fetch_list):
KeyboardInterrupt:
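Training was interrupted by hand at epoch 374; the checkpoints written along the way remain usable. As a rough sanity check of the probs tensor defined earlier, one could run a sketch like the following (it reuses the fixed [batch_size, seq_length] placeholders rather than building a separate one-character sampling graph):

# Per-character accuracy of the greedy (argmax) prediction on one training batch.
x_chk, y_chk = data_loader.next_batch()
p = sess.run(probs, {input_data: x_chk, initial_state: sess.run(initial_state)})
pred = np.argmax(p, axis=1).reshape(batch_size, seq_length)
print ("argmax accuracy on this batch: %.3f" % np.mean(pred == y_chk))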
In [ ]: