In [1]:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# tensorflow
import tensorflow as tf
# Estimators
from tensorflow.contrib import learn
# Model builder
from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib
print (tf.__version__) # tested with v1.1
# Input function
from tensorflow.python.estimator.inputs import numpy_io
# numpy
import numpy as np
# Enable TensorFlow logs
tf.logging.set_verbosity(tf.logging.INFO)
# keras
from tensorflow.contrib.keras.python.keras.preprocessing import sequence
from tensorflow.contrib.keras.python.keras.layers import Embedding, GRU, Dense, SimpleRNN
from tensorflow.contrib.keras.python.keras.layers import Reshape, Activation
# data
from tensorflow.contrib.keras.python.keras.datasets import imdb
# Run an experiment
from tensorflow.contrib.learn.python.learn import learn_runner
1.1.0
In [2]:
# map word to index
word_to_index = imdb.get_word_index()
# map index to word
index_to_word = {}
num_words = 0
for k in word_to_index:
index_to_word[word_to_index[k]] = k
num_words += 1
# turn a sequence into a sentence
def get_sentence(seq):
sentence = ''
for v in seq:
if v != 0: # 0 means it was just added to the sentence so it could have maxlen words
sentence += index_to_word[int(v)] + ' '
return sentence
# turn a sentence into a sequence
def gen_sequence(sentence):
seq = []
for word in sentence:
seq.append(word_to_index[word])
return np.asarray(seq, dtype=np.float32)
print('there are', num_words, 'words in the files')
there are 88584 words in the files
In [3]:
# ------------------- negative
print('-' * 30)
print('Example of a negative review')
print('-' * 30)
x = open('data/train/neg/0_3.txt')
r = x.readline()
print(r)
# ------------------ positive
print()
print('-' * 30)
print('Example of a positive review')
print('-' * 30)
x = open('data/train/pos/0_9.txt')
r = x.readline()
print(r)
------------------------------
Example of a negative review
------------------------------
Story of a man who has unnatural feelings for a pig. Starts out with a opening scene that is a terrific example of absurd comedy. A formal orchestra audience is turned into an insane, violent mob by the crazy chantings of it's singers. Unfortunately it stays absurd the WHOLE time with no general narrative eventually making it just too off putting. Even those from the era should be turned off. The cryptic dialogue would make Shakespeare seem easy to a third grader. On a technical level it's better than you might think with some good cinematography by future great Vilmos Zsigmond. Future stars Sally Kirkland and Frederic Forrest can be seen briefly.
------------------------------
Example of a positive review
------------------------------
Bromwell High is a cartoon comedy. It ran at the same time as some other programs about school life, such as "Teachers". My 35 years in the teaching profession lead me to believe that Bromwell High's satire is much closer to reality than is "Teachers". The scramble to survive financially, the insightful students who can see right through their pathetic teachers' pomp, the pettiness of the whole situation, all remind me of the schools I knew and their students. When I saw the episode in which a student repeatedly tried to burn down the school, I immediately recalled ......... at .......... High. A classic line: INSPECTOR: I'm here to sack one of your teachers. STUDENT: Welcome to Bromwell High. I expect that many adults of my age think that Bromwell High is far fetched. What a pity that it isn't!
In [11]:
print('Loading data')
(x_train, y_train), (x_test, y_test) = imdb.load_data()
# lets make things faster
limit = 3200
maxlen = 200
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print(x_train.shape[0])
limit = x_train.shape[0]
x_train = x_train.astype('float32')
y_train = y_train.astype('int32')
x_test = x_test.astype('float32')
y_test = y_test.astype('int32')
# y to onehot
y_train_one_hot = np.zeros((limit, 2), dtype=np.float32)
for i in range(limit):
y_train_one_hot[i][y_train[i]] = 1
y_test_one_hot = np.zeros((limit, 2), dtype=np.float32)
for i in range(limit):
y_test_one_hot[i][y_test[i]] = 1
#print(y_train)
#print(y_train_one_hot)
Loading data
25000
In [9]:
# parameters
LEARNING_RATE = 0.01
BATCH_SIZE = 25
STEPS = (limit/BATCH_SIZE) * 2 # 2 epochs
# Define the model, using Keras
def model_fn(features, targets, mode, params):
embed = Embedding(num_words, 128)(features['x'])
reshape = Reshape((1,-1))(embed)
gru = GRU(128)(embed)
logits = Dense(2)(gru)
logits_softmax = Activation('softmax')(logits)
# make logits shape the same as the targets: (BATCH_SIZE, 2)
if mode != learn.ModeKeys.PREDICT:
logits = tf.reshape(logits, shape=[BATCH_SIZE, 2])
logits_softmax = tf.reshape(logits, shape=[BATCH_SIZE, 2])
targets = tf.reshape(targets, shape=[BATCH_SIZE, 2])
loss = tf.losses.softmax_cross_entropy(
onehot_labels=targets, logits=logits)
train_op = tf.contrib.layers.optimize_loss(
loss=loss,
global_step=tf.contrib.framework.get_global_step(),
learning_rate=params["learning_rate"],
optimizer="Adam")
predictions = {
"probabilities": tf.nn.softmax(logits)
}
eval_metric_ops = {
"accuracy": tf.metrics.accuracy(
tf.argmax(input=logits_softmax, axis=1),
tf.argmax(input=targets, axis=1))
}
return model_fn_lib.ModelFnOps(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops)
In [10]:
# In[ ]:
# Input functions
# this couldn't possibly be right...
x_train_dict = {'x': x_train }
train_input_fn = numpy_io.numpy_input_fn(
x_train_dict, y_train_one_hot, batch_size=BATCH_SIZE,
shuffle=False, num_epochs=None,
queue_capacity=1000, num_threads=1)
x_test_dict = {'x': x_test }
test_input_fn = numpy_io.numpy_input_fn(
x_test_dict, y_test_one_hot, batch_size=BATCH_SIZE, shuffle=False, num_epochs=1)
# In[ ]:
model_params = {"learning_rate": LEARNING_RATE}
# create estimator
estimator = tf.contrib.learn.Estimator(model_fn=model_fn, params=model_params)
# create experiment
def generate_experiment_fn():
"""
Create an experiment function given hyperparameters.
Returns:
A function (output_dir) -> Experiment where output_dir is a string
representing the location of summaries, checkpoints, and exports.
this function is used by learn_runner to create an Experiment which
executes model code provided in the form of an Estimator and
input functions.
All listed arguments in the outer function are used to create an
Estimator, and input functions (training, evaluation, serving).
Unlisted args are passed through to Experiment.
"""
def _experiment_fn(output_dir):
train_input = train_input_fn
test_input = test_input_fn
return tf.contrib.learn.Experiment(
estimator,
train_input_fn=train_input,
eval_input_fn=test_input,
train_steps=STEPS
)
return _experiment_fn
# run experiment
learn_runner.run(generate_experiment_fn(), '/tmp/outputdir')
INFO:tensorflow:Using default config.
INFO:tensorflow:Using config: {'_cluster_spec': <tensorflow.python.training.server_lib.ClusterSpec object at 0x7f85e43ff828>, '_task_type': None, '_save_checkpoints_steps': None, '_save_summary_steps': 100, '_num_worker_replicas': 0, '_keep_checkpoint_every_n_hours': 10000, '_model_dir': None, '_tf_random_seed': None, '_master': '', '_keep_checkpoint_max': 5, '_num_ps_replicas': 0, '_environment': 'local', '_is_chief': True, '_tf_config': gpu_options {
per_process_gpu_memory_fraction: 1
}
, '_save_checkpoints_secs': 600, '_task_id': 0, '_evaluation_master': ''}
WARNING:tensorflow:Using temporary folder as model directory: /tmp/tmpw8w6bw26
WARNING:tensorflow:From /usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/monitors.py:267: BaseMonitor.__init__ (from tensorflow.contrib.learn.python.learn.monitors) is deprecated and will be removed after 2016-12-05.
Instructions for updating:
Monitors are deprecated. Please use tf.train.SessionRunHook.
INFO:tensorflow:Create CheckpointSaverHook.
INFO:tensorflow:Saving checkpoints for 1 into /tmp/tmpw8w6bw26/model.ckpt.
INFO:tensorflow:loss = 0.694797, step = 1
INFO:tensorflow:Starting evaluation at 2017-06-01-18:19:45
INFO:tensorflow:Restoring parameters from /tmp/tmpw8w6bw26/model.ckpt-1
INFO:tensorflow:Evaluation [1/100]
INFO:tensorflow:Evaluation [2/100]
INFO:tensorflow:Evaluation [3/100]
INFO:tensorflow:Evaluation [4/100]
INFO:tensorflow:Evaluation [5/100]
INFO:tensorflow:Evaluation [6/100]
INFO:tensorflow:Evaluation [7/100]
INFO:tensorflow:Evaluation [8/100]
INFO:tensorflow:Evaluation [9/100]
INFO:tensorflow:Evaluation [10/100]
INFO:tensorflow:Evaluation [11/100]
INFO:tensorflow:Evaluation [12/100]
INFO:tensorflow:Evaluation [13/100]
INFO:tensorflow:Evaluation [14/100]
INFO:tensorflow:Evaluation [15/100]
INFO:tensorflow:Evaluation [16/100]
INFO:tensorflow:Evaluation [17/100]
INFO:tensorflow:Evaluation [18/100]
INFO:tensorflow:Evaluation [19/100]
INFO:tensorflow:Evaluation [20/100]
INFO:tensorflow:Evaluation [21/100]
INFO:tensorflow:Evaluation [22/100]
INFO:tensorflow:Evaluation [23/100]
INFO:tensorflow:Evaluation [24/100]
INFO:tensorflow:Evaluation [25/100]
INFO:tensorflow:Evaluation [26/100]
INFO:tensorflow:Evaluation [27/100]
INFO:tensorflow:Evaluation [28/100]
INFO:tensorflow:Evaluation [29/100]
INFO:tensorflow:Evaluation [30/100]
INFO:tensorflow:Evaluation [31/100]
INFO:tensorflow:Evaluation [32/100]
INFO:tensorflow:Evaluation [33/100]
INFO:tensorflow:Evaluation [34/100]
INFO:tensorflow:Evaluation [35/100]
INFO:tensorflow:Evaluation [36/100]
INFO:tensorflow:Evaluation [37/100]
INFO:tensorflow:Evaluation [38/100]
INFO:tensorflow:Evaluation [39/100]
INFO:tensorflow:Evaluation [40/100]
INFO:tensorflow:Evaluation [41/100]
INFO:tensorflow:Evaluation [42/100]
INFO:tensorflow:Evaluation [43/100]
INFO:tensorflow:Evaluation [44/100]
INFO:tensorflow:Evaluation [45/100]
INFO:tensorflow:Evaluation [46/100]
INFO:tensorflow:Evaluation [47/100]
INFO:tensorflow:Evaluation [48/100]
INFO:tensorflow:Evaluation [49/100]
INFO:tensorflow:Evaluation [50/100]
INFO:tensorflow:Evaluation [51/100]
INFO:tensorflow:Evaluation [52/100]
INFO:tensorflow:Evaluation [53/100]
INFO:tensorflow:Evaluation [54/100]
INFO:tensorflow:Evaluation [55/100]
INFO:tensorflow:Evaluation [56/100]
INFO:tensorflow:Evaluation [57/100]
INFO:tensorflow:Evaluation [58/100]
INFO:tensorflow:Evaluation [59/100]
INFO:tensorflow:Evaluation [60/100]
INFO:tensorflow:Evaluation [61/100]
INFO:tensorflow:Evaluation [62/100]
INFO:tensorflow:Evaluation [63/100]
INFO:tensorflow:Evaluation [64/100]
INFO:tensorflow:Evaluation [65/100]
INFO:tensorflow:Evaluation [66/100]
INFO:tensorflow:Evaluation [67/100]
INFO:tensorflow:Evaluation [68/100]
INFO:tensorflow:Evaluation [69/100]
INFO:tensorflow:Evaluation [70/100]
INFO:tensorflow:Evaluation [71/100]
INFO:tensorflow:Evaluation [72/100]
INFO:tensorflow:Evaluation [73/100]
INFO:tensorflow:Evaluation [74/100]
INFO:tensorflow:Evaluation [75/100]
INFO:tensorflow:Evaluation [76/100]
INFO:tensorflow:Evaluation [77/100]
INFO:tensorflow:Evaluation [78/100]
INFO:tensorflow:Evaluation [79/100]
INFO:tensorflow:Evaluation [80/100]
INFO:tensorflow:Evaluation [81/100]
INFO:tensorflow:Evaluation [82/100]
INFO:tensorflow:Evaluation [83/100]
INFO:tensorflow:Evaluation [84/100]
INFO:tensorflow:Evaluation [85/100]
INFO:tensorflow:Evaluation [86/100]
INFO:tensorflow:Evaluation [87/100]
INFO:tensorflow:Evaluation [88/100]
INFO:tensorflow:Evaluation [89/100]
INFO:tensorflow:Evaluation [90/100]
INFO:tensorflow:Evaluation [91/100]
INFO:tensorflow:Evaluation [92/100]
INFO:tensorflow:Evaluation [93/100]
INFO:tensorflow:Evaluation [94/100]
INFO:tensorflow:Evaluation [95/100]
INFO:tensorflow:Evaluation [96/100]
INFO:tensorflow:Evaluation [97/100]
INFO:tensorflow:Evaluation [98/100]
INFO:tensorflow:Evaluation [99/100]
INFO:tensorflow:Evaluation [100/100]
INFO:tensorflow:Finished evaluation at 2017-06-01-18:19:55
INFO:tensorflow:Saving dict for global step 1: accuracy = 0.500156, global_step = 1, loss = 0.701048
WARNING:tensorflow:Skipping summary for global_step, must be a float or np.float32.
INFO:tensorflow:Validation (step 1): accuracy = 0.500156, loss = 0.701048, global_step = 1
---------------------------------------------------------------------------
InvalidArgumentError Traceback (most recent call last)
/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1038 try:
-> 1039 return fn(*args)
1040 except errors.OpError as e:
/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
1020 feed_dict, fetch_list, target_list,
-> 1021 status, run_metadata)
1022
/usr/lib/python3.4/contextlib.py in __exit__(self, type, value, traceback)
65 try:
---> 66 next(self.gen)
67 except StopIteration:
/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/errors_impl.py in raise_exception_on_not_ok_status()
465 compat.as_text(pywrap_tensorflow.TF_Message(status)),
--> 466 pywrap_tensorflow.TF_GetCode(status))
467 finally:
InvalidArgumentError: indices[53,143] = 88584 is not in [0, 88584)
[[Node: embedding_1/Gather = Gather[Tindices=DT_INT32, Tparams=DT_FLOAT, validate_indices=true, _device="/job:localhost/replica:0/task:0/cpu:0"](embedding_1/embeddings/read, embedding_1/Cast)]]
During handling of the above exception, another exception occurred:
InvalidArgumentError Traceback (most recent call last)
<ipython-input-10-1b088922882b> in <module>()
54
55 # run experiment
---> 56 learn_runner.run(generate_experiment_fn(), '/tmp/outputdir')
/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/learn_runner.py in run(experiment_fn, output_dir, schedule)
109 schedule = schedule or _get_default_schedule(config)
110
--> 111 return _execute_schedule(experiment, schedule)
112
113
/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/learn_runner.py in _execute_schedule(experiment, schedule)
44 logging.error('Allowed values for this experiment are: %s', valid_tasks)
45 raise TypeError('Schedule references non-callable member %s' % schedule)
---> 46 return task()
47
48
/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/experiment.py in train_and_evaluate(self)
429 name=eval_dir_suffix, hooks=self._eval_hooks
430 )]
--> 431 self.train(delay_secs=0)
432
433 eval_result = self._estimator.evaluate(input_fn=self._eval_input_fn,
/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/experiment.py in train(self, delay_secs)
228 return self._estimator.fit(input_fn=self._train_input_fn,
229 max_steps=self._train_steps,
--> 230 monitors=self._train_monitors + extra_hooks)
231
232 def evaluate(self, delay_secs=None):
/usr/local/lib/python3.4/dist-packages/tensorflow/python/util/deprecation.py in new_func(*args, **kwargs)
279 _call_location(), decorator_utils.get_qualified_name(func),
280 func.__module__, arg_name, date, instructions)
--> 281 return func(*args, **kwargs)
282 new_func.__doc__ = _add_deprecated_arg_notice_to_docstring(
283 func.__doc__, date, instructions)
/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/estimators/estimator.py in fit(self, x, y, input_fn, steps, batch_size, monitors, max_steps)
428 hooks.append(basic_session_run_hooks.StopAtStepHook(steps, max_steps))
429
--> 430 loss = self._train_model(input_fn=input_fn, hooks=hooks)
431 logging.info('Loss for final step: %s.', loss)
432 return self
/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/estimators/estimator.py in _train_model(self, input_fn, hooks)
976 loss = None
977 while not mon_sess.should_stop():
--> 978 _, loss = mon_sess.run([model_fn_ops.train_op, model_fn_ops.loss])
979 summary_io.SummaryWriterCache.clear()
980 return loss
/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py in run(self, fetches, feed_dict, options, run_metadata)
482 feed_dict=feed_dict,
483 options=options,
--> 484 run_metadata=run_metadata)
485
486 def should_stop(self):
/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py in run(self, fetches, feed_dict, options, run_metadata)
818 feed_dict=feed_dict,
819 options=options,
--> 820 run_metadata=run_metadata)
821 except _PREEMPTION_ERRORS as e:
822 logging.info('An error was raised. This may be due to a preemption in '
/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py in run(self, *args, **kwargs)
774
775 def run(self, *args, **kwargs):
--> 776 return self._sess.run(*args, **kwargs)
777
778
/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py in run(self, fetches, feed_dict, options, run_metadata)
928 feed_dict=feed_dict,
929 options=options,
--> 930 run_metadata=run_metadata)
931
932 for hook in self._hooks:
/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py in run(self, *args, **kwargs)
774
775 def run(self, *args, **kwargs):
--> 776 return self._sess.run(*args, **kwargs)
777
778
/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
776 try:
777 result = self._run(None, fetches, feed_dict, options_ptr,
--> 778 run_metadata_ptr)
779 if run_metadata:
780 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
980 if final_fetches or final_targets:
981 results = self._do_run(handle, final_targets, final_fetches,
--> 982 feed_dict_string, options, run_metadata)
983 else:
984 results = []
/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
1030 if handle is None:
1031 return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1032 target_list, options, run_metadata)
1033 else:
1034 return self._do_call(_prun_fn, self._session, handle, feed_dict,
/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
1050 except KeyError:
1051 pass
-> 1052 raise type(e)(node_def, op, message)
1053
1054 def _extend_graph(self):
InvalidArgumentError: indices[53,143] = 88584 is not in [0, 88584)
[[Node: embedding_1/Gather = Gather[Tindices=DT_INT32, Tparams=DT_FLOAT, validate_indices=true, _device="/job:localhost/replica:0/task:0/cpu:0"](embedding_1/embeddings/read, embedding_1/Cast)]]
Caused by op 'embedding_1/Gather', defined at:
File "/usr/lib/python3.4/runpy.py", line 170, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib/python3.4/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/local/lib/python3.4/dist-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/usr/local/lib/python3.4/dist-packages/traitlets/config/application.py", line 658, in launch_instance
app.start()
File "/usr/local/lib/python3.4/dist-packages/ipykernel/kernelapp.py", line 477, in start
ioloop.IOLoop.instance().start()
File "/usr/local/lib/python3.4/dist-packages/zmq/eventloop/ioloop.py", line 177, in start
super(ZMQIOLoop, self).start()
File "/usr/local/lib/python3.4/dist-packages/tornado/ioloop.py", line 888, in start
handler_func(fd_obj, events)
File "/usr/local/lib/python3.4/dist-packages/tornado/stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/zmq/eventloop/zmqstream.py", line 440, in _handle_events
self._handle_recv()
File "/usr/local/lib/python3.4/dist-packages/zmq/eventloop/zmqstream.py", line 472, in _handle_recv
self._run_callback(callback, msg)
File "/usr/local/lib/python3.4/dist-packages/zmq/eventloop/zmqstream.py", line 414, in _run_callback
callback(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tornado/stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/ipykernel/kernelbase.py", line 283, in dispatcher
return self.dispatch_shell(stream, msg)
File "/usr/local/lib/python3.4/dist-packages/ipykernel/kernelbase.py", line 235, in dispatch_shell
handler(stream, idents, msg)
File "/usr/local/lib/python3.4/dist-packages/ipykernel/kernelbase.py", line 399, in execute_request
user_expressions, allow_stdin)
File "/usr/local/lib/python3.4/dist-packages/ipykernel/ipkernel.py", line 196, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/usr/local/lib/python3.4/dist-packages/ipykernel/zmqshell.py", line 533, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/IPython/core/interactiveshell.py", line 2683, in run_cell
interactivity=interactivity, compiler=compiler, result=result)
File "/usr/local/lib/python3.4/dist-packages/IPython/core/interactiveshell.py", line 2793, in run_ast_nodes
if self.run_code(code, result):
File "/usr/local/lib/python3.4/dist-packages/IPython/core/interactiveshell.py", line 2847, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-10-1b088922882b>", line 56, in <module>
learn_runner.run(generate_experiment_fn(), '/tmp/outputdir')
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/learn_runner.py", line 111, in run
return _execute_schedule(experiment, schedule)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/learn_runner.py", line 46, in _execute_schedule
return task()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/experiment.py", line 431, in train_and_evaluate
self.train(delay_secs=0)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/experiment.py", line 230, in train
monitors=self._train_monitors + extra_hooks)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/util/deprecation.py", line 281, in new_func
return func(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/estimators/estimator.py", line 430, in fit
loss = self._train_model(input_fn=input_fn, hooks=hooks)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/estimators/estimator.py", line 927, in _train_model
model_fn_ops = self._get_train_ops(features, labels)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/estimators/estimator.py", line 1132, in _get_train_ops
return self._call_model_fn(features, labels, model_fn_lib.ModeKeys.TRAIN)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/learn/python/learn/estimators/estimator.py", line 1103, in _call_model_fn
model_fn_results = self._model_fn(features, labels, **kwargs)
File "<ipython-input-9-5a93a00d7e9b>", line 9, in model_fn
embed = Embedding(num_words, 128)(features['x'])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/keras/python/keras/engine/topology.py", line 578, in __call__
output = self.call(inputs, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/keras/python/keras/layers/embeddings.py", line 144, in call
out = K.gather(self.embeddings, inputs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/contrib/keras/python/keras/backend.py", line 1161, in gather
return array_ops.gather(reference, indices)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/gen_array_ops.py", line 1207, in gather
validate_indices=validate_indices, name=name)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/op_def_library.py", line 768, in apply_op
op_def=op_def)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/ops.py", line 2336, in create_op
original_op=self._default_original_op, op_def=op_def)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/ops.py", line 1228, in __init__
self._traceback = _extract_stack()
InvalidArgumentError (see above for traceback): indices[53,143] = 88584 is not in [0, 88584)
[[Node: embedding_1/Gather = Gather[Tindices=DT_INT32, Tparams=DT_FLOAT, validate_indices=true, _device="/job:localhost/replica:0/task:0/cpu:0"](embedding_1/embeddings/read, embedding_1/Cast)]]
In [205]:
# generate predictions
preds = list(estimator.predict(input_fn=test_input_fn))
In [224]:
# number of outputs we want to see the prediction
NUM_EVAL = 10
def check_prediction(x, y, p, index):
print('prediction:', np.argmax(p[index]))
print('target:', np.argmax(y[index]))
print('sentence:', get_sentence(x[index]))
for i in range(NUM_EVAL):
index = np.random.randint(limit)
print('test:', index)
print('-' * 30)
print(np.asarray(x_test[index], dtype=np.int32))
check_prediction(x_test, y_test_one_hot, preds, index)
print()
test: 3022
------------------------------
[ 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 1 31 7 4 249 108 13 28 126
110 21 576 163 24 23 1288 151 175 136 15 1367
233 8 81 19 8506 883 229 42 116 9 913 4621
56 10 10 13 386 14 22 18 32 7446 9315 38
78 15 25 144 5566 83 2129 33 49 213 11 4
22 82 67 4550 12556 4 23454 18 4 172 282 10
10 259 334 798 14 22 40 4 3196 549 18 451
7 503 102 7 265 10 10 358]
prediction: 0
target: 0
sentence: the by br of sure many was one your life not told makes his are thinks old us scenes for places last in people film identified reading guy it's love it weird gangsters she i i was wonderful as you but an october hispanic her do for have real daughter's first delivered they good come this of you other can september endurance of state's but of every everything i i especially fan typical as you just of kurt type but beginning br child characters br screen i i use
test: 2210
------------------------------
[ 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
1 13 92 124 51 49 84 71 536 54 36 301
14 20 16 78 12 16 87 356 1478 4551 422 12
16 364 352 5 4 318 962 1177 14 21 15 9
24 51 25 67 6 1478 20 18 25 106 12 18
1478 82 2901 16 321 13 258 14 20 642 5 441
13 131 4080 56 54 13 2283 1478 23 15 4998 44554
150 13 80 974 14 20 9 24 18 316 48 25
92 40 503 102 25 242 528 40 14 31 48 25
17990 194 352 302 5 156 7010 788 21 48 25 40
3688 1231 5 125 4 1513 920 924 1847 14 20 9
18 25 10 10 11340 4 711 896]
prediction: 0
target: 1
sentence: the was then does when good great than girls no from takes as on with do that with him need hasn't inferior title that with along wrong to of excellent personal producers as not for it his when have can is hasn't on but have character that but hasn't other regard with shot was although as on started to overall was these occasion she no was winner hasn't are for spain thriller's years was into appear as on it his but seeing what have then just child characters have away parts just as by what have olympics thought wrong instead to before dumped elements not what have just subsequent values to better of jerry realize writers players as on it but have i i 1928 of easily imdb
test: 2456
------------------------------
[ 49 84 203 135 15 241 16024 2957 99 522 8 15
1614 2425 190 13 81 24 104 40 15 88 15 13131
16032 4 20 8 97 6 53 1614 4762 4085 23 4
904 19 53 46459 5 1663 305 7 4 578 2097 7
5614 5 8511 63 25861 85 102 19 729 537 10 10
23 4 105 39 241 16024 75 70 169 1807 39 4
3955 438 39 167 5 2979 308 5701 1677 3579 102 8
4 1629 189 108 4 307 2549 4 4116 5155 4 14654
525 12 62 28 77 55 776 8 97 98 413 11
4086 5249 4786 34 68 5239 11 4 881 21 12 9
142 5481 15 4 881 10022 4 2118 8 97 98 147
84 19 3084 712 63 11 53 42 329 2461 101 415
70 169 23 27 41 1320 8 8690 10 10 241 16024
9 35 321 20 625 64 1854 9 15 4 277 764
6 227 3889 21 63 20999 15 19 6 176 7 1123
791 12 80 30 55 221 8 67 375 3451 237 29
47 69 6 87 2016 19 14 20]
prediction: 0
target: 1
sentence: good great action why for am venturing risk movies etc in for therefore promising take was people his two just for most for clinical infernal of on in could is up therefore nuts aid are of directors film up patricide to sleep john br of living jennifer br insipid to bliss really barbet because characters film sets obviously i i are of films or am venturing bad well same dressed or of conversations entertaining or going to attacked audience rhythm desperate returning characters in of listen fact many of version occasional of official subjected of bearded won't that story one will time actual in could any lost this cary ross legal who were filling this of dr not that it back lip for of dr researched of leader in could any now great film spin words really this up it's read plans think piece well same are be about mistake in emmy i i am venturing it so shot on david see knowledge it for of once red is far recognized not really timone for film is quite br biggest follow that into at time done in can couple korean he's all there me is him sea film as on
test: 2508
------------------------------
[ 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 1 1294 558 18 17279 5 4 312 2125 7
45 702 930 60 19 15 13 426 870 143 101 53
7 15 4995 54 36 385 638 4 154 1395 2254 18
11095 13 520 56 5 15243 12 9 14 4 118 15
6196 1308 70 216 56 19 18 2324 3527 89 44 49
154 2239 305 4 326 7 6 24199 318 1929 4814 16
78 195 12 186 40 36 473 8 607 278 34 260
285 814 46 11 4 1411 51 53 70 13 135 12
16 38 373 15 13 92 104 13 70 216 56 19
195 411 8 7397 18 834 8 733 12 21 12 272
40 31 53 347 80 81 12 607 129 58 387 584
129 278 23 14 912 7 6 22]
prediction: 0
target: 0
sentence: the unbelievable moment but somebody's to of during awards br if future deep which film for was against expected i'm think up br for pink no from moments light of work expectations choose but shuttle was eyes she to curator that it as of where for joker journey well saw she film but substance ego don't has good work produce john of less br is maidens excellent kinda attorney with do that's that horror just from children in looked sense who our dvd means some this of bother when up well was why that with her tell for was then two was well saw she film that's dialogue in mins but dance in mystery that not that different just by up top into people that looked man my episode including man sense are as male br is you
test: 687
------------------------------
[ 1674 11 61 523 7339 8925 6 389 284 61 205 559
1131 639 12 1932 1578 72 7 111 3080 13 28 2051
11 61 113 38 13 901 14 17 6 6493 8 185
5 154 57 31 9 12224 8 845 9 11 1130 18
178 10 10 13 784 8 135 15 341 9 170 8
593 21 14 65 16 4 86 58 13 219 341 17
24 6 78 155 21 6 173 7 113 16 179 1227
11 14 65 13 191 339 12 12 16 6 113 2546
561 18 72 5 80 40691 30 10 10 5 18 15
10 10 13 119 25 1240 15126 5 13 131 377 76
7 129 157 11 113 56 366 4 251 25 1131 5
129 14375 5582 123 5 1294 25 1620 15126 18 1895 4
23100 1239 1202 13 81 1294 25 32 7 129 1001 3010
11816 5 30276 10 10 4 868 9 43 6 868 5
6 123 9 43 6 123 21 1546 80 987 1236 15
1437 4 182 80 124 10 10 1294 25 10 10 5
18 32 37 824 72 837 1236 837]
prediction: 0
target: 1
sentence: rip this only directed gigantic percent is small shows only right took effective hilarious that cage inspired we br plot overcome was one window this only acting her was situation as movie is immortal in got to work even by it hardship in lee it this consider but want i i was tale in why for home it part in ago not as their with of how my was least home movie his is do 10 not is lot br acting with world steve this as their was big 3 that that with is acting princess writer but we to into likeability at i i to but for i i was did have yeah feud to was these start get br man another this acting she friends of hard have effective to man gigi overlong ever to unbelievable have ready feud but japan of that'd grade shooting was people unbelievable have an br man inside ashamed anatomy to presson i i of who's it out is who's to is ever it out is ever not flying into pay alive for victim of young into does i i unbelievable have i i to but an like tom we third alive third
test: 3171
------------------------------
[ 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 1 4 1603 1315 9 6 2723
212 368 7 105 1075 11 18899 7 6 147 65 63
716 6 185 26897 1475 7455 37 304 6 292 8248 35
1603 1315 5 461 18916 11 6 588 736 7023 48 25
70 683 12 15 14 364 6827 106 1367 57 836 57
3487 57 453 57 206 3669 171 163 388 6 8419 7
1006 5 220 57 883 4 1603 1315 9 5084 2441 2582
1644 18 64 4 91 14949 5721 17924]
prediction: 0
target: 0
sentence: the of nasty wow it is fabulous must truly br films convincing this lizards br is now their really local is got primed bruce brent like beautiful is together frat so nasty wow to friend prosthetics this is please appears followers what have well single that for as along mythology character places even brought even homage even lives even without documentaries again makes understand is mercifully br twist to family even reading of nasty wow it confess display forest martial but see of its inimitable representation teenaged
test: 2408
------------------------------
[12614 34 4 4927 9338 17 73 17 112 12868 34 4
10154 7 4 719 10888 1333 4467 7 41 2813 5150 8
1183 11 9 7415 33 112 88 7 27 2920 5014 15
1212 12 203 82 28 8 81 19 90 112 6 28061
6746 21 141 26 5965 1212 151 13 2230 2913 196 159
36 71 6272 33 4 130 7 4 251 19039 16 269
8 79 1404 11 113 717 1126 439 9 397 4 65561
33 4 719 8894 1333 8 40 41 3285 5288 336 38
15 59 70 79 6 1304 19 4 24112 31360 428 554
42 38 13 8676 10 10 553 11 17441 94 862 398
913 3757 19 835 270 1328 5 7055 6 176 7 676
849 3600 48 25 71 502 8 870 143 12 8 45
1174 12 66 9 6 26683 50 9 15 888 951 151
7 138 122 38 111 7 4 204 156 24 994 17
3630 8 112 2959 34 3185 37 23 2300 33 222 610
68 9538 279 36 43 1173 1803 42 382 13 1781 36
165 332 4 229 1231 8 4 204]
prediction: 0
target: 0
sentence: erica who of performs brits movie much movie never hiking who of infectious br of entertainment democratic blue pretending br about cabin tube in situations this it rice they never most br be amongst whoopi for cult that action other one in people film made never is subverting arranged not should he dylan cult old was fill bat both new from than confront they of here br of hard ferocious with looks in also stands this acting sequence runs she's it often of hoi they of entertainment expresses blue in just about gag teaches help her for would well also is terrific film of unfit hallam's picture heard it's her was diaz i i killed this panther make zombie keep weird providing film imagine place g to israeli is quite br turned de achievement what have than turns in expected i'm that in if showed that had it is dark' more it for 20 cheesy old br such off her plot br of i've before his fighting movie shortly in never sold who suggests like are strongly they there's song were snippets reason from out win johnny it's came was twenty from look you're of guy values in of i've
test: 2095
------------------------------
[ 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 1 1604 3214 558 13 3117 260 8 106 14 22
689 211 11 61 2481 313 5 16 12 2150 6 65
7 383 5 1865 5 55 706 5639 116 894 25 26
33 13774 112 579 8 106 14 798 12 32350 2673 43
2085 29 566 511 26339 32805 82 520 4 91 2150 116
126 11 101 7 27 108 50 16 24 31 173 7
14 22 93 72 462 42 97 72 1783 42 235 101
1426 13 62 30 770 84 165 510 14 13 28 306
49 2150 108 11 61 113 21 14 62 30 11 61
677 249 108 126 4 228 11 12 286 52 5 4
769 13 104 16 93 56 34 6 378 7 493 37
5175 6 7757 5 43 1829 180 1040 747 3985 757 5
95 93 12 83 6 22 427 2150]
prediction: 0
target: 0
sentence: the arthur assistant moment was technique our in character as you attention gets this only latest everyone to with that commercial is their br recommend to kate to time 8 firing love superb have he they cylon never coming in character as typical that florida's alice out ages all hit favorite lives' vests other eyes of its commercial love your this think br be many more with his by lot br as you way we dark it's could we wanting it's might think fascinating was story at named great look town as was one himself good commercial many this only acting not as story at this only strange sure many your of making this that three very to of using was two with way she who is stars br behind like griffith is lo to out superman things cold theater kidding mention to them way that first is you boy commercial
test: 1266
------------------------------
[ 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 1 689 183 3704 72 17 8 14 22
4633 12 381 38280 10303 7 1063 2246 37 9 210 6
1742 8 106 12 944 530 162 9109 524 5182 12751 12
9 814 11 9058 19 4 162 9109 22 17150 65044 6
464 1178 12 8 72 190 13 16 1254 685 4 226
769 9 1755 5 2732 19 55 117 3473 116 9 478
21 5182 9 472 467 343 4 85 156 5 1507 26
32 864 21 13 244 7745 19 98 32 1212 39 4
327 26406 63 26 20674 469 4 20 5 10303 5 5182
14 22 218 55 52 38 4 1515 7 4 65 9
92 106 12 894 25 66 181 8]
prediction: 0
target: 0
sentence: the attention seems 50's we movie in as you greedy that mean illuminator sybil br members sheriff like it point is x in character that mess themselves actually archer horrible masks idyllic that it means this pond film of actually archer you lynchian gourmet is under culture that in we take was with fair due of script using it grand to summary film time over canada love it sound not masks it 4 short of because before to system he an believable not was rather crooks film any an cult or of mind enhancements really he winch unfortunately of on to sybil to masks as you interesting time very her of creative br of their it then character that superb have had pretty in
test: 1405
------------------------------
[ 1041 14 1253 7 43370 4 9 55 52 849 210 31
7 61 1640 12344 537 13 16 3786 15 12 238 30
44 58115 73 12 16 6 117 227 21 64 6 117
227 5 4 130 4 959 16 1562 441 628 22887 1999
8 30 73 15731 19492 17 12344 5 17 409 28 3381
46 4 85 323 7 14 123 9 8268 33269 37 1348
100 24 30 26808 17 717 24671 13 82 66 510 11739
37 16 1466 23 17 4 2410 84728 11679 21 66 4
973 16 179 9197 469 10 10 31 213 9 5290 125
18 4 24 112 502 8 4974 82015 99 111 9400 83
4 477 3496 234 4 970 19 4 80 5 313 7945
16 32 6 227 1744 261 13 161 330 89 36 19518
56 4 2438 7 4 223 11 4 274 4 223 2840
9 66 179 2732 4 477 388 54 4 2748 9 2029
190 26 66 966 73 224 5 13 258 4 55 130
54 36 32 563 179 1301 14 9 66 31 7 4
55 118 7 4 12344 201 38 230]
prediction: 0
target: 1
sentence: plain as ability br postpone of it time very de point by br only asks persistent obviously was with craig for that 2 at has necking much that with is over far not see is over far to of here of effect with ground overall english deneuve we've in at much borne pitying movie persistent to movie live one tea some of because idea br as ever it cohesive fruity like justice after his at debauchery movie sequence stabbings was other had town veronika like with henry are movie of witness padarouski simulated not had of powerful with world sailors unfortunately i i by come it loneliness better but of his never turns in expressed unidimensional movies plot downtown first of amazing brilliance since of casting film of into to everyone outrageously with an is far returns believe was nothing second don't from zion she of forgettable br of whole this of ending of whole children's it had world summary of amazing understand no of lisa it remembered take he had monster much bit to was although of time here no from an cannot world date as it had by br of time where br of persistent original her anything
In [ ]:
Content source: mari-linhares/tensorflow-workshop
Similar notebooks: