In [2]:
import tarfile
import re
import urllib.request
import os
import random

class ImdbMovieReviews:
    """
    The movie review dataset is offered by Stanford University’s AI department:
    http://ai.stanford.edu/~amaas/data/sentiment/. It comes as a compressed  tar  archive where
    positive and negative reviews can be found as text files in two according folders. We apply
    the same pre-processing to the text as in the last section: Extracting plain words using a
    regular expression and converting to lower case.
    """
    DEFAULT_URL = \
        'http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz'
    TOKEN_REGEX = re.compile(r'[A-Za-z]+|[!?.:,()]')
    
    def __init__(self):
        self._cache_dir = './imdb'
        self._url = 'http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz'
        
        if not os.path.isfile(self._cache_dir):
            urllib.request.urlretrieve(self._url, self._cache_dir)
        self.filepath = self._cache_dir

    def __iter__(self):
        with tarfile.open(self.filepath) as archive:
            items = archive.getnames()
            for filename in archive.getnames():
                if filename.startswith('aclImdb/train/pos/'):
                    yield self._read(archive, filename), True
                elif filename.startswith('aclImdb/train/neg/'):
                    yield self._read(archive, filename), False
                    
    def _read(self, archive, filename):
        with archive.extractfile(filename) as file_:
            data = file_.read().decode('utf-8')
            data = type(self).TOKEN_REGEX.findall(data)
            data = [x.lower() for x in data]
            return data

In [3]:
import numpy as np
# Spacy is my favourite nlp framework, which havu builtin word embeddings trains on wikipesia
from spacy.en import English

class Embedding:
    
    def __init__(self, length):
#          spaCy makes using word vectors very easy. 
#             The Lexeme , Token , Span  and Doc  classes all have a .vector property,
#             which is a 1-dimensional numpy array of 32-bit floats:
        self.parser = English()
        self._length = length
        self.dimensions = 300
        
    def __call__(self, sequence):
        data = np.zeros((self._length, self.dimensions))
        # you can access known words from the parser's vocabulary
        embedded = [self.parser.vocab[w].vector for w in sequence]
        data[:len(sequence)] = embedded
        return data

In [23]:
from lazy import lazy

class SequenceClassificationModel:
    def __init__(self, data, params):
        self.params = params
        self._create_placeholders()
        self.prediction
        self.cost
        self.error
        self.optimize
        self.global_step = 0
        self._create_summaries()
        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())
    
    def _create_placeholders(self):
        with tf.name_scope("data"):
            self.data = tf.placeholder(tf.float32, [None, self.params.seq_length, self.params.embed_length])
            self.target = tf.placeholder(tf.float32, [None, 2])
  
    def _create_summaries(self):
        with tf.name_scope("summaries"):
            tf.summary.scalar('loss', self.cost)
            tf.summary.scalar('erroe', self.error)
            self.summary = tf.summary.merge_all()
            saver = tf.train.Saver()
            
    @lazy
    def length(self):
        with tf.name_scope("seq_length"):
            used = tf.sign(tf.reduce_max(tf.abs(self.data), reduction_indices=2))
            length = tf.reduce_sum(used, reduction_indices=1)
            length = tf.cast(length, tf.int32)
        return length
    
    @lazy
    def prediction(self):
        with tf.name_scope("recurrent_layer"):
            output, _ = tf.nn.dynamic_rnn(
                self.params.rnn_cell(self.params.rnn_hidden),
                self.data,
                dtype=tf.float32,
                sequence_length=self.length
            )
        last = self._last_relevant(output, self.length)

        with tf.name_scope("softmax_layer"):
            num_classes = int(self.target.get_shape()[1])
            weight = tf.Variable(tf.truncated_normal(
                [self.params.rnn_hidden, num_classes], stddev=0.01))
            bias = tf.Variable(tf.constant(0.1, shape=[num_classes]))
            prediction = tf.nn.softmax(tf.matmul(last, weight) + bias)
        return prediction
    
    @lazy
    def cost(self):
        cross_entropy = -tf.reduce_sum(self.target * tf.log(self.prediction))
        return cross_entropy
    
    @lazy
    def error(self):
        self.mistakes = tf.not_equal(
            tf.argmax(self.target, 1), tf.argmax(self.prediction, 1))
        return tf.reduce_mean(tf.cast(self.mistakes, tf.float32))
    
    @lazy
    def optimize(self):
        with tf.name_scope("optimization"):
            gradient = self.params.optimizer.compute_gradients(self.cost)
            if self.params.gradient_clipping:
                limit = self.params.gradient_clipping
                gradient = [
                    (tf.clip_by_value(g, -limit, limit), v)
                    if g is not None else (None, v)
                    for g, v in gradient]
            optimize = self.params.optimizer.apply_gradients(gradient)
        return optimize
    
    @staticmethod
    def _last_relevant(output, length):
        with tf.name_scope("last_relevant"):
            # As of now, TensorFlow only supports indexing along the first dimension, using
            # tf.gather() . We thus flatten the first two dimensions of the output activations from their
            # shape of  sequences x time_steps x word_vectors  and construct an index into this resulting tensor.
            batch_size = tf.shape(output)[0]
            max_length = int(output.get_shape()[1])
            output_size = int(output.get_shape()[2])

            # The index takes into account the start indices for each sequence in the flat tensor and adds
            # the sequence length to it. Actually, we only add  length - 1  so that we select the last valid
            # time step.
            index = tf.range(0, batch_size) * max_length + (length - 1)
            flat = tf.reshape(output, [-1, output_size])
            relevant = tf.gather(flat, index)
        return relevant
    
    def train(self, batches, save_prefix, save_every=10):
        saver = tf.train.Saver()
        if os.path.isdir('./saved/'):
            saver.restore(self.sess, tf.train.latest_checkpoint('./saved/'))
        else:
            os.makedirs('saved')
        summary_writer = tf.summary.FileWriter('graphs/run{}'.format(self.global_step), self.sess.graph)
        self.global_step += 1
        for index, batch in enumerate(batches):
            feed = {model.data: batch[0], model.target: batch[1]}
            error, _, summary_str = self.sess.run([model.error, model.optimize, model.summary], feed)
            print('{}: {:3.1f}%'.format(index + 1, 100 * error))
            if index % save_every == 0:
                summary_writer.add_summary(summary_str, index)
                summary_writer.flush()
            if index % save_every == 0:
                save_path = os.path.join('checkpoints', save_prefix)
                print('saving...', save_path)
                saver.save(self.sess, save_path, global_step=index)
        saver.save(self.sess, os.path.join('checkpoints', save_prefix + '_final'))

    def predict_proba(self, data):
        feed = {model.data: data, }
        prediction = self.sess.run([model.prediction], feed)        
        return prediction
        
    def close(self):
        tf.reset_default_graph()
        self.session.close()

In [7]:
def preprocess_batched(iterator, length, embedding, batch_size):
    iterator = iter(iterator)
    while True:
        data = np.zeros((batch_size, length, embedding.dimensions))
        target = np.zeros((batch_size, 2))
        for index in range(batch_size):
            text, label = next(iterator)
            data[index] = embedding(text)
            target[index] = [1, 0] if label else [0, 1]
        yield data, target

In [8]:
reviews = list(ImdbMovieReviews())

In [9]:
random.shuffle(reviews)

In [10]:
length = max(len(x[0]) for x in reviews)
embedding = Embedding(length)

In [11]:
from attrdict import AttrDict

params = AttrDict(
    rnn_cell=tf.contrib.rnn.GRUCell,
    rnn_hidden=300,
    optimizer=tf.train.RMSPropOptimizer(0.002),
    batch_size=20,
    gradient_clipping=100,
    seq_length=length,
    embed_length=embedding.dimensions
)

In [16]:
batches = preprocess_batched(reviews, length, embedding, params.batch_size)

In [24]:
tf.reset_default_graph()

model = SequenceClassificationModel(data, params)


/home/kurbanov/Soft/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/gradients_impl.py:95: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.
  "Converting sparse IndexedSlices to a dense Tensor of unknown shape. "

In [25]:
model.train(batches, save_prefix='simple-rnn')


1: 35.0%
saving... checlkpoints/simple-rnn
2: 50.0%
3: 65.0%
4: 45.0%
5: 55.0%
6: 45.0%
7: 60.0%
8: 50.0%
9: 40.0%
10: 50.0%
11: 45.0%
saving... checlkpoints/simple-rnn
12: 25.0%
13: 50.0%
14: 40.0%
15: 50.0%
16: 55.0%
17: 55.0%
18: 35.0%
19: 45.0%
20: 35.0%
21: 55.0%
saving... checlkpoints/simple-rnn
22: 60.0%
23: 45.0%
24: 55.0%
25: 55.0%
26: 40.0%
27: 45.0%
28: 65.0%
29: 45.0%
30: 35.0%
31: 50.0%
saving... checlkpoints/simple-rnn
32: 25.0%
33: 35.0%
34: 65.0%
35: 45.0%
36: 45.0%
37: 50.0%
38: 45.0%
39: 60.0%
40: 55.0%
41: 60.0%
saving... checlkpoints/simple-rnn
42: 45.0%
43: 70.0%
44: 50.0%
45: 50.0%
46: 50.0%
47: 60.0%
48: 40.0%
49: 50.0%
50: 60.0%
51: 35.0%
saving... checlkpoints/simple-rnn
52: 45.0%
53: 40.0%
54: 35.0%
55: 50.0%
56: 70.0%
57: 40.0%
58: 45.0%
59: 40.0%
60: 45.0%
61: 55.0%
saving... checlkpoints/simple-rnn
62: 45.0%
63: 35.0%
64: 60.0%
65: 50.0%
66: 15.0%
67: 60.0%
68: 55.0%
69: 40.0%
70: 40.0%
71: 45.0%
saving... checlkpoints/simple-rnn
72: 50.0%
73: 50.0%
74: 25.0%
75: 50.0%
76: 35.0%
77: 45.0%
78: 35.0%
79: 55.0%
80: 50.0%
81: 60.0%
saving... checlkpoints/simple-rnn
82: 55.0%
83: 55.0%
84: 45.0%
85: 40.0%
86: 25.0%
87: 40.0%
88: 55.0%
89: 45.0%
90: 45.0%
91: 55.0%
saving... checlkpoints/simple-rnn
92: 45.0%
93: 25.0%
94: 45.0%
95: 55.0%
96: 60.0%
97: 50.0%
98: 50.0%
99: 30.0%
100: 25.0%
101: 35.0%
saving... checlkpoints/simple-rnn
102: 40.0%
103: 50.0%
104: 50.0%
105: 40.0%
106: 45.0%
107: 50.0%
108: 50.0%
109: 35.0%
110: 60.0%
111: 70.0%
saving... checlkpoints/simple-rnn
112: 35.0%
113: 40.0%
114: 20.0%
115: 45.0%
116: 45.0%
117: 50.0%
118: 40.0%
119: 40.0%
120: 45.0%
121: 60.0%
saving... checlkpoints/simple-rnn
122: 55.0%
123: 55.0%
124: 50.0%
125: 30.0%
126: 45.0%
127: 60.0%
128: 40.0%
129: 40.0%
130: 40.0%
131: 45.0%
saving... checlkpoints/simple-rnn
132: 30.0%
133: 40.0%
134: 30.0%
135: 40.0%
136: 40.0%
137: 45.0%
138: 55.0%
139: 30.0%
140: 30.0%
141: 40.0%
saving... checlkpoints/simple-rnn
142: 45.0%
143: 40.0%
144: 35.0%
145: 45.0%
146: 55.0%
147: 45.0%
148: 60.0%
149: 50.0%
150: 30.0%
151: 40.0%
saving... checlkpoints/simple-rnn
152: 20.0%
153: 50.0%
154: 30.0%
155: 60.0%
156: 50.0%
157: 35.0%
158: 50.0%
159: 65.0%
160: 60.0%
161: 35.0%
saving... checlkpoints/simple-rnn
162: 35.0%
163: 25.0%
164: 35.0%
165: 45.0%
166: 35.0%
167: 55.0%
168: 50.0%
169: 40.0%
170: 30.0%
171: 45.0%
saving... checlkpoints/simple-rnn
172: 30.0%
173: 35.0%
174: 55.0%
175: 35.0%
176: 40.0%
177: 20.0%
178: 50.0%
179: 30.0%
180: 35.0%
181: 50.0%
saving... checlkpoints/simple-rnn
182: 60.0%
183: 10.0%
184: 45.0%
185: 60.0%
186: 30.0%
187: 35.0%
188: 45.0%
189: 35.0%
190: 50.0%
191: 25.0%
saving... checlkpoints/simple-rnn
192: 25.0%
193: 45.0%
194: 50.0%
195: 40.0%
196: 30.0%
197: 20.0%
198: 20.0%
199: 60.0%
200: 25.0%
201: 35.0%
saving... checlkpoints/simple-rnn
202: 10.0%
203: 45.0%
204: 35.0%
205: 35.0%
206: 40.0%
207: 40.0%
208: 40.0%
209: 75.0%
210: 40.0%
211: 15.0%
saving... checlkpoints/simple-rnn
212: 25.0%
213: 45.0%
214: 35.0%
215: 45.0%
216: 30.0%
217: 40.0%
218: 30.0%
219: 35.0%
220: 20.0%
221: 20.0%
saving... checlkpoints/simple-rnn
222: 25.0%
223: 25.0%
224: 35.0%
225: 35.0%
226: 15.0%
227: 30.0%
228: 40.0%
229: 25.0%
230: 20.0%
231: 20.0%
saving... checlkpoints/simple-rnn
232: 10.0%
233: 20.0%
234: 25.0%
235: 20.0%
236: 35.0%
237: 25.0%
238: 20.0%
239: 25.0%
240: 20.0%
241: 35.0%
saving... checlkpoints/simple-rnn
242: 25.0%
243: 15.0%
244: 20.0%
245: 15.0%
246: 15.0%
247: 20.0%
248: 30.0%
249: 25.0%
250: 15.0%
251: 35.0%
saving... checlkpoints/simple-rnn
252: 45.0%
253: 15.0%
254: 35.0%
255: 25.0%
256: 30.0%
257: 30.0%
258: 35.0%
259: 15.0%
260: 20.0%
261: 0.0%
saving... checlkpoints/simple-rnn
262: 10.0%
263: 40.0%
264: 20.0%
265: 20.0%
266: 15.0%
267: 10.0%
268: 10.0%
269: 25.0%
270: 20.0%
271: 20.0%
saving... checlkpoints/simple-rnn
272: 15.0%
273: 25.0%
274: 35.0%
275: 5.0%
276: 25.0%
277: 35.0%
278: 20.0%
279: 20.0%
280: 35.0%
281: 30.0%
saving... checlkpoints/simple-rnn
282: 20.0%
283: 25.0%
284: 10.0%
285: 20.0%
286: 15.0%
287: 25.0%
288: 35.0%
289: 30.0%
290: 15.0%
291: 10.0%
saving... checlkpoints/simple-rnn
292: 30.0%
293: 15.0%
294: 25.0%
295: 25.0%
296: 25.0%
297: 25.0%
298: 15.0%
299: 15.0%
300: 20.0%
301: 20.0%
saving... checlkpoints/simple-rnn
302: 15.0%
303: 20.0%
304: 25.0%
305: 20.0%
306: 20.0%
307: 25.0%
308: 25.0%
309: 15.0%
310: 40.0%
311: 40.0%
saving... checlkpoints/simple-rnn
312: 35.0%
313: 5.0%
314: 20.0%
315: 40.0%
316: 10.0%
317: 40.0%
318: 10.0%
319: 10.0%
320: 15.0%
321: 30.0%
saving... checlkpoints/simple-rnn
322: 10.0%
323: 25.0%
324: 10.0%
325: 20.0%
326: 20.0%
327: 30.0%
328: 25.0%
329: 20.0%
330: 10.0%
331: 25.0%
saving... checlkpoints/simple-rnn
332: 25.0%
333: 20.0%
334: 20.0%
335: 40.0%
336: 30.0%
337: 20.0%
338: 15.0%
339: 10.0%
340: 20.0%
341: 25.0%
saving... checlkpoints/simple-rnn
342: 25.0%
343: 10.0%
344: 20.0%
345: 15.0%
346: 20.0%
347: 20.0%
348: 25.0%
349: 10.0%
350: 5.0%
351: 15.0%
saving... checlkpoints/simple-rnn
352: 20.0%
353: 40.0%
354: 5.0%
355: 5.0%
356: 30.0%
357: 25.0%
358: 15.0%
359: 30.0%
360: 15.0%
361: 15.0%
saving... checlkpoints/simple-rnn
362: 10.0%
363: 20.0%
364: 30.0%
365: 20.0%
366: 20.0%
367: 5.0%
368: 15.0%
369: 5.0%
370: 30.0%
371: 25.0%
saving... checlkpoints/simple-rnn
372: 10.0%
373: 15.0%
374: 35.0%
375: 10.0%
376: 25.0%
377: 20.0%
378: 20.0%
379: 25.0%
380: 20.0%
381: 15.0%
saving... checlkpoints/simple-rnn
382: 5.0%
383: 20.0%
384: 10.0%
385: 5.0%
386: 20.0%
387: 15.0%
388: 15.0%
389: 15.0%
390: 10.0%
391: 10.0%
saving... checlkpoints/simple-rnn
392: 5.0%
393: 20.0%
394: 10.0%
395: 15.0%
396: 20.0%
397: 35.0%
398: 25.0%
399: 15.0%
400: 20.0%
401: 20.0%
saving... checlkpoints/simple-rnn
402: 20.0%
403: 5.0%
404: 20.0%
405: 5.0%
406: 10.0%
407: 10.0%
408: 35.0%
409: 40.0%
410: 10.0%
411: 25.0%
saving... checlkpoints/simple-rnn
412: 35.0%
413: 15.0%
414: 20.0%
415: 15.0%
416: 15.0%
417: 25.0%
418: 15.0%
419: 25.0%
420: 10.0%
421: 15.0%
saving... checlkpoints/simple-rnn
422: 15.0%
423: 15.0%
424: 15.0%
425: 20.0%
426: 20.0%
427: 10.0%
428: 25.0%
429: 35.0%
430: 25.0%
431: 25.0%
saving... checlkpoints/simple-rnn
432: 15.0%
433: 25.0%
434: 40.0%
435: 15.0%
436: 0.0%
437: 20.0%
438: 15.0%
439: 0.0%
440: 10.0%
441: 30.0%
saving... checlkpoints/simple-rnn
442: 10.0%
443: 25.0%
444: 25.0%
445: 15.0%
446: 15.0%
447: 5.0%
448: 15.0%
449: 20.0%
450: 25.0%
451: 35.0%
saving... checlkpoints/simple-rnn
452: 10.0%
453: 15.0%
454: 35.0%
455: 30.0%
456: 25.0%
457: 20.0%
458: 15.0%
459: 35.0%
460: 15.0%
461: 10.0%
saving... checlkpoints/simple-rnn
462: 15.0%
463: 15.0%
464: 10.0%
465: 20.0%
466: 20.0%
467: 20.0%
468: 20.0%
469: 20.0%
470: 30.0%
471: 15.0%
saving... checlkpoints/simple-rnn
472: 15.0%
473: 20.0%
474: 15.0%
475: 15.0%
476: 15.0%
477: 25.0%
478: 20.0%
479: 20.0%
480: 20.0%
481: 20.0%
saving... checlkpoints/simple-rnn
482: 15.0%
483: 15.0%
484: 25.0%
485: 15.0%
486: 10.0%
487: 25.0%
488: 15.0%
489: 30.0%
490: 20.0%
491: 15.0%
saving... checlkpoints/simple-rnn
492: 20.0%
493: 20.0%
494: 10.0%
495: 25.0%
496: 30.0%
497: 20.0%
498: 5.0%
499: 25.0%
500: 10.0%
501: 10.0%
saving... checlkpoints/simple-rnn
502: 15.0%
503: 5.0%
504: 15.0%
505: 25.0%
506: 10.0%
507: 30.0%
508: 20.0%
509: 25.0%
510: 25.0%
511: 10.0%
saving... checlkpoints/simple-rnn
512: 10.0%
513: 10.0%
514: 20.0%
515: 15.0%
516: 15.0%
517: 15.0%
518: 15.0%
519: 15.0%
520: 5.0%
521: 5.0%
saving... checlkpoints/simple-rnn
522: 25.0%
523: 20.0%
524: 20.0%
525: 15.0%
526: 35.0%
527: 25.0%
528: 5.0%
529: 20.0%
530: 15.0%
531: 15.0%
saving... checlkpoints/simple-rnn
532: 30.0%
533: 30.0%
534: 15.0%
535: 15.0%
536: 20.0%
537: 20.0%
538: 20.0%
539: 5.0%
540: 15.0%
541: 20.0%
saving... checlkpoints/simple-rnn
542: 15.0%
543: 10.0%
544: 20.0%
545: 10.0%
546: 20.0%
547: 10.0%
548: 15.0%
549: 10.0%
550: 10.0%
551: 30.0%
saving... checlkpoints/simple-rnn
552: 10.0%
553: 20.0%
554: 5.0%
555: 10.0%
556: 25.0%
557: 5.0%
558: 10.0%
559: 30.0%
560: 35.0%
561: 0.0%
saving... checlkpoints/simple-rnn
562: 15.0%
563: 15.0%
564: 10.0%
565: 25.0%
566: 5.0%
567: 20.0%
568: 30.0%
569: 10.0%
570: 20.0%
571: 30.0%
saving... checlkpoints/simple-rnn
572: 20.0%
573: 10.0%
574: 20.0%
575: 10.0%
576: 5.0%
577: 15.0%
578: 25.0%
579: 20.0%
580: 20.0%
581: 25.0%
saving... checlkpoints/simple-rnn
582: 20.0%
583: 15.0%
584: 10.0%
585: 15.0%
586: 0.0%
587: 5.0%
588: 15.0%
589: 20.0%
590: 10.0%
591: 10.0%
saving... checlkpoints/simple-rnn
592: 5.0%
593: 5.0%
594: 10.0%
595: 10.0%
596: 15.0%
597: 5.0%
598: 30.0%
599: 15.0%
600: 30.0%
601: 30.0%
saving... checlkpoints/simple-rnn
602: 30.0%
603: 20.0%
604: 15.0%
605: 10.0%
606: 15.0%
607: 20.0%
608: 10.0%
609: 15.0%
610: 10.0%
611: 15.0%
saving... checlkpoints/simple-rnn
612: 20.0%
613: 0.0%
614: 15.0%
615: 20.0%
616: 15.0%
617: 25.0%
618: 5.0%
619: 15.0%
620: 30.0%
621: 20.0%
saving... checlkpoints/simple-rnn
622: 25.0%
623: 20.0%
624: 20.0%
625: 20.0%
626: 20.0%
627: 15.0%
628: 5.0%
629: 10.0%
630: 0.0%
631: 35.0%
saving... checlkpoints/simple-rnn
632: 15.0%
633: 10.0%
634: 25.0%
635: 30.0%
636: 10.0%
637: 20.0%
638: 15.0%
639: 40.0%
640: 25.0%
641: 20.0%
saving... checlkpoints/simple-rnn
642: 5.0%
643: 30.0%
644: 15.0%
645: 10.0%
646: 30.0%
647: 10.0%
648: 15.0%
649: 15.0%
650: 25.0%
651: 25.0%
saving... checlkpoints/simple-rnn
652: 25.0%
653: 15.0%
654: 30.0%
655: 10.0%
656: 15.0%
657: 0.0%
658: 10.0%
659: 5.0%
660: 20.0%
661: 25.0%
saving... checlkpoints/simple-rnn
662: 5.0%
663: 5.0%
664: 35.0%
665: 15.0%
666: 10.0%
667: 10.0%
668: 5.0%
669: 15.0%
670: 10.0%
671: 20.0%
saving... checlkpoints/simple-rnn
672: 10.0%
673: 15.0%
674: 10.0%
675: 30.0%
676: 5.0%
677: 15.0%
678: 15.0%
679: 5.0%
680: 10.0%
681: 5.0%
saving... checlkpoints/simple-rnn
682: 20.0%
683: 20.0%
684: 15.0%
685: 15.0%
686: 10.0%
687: 20.0%
688: 15.0%
689: 10.0%
690: 10.0%
691: 10.0%
saving... checlkpoints/simple-rnn
692: 25.0%
693: 20.0%
694: 25.0%
695: 20.0%
696: 35.0%
697: 5.0%
698: 10.0%
699: 20.0%
700: 5.0%
701: 15.0%
saving... checlkpoints/simple-rnn
702: 15.0%
703: 15.0%
704: 10.0%
705: 15.0%
706: 15.0%
707: 20.0%
708: 35.0%
709: 20.0%
710: 30.0%
711: 5.0%
saving... checlkpoints/simple-rnn
712: 25.0%
713: 10.0%
714: 15.0%
715: 10.0%
716: 0.0%
717: 10.0%
718: 20.0%
719: 25.0%
720: 0.0%
721: 15.0%
saving... checlkpoints/simple-rnn
722: 20.0%
723: 10.0%
724: 10.0%
725: 15.0%
726: 10.0%
727: 25.0%
728: 20.0%
729: 10.0%
730: 30.0%
731: 5.0%
saving... checlkpoints/simple-rnn
732: 5.0%
733: 20.0%
734: 15.0%
735: 25.0%
736: 0.0%
737: 25.0%
738: 10.0%
739: 15.0%
740: 20.0%
741: 10.0%
saving... checlkpoints/simple-rnn
742: 10.0%
743: 15.0%
744: 10.0%
745: 25.0%
746: 20.0%
747: 25.0%
748: 20.0%
749: 30.0%
750: 5.0%
751: 10.0%
saving... checlkpoints/simple-rnn
752: 15.0%
753: 40.0%
754: 15.0%
755: 25.0%
756: 5.0%
757: 20.0%
758: 20.0%
759: 35.0%
760: 15.0%
761: 20.0%
saving... checlkpoints/simple-rnn
762: 20.0%
763: 15.0%
764: 25.0%
765: 25.0%
766: 5.0%
767: 5.0%
768: 15.0%
769: 10.0%
770: 5.0%
771: 25.0%
saving... checlkpoints/simple-rnn
772: 0.0%
773: 0.0%
774: 20.0%
775: 15.0%
776: 15.0%
777: 15.0%
778: 20.0%
779: 10.0%
780: 15.0%
781: 20.0%
saving... checlkpoints/simple-rnn
782: 5.0%
783: 15.0%
784: 10.0%
785: 15.0%
786: 20.0%
787: 10.0%
788: 20.0%
789: 10.0%
790: 10.0%
791: 20.0%
saving... checlkpoints/simple-rnn
792: 20.0%
793: 15.0%
794: 20.0%
795: 5.0%
796: 30.0%
797: 10.0%
798: 15.0%
799: 15.0%
800: 30.0%
801: 20.0%
saving... checlkpoints/simple-rnn
802: 25.0%
803: 20.0%
804: 20.0%
805: 15.0%
806: 0.0%
807: 15.0%
808: 15.0%
809: 10.0%
810: 10.0%
811: 15.0%
saving... checlkpoints/simple-rnn
812: 10.0%
813: 15.0%
814: 35.0%
815: 20.0%
816: 20.0%
817: 25.0%
818: 20.0%
819: 15.0%
820: 0.0%
821: 10.0%
saving... checlkpoints/simple-rnn
822: 15.0%
823: 25.0%
824: 10.0%
825: 5.0%
826: 25.0%
827: 15.0%
828: 15.0%
829: 15.0%
830: 30.0%
831: 10.0%
saving... checlkpoints/simple-rnn
832: 10.0%
833: 10.0%
834: 25.0%
835: 30.0%
836: 5.0%
837: 20.0%
838: 10.0%
839: 10.0%
840: 20.0%
841: 20.0%
saving... checlkpoints/simple-rnn
842: 20.0%
843: 5.0%
844: 25.0%
845: 20.0%
846: 5.0%
847: 10.0%
848: 15.0%
849: 10.0%
850: 20.0%
851: 15.0%
saving... checlkpoints/simple-rnn
852: 10.0%
853: 25.0%
854: 25.0%
855: 20.0%
856: 5.0%
857: 35.0%
858: 15.0%
859: 25.0%
860: 30.0%
861: 25.0%
saving... checlkpoints/simple-rnn
862: 0.0%
863: 10.0%
864: 15.0%
865: 15.0%
866: 20.0%
867: 5.0%
868: 15.0%
869: 5.0%
870: 10.0%
871: 5.0%
saving... checlkpoints/simple-rnn
872: 10.0%
873: 5.0%
874: 20.0%
875: 25.0%
876: 15.0%
877: 15.0%
878: 20.0%
879: 15.0%
880: 15.0%
881: 10.0%
saving... checlkpoints/simple-rnn
882: 25.0%
883: 30.0%
884: 20.0%
885: 5.0%
886: 10.0%
887: 30.0%
888: 10.0%
889: 30.0%
890: 20.0%
891: 10.0%
saving... checlkpoints/simple-rnn
892: 10.0%
893: 15.0%
894: 5.0%
895: 20.0%
896: 25.0%
897: 10.0%
898: 25.0%
899: 15.0%
900: 25.0%
901: 10.0%
saving... checlkpoints/simple-rnn
902: 10.0%
903: 25.0%
904: 10.0%
905: 15.0%
906: 35.0%
907: 15.0%
908: 25.0%
909: 5.0%
910: 20.0%
911: 20.0%
saving... checlkpoints/simple-rnn
912: 10.0%
913: 25.0%
914: 15.0%
915: 15.0%
916: 25.0%
917: 25.0%
918: 30.0%
919: 10.0%
920: 25.0%
921: 20.0%
saving... checlkpoints/simple-rnn
922: 10.0%
923: 20.0%
924: 25.0%
925: 15.0%
926: 15.0%
927: 10.0%
928: 10.0%
929: 10.0%
930: 25.0%
931: 15.0%
saving... checlkpoints/simple-rnn
932: 15.0%
933: 10.0%
934: 25.0%
935: 15.0%
936: 30.0%
937: 10.0%
938: 10.0%
939: 5.0%
940: 20.0%
941: 10.0%
saving... checlkpoints/simple-rnn
942: 20.0%
943: 25.0%
944: 5.0%
945: 0.0%
946: 10.0%
947: 20.0%
948: 5.0%
949: 5.0%
950: 15.0%
951: 30.0%
saving... checlkpoints/simple-rnn
952: 20.0%
953: 5.0%
954: 15.0%
955: 20.0%
956: 10.0%
957: 0.0%
958: 15.0%
959: 15.0%
960: 20.0%
961: 20.0%
saving... checlkpoints/simple-rnn
962: 10.0%
963: 30.0%
964: 20.0%
965: 10.0%
966: 5.0%
967: 35.0%
968: 10.0%
969: 25.0%
970: 0.0%
971: 15.0%
saving... checlkpoints/simple-rnn
972: 5.0%
973: 10.0%
974: 20.0%
975: 15.0%
976: 30.0%
977: 30.0%
978: 25.0%
979: 20.0%
980: 5.0%
981: 30.0%
saving... checlkpoints/simple-rnn
982: 15.0%
983: 25.0%
984: 15.0%
985: 20.0%
986: 0.0%
987: 10.0%
988: 25.0%
989: 5.0%
990: 10.0%
991: 10.0%
saving... checlkpoints/simple-rnn
992: 10.0%
993: 20.0%
994: 5.0%
995: 5.0%
996: 5.0%
997: 15.0%
998: 10.0%
999: 0.0%
1000: 20.0%
1001: 5.0%
saving... checlkpoints/simple-rnn
1002: 15.0%
1003: 25.0%
1004: 10.0%
1005: 20.0%
1006: 15.0%
1007: 15.0%
1008: 10.0%
1009: 5.0%
1010: 15.0%
1011: 5.0%
saving... checlkpoints/simple-rnn
1012: 15.0%
1013: 5.0%
1014: 20.0%
1015: 10.0%
1016: 15.0%
1017: 10.0%
1018: 5.0%
1019: 25.0%
1020: 15.0%
1021: 15.0%
saving... checlkpoints/simple-rnn
1022: 20.0%
1023: 30.0%
1024: 15.0%
1025: 5.0%
1026: 25.0%
1027: 30.0%
1028: 10.0%
1029: 15.0%
1030: 15.0%
1031: 40.0%
saving... checlkpoints/simple-rnn
1032: 10.0%
1033: 5.0%
1034: 25.0%
1035: 10.0%
1036: 10.0%
1037: 5.0%
1038: 0.0%
1039: 5.0%
1040: 30.0%
1041: 10.0%
saving... checlkpoints/simple-rnn
1042: 20.0%
1043: 10.0%
1044: 30.0%
1045: 15.0%
1046: 15.0%
1047: 15.0%
1048: 25.0%
1049: 5.0%
1050: 5.0%
1051: 10.0%
saving... checlkpoints/simple-rnn
1052: 10.0%
1053: 30.0%
1054: 25.0%
1055: 15.0%
1056: 15.0%
1057: 15.0%
1058: 15.0%
1059: 15.0%
1060: 0.0%
1061: 10.0%
saving... checlkpoints/simple-rnn
1062: 15.0%
1063: 5.0%
1064: 15.0%
1065: 15.0%
1066: 10.0%
1067: 25.0%
1068: 20.0%
1069: 20.0%
1070: 10.0%
1071: 10.0%
saving... checlkpoints/simple-rnn
1072: 25.0%
1073: 5.0%
1074: 10.0%
1075: 15.0%
1076: 10.0%
1077: 20.0%
1078: 20.0%
1079: 20.0%
1080: 15.0%
1081: 15.0%
saving... checlkpoints/simple-rnn
1082: 10.0%
1083: 5.0%
1084: 10.0%
1085: 25.0%
1086: 10.0%
1087: 0.0%
1088: 10.0%
1089: 20.0%
1090: 5.0%
1091: 10.0%
saving... checlkpoints/simple-rnn
1092: 25.0%
1093: 10.0%
1094: 0.0%
1095: 20.0%
1096: 10.0%
1097: 20.0%
1098: 10.0%
1099: 5.0%
1100: 40.0%
1101: 15.0%
saving... checlkpoints/simple-rnn
1102: 15.0%
1103: 10.0%
1104: 15.0%
1105: 20.0%
1106: 15.0%
1107: 15.0%
1108: 10.0%
1109: 5.0%
1110: 15.0%
1111: 5.0%
saving... checlkpoints/simple-rnn
1112: 15.0%
1113: 15.0%
1114: 10.0%
1115: 20.0%
1116: 20.0%
1117: 15.0%
1118: 15.0%
1119: 20.0%
1120: 15.0%
1121: 5.0%
saving... checlkpoints/simple-rnn
1122: 0.0%
1123: 0.0%
1124: 10.0%
1125: 25.0%
1126: 15.0%
1127: 15.0%
1128: 25.0%
1129: 5.0%
1130: 0.0%
1131: 25.0%
saving... checlkpoints/simple-rnn
1132: 15.0%
1133: 15.0%
1134: 20.0%
1135: 30.0%
1136: 35.0%
1137: 25.0%
1138: 15.0%
1139: 10.0%
1140: 35.0%
1141: 10.0%
saving... checlkpoints/simple-rnn
1142: 10.0%
1143: 15.0%
1144: 10.0%
1145: 10.0%
1146: 15.0%
1147: 10.0%
1148: 10.0%
1149: 20.0%
1150: 5.0%
1151: 5.0%
saving... checlkpoints/simple-rnn
1152: 20.0%
1153: 5.0%
1154: 10.0%
1155: 30.0%
1156: 20.0%
1157: 20.0%
1158: 20.0%
1159: 20.0%
1160: 20.0%
1161: 15.0%
saving... checlkpoints/simple-rnn
1162: 15.0%
1163: 0.0%
1164: 15.0%
1165: 20.0%
1166: 5.0%
1167: 20.0%
1168: 20.0%
1169: 20.0%
1170: 20.0%
1171: 15.0%
saving... checlkpoints/simple-rnn
1172: 30.0%
1173: 15.0%
1174: 10.0%
1175: 30.0%
1176: 10.0%
1177: 5.0%
1178: 10.0%
1179: 30.0%
1180: 30.0%
1181: 10.0%
saving... checlkpoints/simple-rnn
1182: 0.0%
1183: 5.0%
1184: 5.0%
1185: 10.0%
1186: 5.0%
1187: 0.0%
1188: 15.0%
1189: 25.0%
1190: 20.0%
1191: 10.0%
saving... checlkpoints/simple-rnn
1192: 5.0%
1193: 5.0%
1194: 15.0%
1195: 15.0%
1196: 15.0%
1197: 20.0%
1198: 5.0%
1199: 20.0%
1200: 10.0%
1201: 15.0%
saving... checlkpoints/simple-rnn
1202: 0.0%
1203: 5.0%
1204: 20.0%
1205: 25.0%
1206: 15.0%
1207: 15.0%
1208: 10.0%
1209: 15.0%
1210: 5.0%
1211: 20.0%
saving... checlkpoints/simple-rnn
1212: 5.0%
1213: 10.0%
1214: 15.0%
1215: 10.0%
1216: 10.0%
1217: 10.0%
1218: 20.0%
1219: 10.0%
1220: 10.0%
1221: 5.0%
saving... checlkpoints/simple-rnn
1222: 15.0%
1223: 30.0%
1224: 25.0%
1225: 10.0%
1226: 15.0%
1227: 10.0%
1228: 20.0%
1229: 25.0%
1230: 15.0%
1231: 15.0%
saving... checlkpoints/simple-rnn
1232: 25.0%
1233: 20.0%
1234: 5.0%
1235: 25.0%
1236: 15.0%
1237: 20.0%
1238: 10.0%
1239: 15.0%
1240: 25.0%
1241: 5.0%
saving... checlkpoints/simple-rnn
1242: 15.0%
1243: 15.0%
1244: 20.0%
1245: 25.0%
1246: 0.0%
1247: 20.0%
1248: 10.0%
1249: 5.0%
/home/kurbanov/Soft/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:120: DeprecationWarning: generator 'preprocess_batched' raised StopIteration