In [10]:
import tensorflow as tf
import numpy as np
In [11]:
# A FIFO queue holding at most 3 elements of dtype float32.
# NOTE(review): tf.FIFOQueue is TF1-era; superseded by tf.data in TF2.
q = tf.FIFOQueue(3, "float")
In [12]:
# Seed the queue with three zeros. enqueue_many takes a tuple with one
# batch per queue component; here a single component with 3 values.
init = q.enqueue_many(([0.,0.,0.,],))
In [13]:
# Read-modify-write cycle: each run of q_inc dequeues one element,
# adds 1, and enqueues the result back onto the queue.
x = q.dequeue()
y = x+1
q_inc = q.enqueue([y])
In [15]:
N_SAMPLES = 1000
NUM_THREADS = 4

# Generate toy data: N_SAMPLES samples, each a length-4 vector drawn from
# a normal distribution with mean 1 and standard deviation 10.
# Cast to float32/int32 so the arrays match the queue's declared dtypes
# (enqueue_many rejects float64/int64 against float32/int32 components).
data = (10 * np.random.randn(N_SAMPLES, 4) + 1).astype(np.float32)
# N_SAMPLES binary labels of 0 and 1.
target = np.random.randint(0, 2, size=N_SAMPLES).astype(np.int32)

# Queue of (feature, label) pairs: length-4 float32 vectors and scalar
# int32 labels, holding at most 50 pairs at a time.
queue = tf.FIFOQueue(capacity=50, dtypes=[tf.float32, tf.int32], shapes=[[4], []])
enqueue_op = queue.enqueue_many([data, target])
dequeue_op = queue.dequeue()

# Create NUM_THREADS threads that each repeatedly run enqueue_op.
qr = tf.train.QueueRunner(queue, [enqueue_op] * NUM_THREADS)
with tf.Session() as sess:
    # Create a coordinator, then launch the queue-runner threads.
    coord = tf.train.Coordinator()
    enqueue_threads = qr.create_threads(sess, coord=coord, start=True)
    for step in range(100):  # do 100 iterations
        if coord.should_stop():
            break
        data_batch, label_batch = sess.run(dequeue_op)
    # Ask the enqueue threads to stop, then wait for them to finish.
    coord.request_stop()
    coord.join(enqueue_threads)
In [17]:
import threading

# Thread body: loop until the coordinator indicates a stop was requested.
# If some condition becomes true, ask the coordinator to stop.
def my_loop(coord):
    while not coord.should_stop():
        print("1")
        # Placeholder: replace with your own stopping condition.
        if should_stop_condition():
            coord.request_stop()

# Main code: create a coordinator.
# NOTE(review): fixed tf.Coordinator -> tf.train.Coordinator, matching
# the earlier QueueRunner cell (TF1 exposes Coordinator under tf.train).
coord = tf.train.Coordinator()
# Create 10 threads that run 'my_loop()'.
# You can also create threads using a QueueRunner as in the example above.
# (xrange replaced by range: the file elsewhere uses Python 3 print().)
threads = [threading.Thread(target=my_loop, args=(coord,)) for _ in range(10)]
# Start the threads and wait for all of them to stop.
for t in threads:
    t.start()
coord.join(threads)
Outputs the lines of a file delimited by newlines
E.g. text files, CSV files
Outputs the entire file when all files have the same fixed length
E.g. each MNIST file has 28 x 28 pixels, CIFAR-10 has 32 x 32 x 3
Outputs the entire file content. This is useful when each file contains a sample
Reads samples from TensorFlow's own binary format (TFRecord)
Allows you to create your own readers
In [26]:
# Queue of input filenames; readers pull the next filename from it.
filename_queue = tf.train . string_input_producer ([ "heart.csv" ])
# TextLineReader emits one line of a text file per read() call.
reader = tf . TextLineReader (skip_header_lines=1)
# it means you choose to skip the first line for every file in the queue
In [27]:
# read() returns the next record: key identifies the file and line
# (e.g. "heart.csv:2"), value is the raw text of that line.
key , value = reader . read ( filename_queue)
In [28]:
Example values produced by read():
key = "data/heart.csv:2"
value = "144,0.01,4.41,28.61,Absent,55,28.87,2.06,63,1"
In [29]:
# NOTE(review): `filenames` is not defined in this notebook — it must be a
# list of paths, e.g. filenames = ["heart.csv"] as in the earlier cell.
filename_queue = tf.train.string_input_producer(filenames)
reader = tf.TextLineReader(skip_header_lines=1)  # skip the first line in each file
key, value = reader.read(filename_queue)

with tf.Session() as sess:
    coord = tf.train.Coordinator()
    # Launch the queue-runner threads that feed filename_queue.
    threads = tf.train.start_queue_runners(coord=coord)
    print(sess.run(key))    # e.g. b'heart.csv:2'
    print(sess.run(value))  # e.g. b'144,0.01,4.41,28.61,Absent,55,28.87,2.06,63,1'
    # Stop the threads and wait for them to finish.
    coord.request_stop()
    coord.join(threads)
In [ ]: