In [12]:
import tensorflow as tf
import os
import numpy as np
from dataset import path_to_image_crop, input_pipeline, images_as_float

def model1(x, y_, data_size):
    # Single-layer softmax classifier: logits = xW + b, one weight column per class
    # (W must be [data_size, 2]; a [data_size, 1] W would broadcast against b and
    # give both classes the same score up to a bias offset).
    W = tf.Variable(tf.zeros([data_size, 2]))
    b = tf.Variable(tf.zeros([2]))
    y = tf.matmul(x, W) + b

    cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
    return tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy), y, cross_entropy
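
# Evaluation sketch (not part of the original run): with one-hot labels, the
# accuracy of either model's logits could be computed as
#     correct = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
#     accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))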

def model2(x, y_):
    def fully_connected(inputs, size):
        # Xavier-initialised dense layer: inputs @ weights + biases.
        weights = tf.get_variable('weights',
            shape=[inputs.get_shape()[1], size],
            initializer=tf.contrib.layers.xavier_initializer())
        biases = tf.get_variable('biases',
            shape=[size],
            initializer=tf.constant_initializer(0.0))
        return tf.matmul(inputs, weights) + biases

    def model_pass(inputs):
        # One hidden ReLU layer of 100 units, then a 2-way linear output.
        with tf.variable_scope('hidden'):
            hidden = fully_connected(inputs, size=100)
        relu_hidden = tf.nn.relu(hidden)
        with tf.variable_scope('out'):
            prediction = fully_connected(relu_hidden, size=2)
        return prediction

    predictions = model_pass(x)
    # Mean squared error between the raw outputs and the one-hot labels,
    # minimised with Nesterov momentum.
    loss = tf.reduce_mean(tf.square(predictions - y_))
    optimizer = tf.train.MomentumOptimizer(
        learning_rate=0.01,
        momentum=0.9,
        use_nesterov=True
    ).minimize(loss)

    return optimizer, predictions, loss
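
# Note: MSE on raw outputs is an unusual loss for one-hot classification, and
# the log below shows this configuration diverging to NaN. A sketch of the more
# conventional setup (softmax cross-entropy as in model1, plus gradient clipping
# to tame exploding updates):
#     loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=predictions))
#     opt = tf.train.MomentumOptimizer(learning_rate=0.01, momentum=0.9, use_nesterov=True)
#     clipped = [(tf.clip_by_norm(g, 5.0), v) for g, v in opt.compute_gradients(loss)]
#     optimizer = opt.apply_gradients(clipped)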

people_path = '/data/people_classification_all/fold_*_data.txt'
image_prefix = 'coarse_tilt_aligned_face'
batch_size = 128
image_size = 227
image_dimension = [image_size, image_size]
num_epochs = 1000

model_name = "1fc_b" + str(batch_size) + "_e" + str(num_epochs - 1)
model_variable_scope = model_name
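
# Neither model_name nor model_variable_scope is used in this cell; a sketch of
# a plausible use, assuming checkpointing with tf.train.Saver inside the session:
#     saver = tf.train.Saver()
#     saver.save(session, model_name)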

def extract_gender(features):
    # Map the gender code in column 1 of each label row to a one-hot vector:
    # 1 -> [1, 0] (male), 2 -> [0, 1] (female), anything else -> [0, 0].
    def extract(v):
        def f1(): return tf.constant([1, 0])
        def f2(): return tf.constant([0, 1])
        def f_(): return tf.constant([0, 0])
        return tf.case({
            tf.equal(v[1], tf.constant(1)): f1,
            tf.equal(v[1], tf.constant(2)): f2,
        }, default=f_, exclusive=True)
    return tf.map_fn(extract, features, dtype=tf.int32)
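
# A shorter equivalent sketch, assuming the code in column 1 is always 0, 1 or 2:
# tf.one_hot maps the out-of-range index -1 (from code 0) to an all-zero row.
#     label_one_hot = tf.one_hot(features[:, 1] - 1, depth=2, dtype=tf.int32)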


data_size = image_dimension[0] * image_dimension[1] * 3

graph = tf.Graph()
with graph.as_default():
    # Queue-based input pipeline yielding image paths and raw label rows.
    path_batch, label_batch = input_pipeline(people_path, batch_size, None, True)
    label_batch = extract_gender(label_batch)
    label_batch = tf.reshape(label_batch, [batch_size, 2])

    data_batch = path_to_image_crop(path_batch, os.path.dirname(people_path), image_prefix, image_dimension)
    data_batch = tf.reshape(data_batch, [batch_size, data_size])

    x = tf.placeholder(tf.float32, [None, data_size])
    y_ = tf.placeholder(tf.float32, [None, 2])

    #train_step, y, loss = model1(x, y_, data_size)
    train_step, y, loss = model2(x, y_)

with tf.Session(graph=graph) as session:
    session.run(tf.global_variables_initializer())
    session.run(tf.local_variables_initializer())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    # Each iteration consumes one queue batch, so these are training steps
    # rather than full passes over the data.
    for i in range(num_epochs):
        batch_xs, batch_ys = session.run([data_batch, label_batch])
        batch_xs = images_as_float(batch_xs, batch_size, data_size)
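        # images_as_float (see dataset.py in the traceback below) divides via
        # np.vectorize, calling a Python lambda once per pixel; an equivalent
        # vectorised sketch, assuming uint8 input in [0, 255]:
        #     batch_xs = batch_xs.reshape(batch_size, data_size).astype(np.float32) / 255.0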
        p, l, _ = session.run([y, loss, train_step], feed_dict={x: batch_xs, y_: batch_ys})
        print('%d: %s -> %s %s' % (i, l, p[i % batch_size], batch_ys[i % batch_size]))  # 1 -> male, 2 -> female
        if i == 0: print(batch_xs)
    coord.request_stop()
    coord.join(threads)


0: 0.820444 -> [ 0.7785638   0.17644775] [1 0]
[[ 0.42745098  0.4         0.36862745 ...,  0.18039216  0.08235294
   0.06666667]
 [ 0.40392157  0.35686275  0.2627451  ...,  0.16078431  0.11764706
   0.14117647]
 [ 0.23529412  0.1372549   0.12156863 ...,  0.36078431  0.11764706
   0.21568627]
 ..., 
 [ 0.60784314  0.43137255  0.34901961 ...,  0.19215686  0.10588235
   0.11764706]
 [ 0.41176471  0.28235294  0.24705882 ...,  0.38823529  0.27843137
   0.19215686]
 [ 0.          0.00392157  0.         ...,  0.          0.00392157
   0.01960784]]
1: 13963.5 -> [-179.78222656  171.47874451] [1 0]
2: 6.65374e+08 -> [ 14878.20507812 -14850.13378906] [1 0]
3: 3.57617e+07 -> [-3879.42919922  6603.88378906] [0 1]
4: 299687.0 -> [-575.04150391  518.39080811] [1 0]
5: 446401.0 -> [-706.7364502  627.1751709] [1 0]
6: 591358.0 -> [-817.00469971  717.83569336] [1 0]
7: 724631.0 -> [-907.0793457   791.44152832] [1 0]
8: 828642.0 -> [-954.19421387  866.4239502 ] [1 0]
9: 6.57381e+15 -> [ 66663184. -61471804.] [1 0]
10: 2.14096e+12 -> [-1521267.5    1402731.625] [1 0]
11: 4.23865e+12 -> [-2140490.75  1973728.  ] [1 0]
12: 6.59919e+12 -> [-2670813.75  2462748.5 ] [1 0]
13: 8.98615e+12 -> [-3116623.25  2873838.25] [0 1]
14: 1.1221e+13 -> [-3482673.25  3211381.  ] [1 0]
15: 1.31768e+13 -> [-3773997.   3480017.5] [0 1]
16: 1.47713e+13 -> [-3995826.75  3684572.5 ] [0 1]
17: 1.59603e+13 -> [-4153518.5   3829985.25] [1 0]
18: 1.67299e+13 -> [-4252487.    3921248.25] [0 1]
19: 1.70911e+13 -> [-4298143.  3963351.] [0 1]
20: 1.70728e+13 -> [-4295841.  3961231.] [1 0]
21: 1.67169e+13 -> [-4250831.5  3919730. ] [1 0]
22: 1.60735e+13 -> [-4168219.75  3843555.  ] [1 0]
23: 1.51966e+13 -> [-4052930.5   3737247.75] [1 0]
24: 1.41414e+13 -> [-3909678.5  3605155.5] [0 1]
25: 1.29609e+13 -> [-3742944.25  3451409.75] [0 1]
26: 1.17049e+13 -> [-3556954.5  3279908. ] [0 1]
27: 1.04176e+13 -> [-3355668.  3094301.] [1 0]
28: 9.13764e+12 -> [-3142765.    2897982.25] [0 1]
29: 7.89703e+12 -> [-2921641.    2694082.25] [1 0]
30: 6.72137e+12 -> [-2695403.    2485466.75] [1 0]
31: 2.82536e+12 -> [-1743921.   1628012.5] [1 0]
32: inf -> [  2.73581473e+19  -2.56039996e+19] [1 0]
33: 4.09262e+35 -> [ -6.60562259e+17   6.18208659e+17] [1 0]
34: nan -> [ nan  nan] [1 0]
35: nan -> [ nan  nan] [1 0]
36: nan -> [ nan  nan] [0 1]
37: nan -> [ nan  nan] [0 1]
38: nan -> [ nan  nan] [1 0]
39: nan -> [ nan  nan] [1 0]
40: nan -> [ nan  nan] [0 1]
41: nan -> [ nan  nan] [0 1]
42: nan -> [ nan  nan] [1 0]
43: nan -> [ nan  nan] [1 0]
44: nan -> [ nan  nan] [0 1]
45: nan -> [ nan  nan] [1 0]
46: nan -> [ nan  nan] [1 0]
47: nan -> [ nan  nan] [1 0]
48: nan -> [ nan  nan] [0 1]
49: nan -> [ nan  nan] [1 0]
50: nan -> [ nan  nan] [0 1]
51: nan -> [ nan  nan] [1 0]
52: nan -> [ nan  nan] [1 0]
53: nan -> [ nan  nan] [1 0]
54: nan -> [ nan  nan] [0 1]
55: nan -> [ nan  nan] [0 1]
56: nan -> [ nan  nan] [0 1]
57: nan -> [ nan  nan] [1 0]
58: nan -> [ nan  nan] [0 1]
59: nan -> [ nan  nan] [0 1]
60: nan -> [ nan  nan] [0 1]
61: nan -> [ nan  nan] [1 0]
62: nan -> [ nan  nan] [0 1]
63: nan -> [ nan  nan] [0 1]
64: nan -> [ nan  nan] [1 0]
65: nan -> [ nan  nan] [1 0]
66: nan -> [ nan  nan] [1 0]
67: nan -> [ nan  nan] [1 0]
68: nan -> [ nan  nan] [0 1]
69: nan -> [ nan  nan] [0 1]
70: nan -> [ nan  nan] [1 0]
71: nan -> [ nan  nan] [1 0]
72: nan -> [ nan  nan] [0 1]
73: nan -> [ nan  nan] [1 0]
74: nan -> [ nan  nan] [0 1]
75: nan -> [ nan  nan] [1 0]
76: nan -> [ nan  nan] [1 0]
77: nan -> [ nan  nan] [0 1]
78: nan -> [ nan  nan] [1 0]
79: nan -> [ nan  nan] [0 1]
80: nan -> [ nan  nan] [1 0]
INFO:tensorflow:Error reported to Coordinator: <class 'tensorflow.python.framework.errors_impl.CancelledError'>, Enqueue operation was cancelled
	 [[Node: input_producer/input_producer_EnqueueMany = QueueEnqueueManyV2[Tcomponents=[DT_STRING], timeout_ms=-1, _device="/job:localhost/replica:0/task:0/cpu:0"](input_producer, input_producer/RandomShuffle)]]

Caused by op u'input_producer/input_producer_EnqueueMany', defined at:
  File "/usr/lib/python2.7/runpy.py", line 174, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "/usr/local/lib/python2.7/dist-packages/ipykernel_launcher.py", line 16, in <module>
    app.launch_new_instance()
  File "/usr/local/lib/python2.7/dist-packages/traitlets/config/application.py", line 658, in launch_instance
    app.start()
  File "/usr/local/lib/python2.7/dist-packages/ipykernel/kernelapp.py", line 477, in start
    ioloop.IOLoop.instance().start()
  File "/usr/local/lib/python2.7/dist-packages/zmq/eventloop/ioloop.py", line 177, in start
    super(ZMQIOLoop, self).start()
  File "/usr/local/lib/python2.7/dist-packages/tornado/ioloop.py", line 888, in start
    handler_func(fd_obj, events)
  File "/usr/local/lib/python2.7/dist-packages/tornado/stack_context.py", line 277, in null_wrapper
    return fn(*args, **kwargs)
  File "/usr/local/lib/python2.7/dist-packages/zmq/eventloop/zmqstream.py", line 440, in _handle_events
    self._handle_recv()
  File "/usr/local/lib/python2.7/dist-packages/zmq/eventloop/zmqstream.py", line 472, in _handle_recv
    self._run_callback(callback, msg)
  File "/usr/local/lib/python2.7/dist-packages/zmq/eventloop/zmqstream.py", line 414, in _run_callback
    callback(*args, **kwargs)
  File "/usr/local/lib/python2.7/dist-packages/tornado/stack_context.py", line 277, in null_wrapper
    return fn(*args, **kwargs)
  File "/usr/local/lib/python2.7/dist-packages/ipykernel/kernelbase.py", line 283, in dispatcher
    return self.dispatch_shell(stream, msg)
  File "/usr/local/lib/python2.7/dist-packages/ipykernel/kernelbase.py", line 235, in dispatch_shell
    handler(stream, idents, msg)
  File "/usr/local/lib/python2.7/dist-packages/ipykernel/kernelbase.py", line 399, in execute_request
    user_expressions, allow_stdin)
  File "/usr/local/lib/python2.7/dist-packages/ipykernel/ipkernel.py", line 196, in do_execute
    res = shell.run_cell(code, store_history=store_history, silent=silent)
  File "/usr/local/lib/python2.7/dist-packages/ipykernel/zmqshell.py", line 533, in run_cell
    return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
  File "/usr/local/lib/python2.7/dist-packages/IPython/core/interactiveshell.py", line 2717, in run_cell
    interactivity=interactivity, compiler=compiler, result=result)
  File "/usr/local/lib/python2.7/dist-packages/IPython/core/interactiveshell.py", line 2821, in run_ast_nodes
    if self.run_code(code, result):
  File "/usr/local/lib/python2.7/dist-packages/IPython/core/interactiveshell.py", line 2881, in run_code
    exec(code_obj, self.user_global_ns, self.user_ns)
  File "<ipython-input-12-482b7e3cf4e4>", line 69, in <module>
    path_batch, label_batch = input_pipeline(people_path, batch_size, None, True)
  File "dataset.py", line 107, in input_pipeline
    filename_queue = tf.train.string_input_producer(filenames, num_epochs=num_epochs, shuffle=shuffle)
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/training/input.py", line 232, in string_input_producer
    cancel_op=cancel_op)
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/training/input.py", line 164, in input_producer
    enq = q.enqueue_many([input_tensor])
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/data_flow_ops.py", line 367, in enqueue_many
    self._queue_ref, vals, name=scope)
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/gen_data_flow_ops.py", line 1556, in _queue_enqueue_many_v2
    name=name)
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/framework/op_def_library.py", line 768, in apply_op
    op_def=op_def)
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/framework/ops.py", line 2336, in create_op
    original_op=self._default_original_op, op_def=op_def)
  File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/framework/ops.py", line 1228, in __init__
    self._traceback = _extract_stack()

CancelledError (see above for traceback): Enqueue operation was cancelled
	 [[Node: input_producer/input_producer_EnqueueMany = QueueEnqueueManyV2[Tcomponents=[DT_STRING], timeout_ms=-1, _device="/job:localhost/replica:0/task:0/cpu:0"](input_producer, input_producer/RandomShuffle)]]

---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-12-482b7e3cf4e4> in <module>()
     87     for i in range(num_epochs):
     88         batch_xs, batch_ys = session.run([data_batch, label_batch])
---> 89         batch_xs = images_as_float(batch_xs, batch_size, data_size)
     90         p,l,_ = session.run([y, loss, train_step], feed_dict={x: batch_xs, y_: batch_ys})
     91         print('%d: %s -> %s %s' % (i, l, p[i % batch_size], batch_ys[i % batch_size])) # 1 -> male, 2 -> female

/book/people_classification/dataset.pyc in images_as_float(img, batch_size, data_size)
     72 def images_as_float(img, batch_size, data_size):
     73     i = img.reshape(batch_size*data_size)
---> 74     i = vfunc(np.array(i, dtype=np.float32))
     75     i = i.reshape(batch_size,data_size)
     76     return i

/usr/local/lib/python2.7/dist-packages/numpy/lib/function_base.pyc in __call__(self, *args, **kwargs)
   2574             vargs.extend([kwargs[_n] for _n in names])
   2575 
-> 2576         return self._vectorize_call(func=func, args=vargs)
   2577 
   2578     def _get_ufunc_and_otypes(self, func, args):

/usr/local/lib/python2.7/dist-packages/numpy/lib/function_base.pyc in _vectorize_call(self, func, args)
   2650                       for a in args]
   2651 
-> 2652             outputs = ufunc(*inputs)
   2653 
   2654             if ufunc.nout == 1:

/book/people_classification/dataset.pyc in <lambda>(t)
     68     return options[v]
     69 
---> 70 fn = lambda t: t / 255
     71 vfunc = np.vectorize(fn)
     72 def images_as_float(img, batch_size, data_size):

KeyboardInterrupt: