In [1]:
import tensorflow as tf  # version 0.12.0rc0
import rcnn

# %config InlineBackend.figure_format = 'retina'  # double res graphs

# Load constrained dataset
from grid_dataset import load_grid_dataset
datasets = load_grid_dataset('data/grids_9x9_1000.hdf5')

# Some magic to autoreload external imports
# from http://ipython.readthedocs.io/en/stable/config/extensions/autoreload.html
%load_ext autoreload
%autoreload 2


- - - - - - - - - - - - - - - - - - - - 
Loaded dataset  data/grids_9x9_1000.hdf5
X.dtype =  float64  X.shape =  (1000, 9, 9)
y.dtype =  float64  y.shape =  (1000,)
- - - - - - - - - - - - - - - - - - - - 

In [2]:
# Hyperparameters
learning_rate = 1e-3
training_iters = 2000000  # total number of training examples to consume
batch_size = 128
display_step = 50         # steps between training-progress printouts
validation_step = 1000    # steps between validation rounds to
                          # check against overfitting

# Network Parameters
n_input = 1000  # NOTE(review): 1000 matches the dataset size, not the
                # per-example input size (9*9 = 81), and is unused below
                # — confirm intent.
n_classes = 2   # Connected or not connected
grid_size = 9
height = grid_size
width = grid_size
depth = 1       # single input channel
dropout = 0.5   # Dropout KEEP probability fed to `keep_prob` while training

# Start from a clean graph so this cell (and the model-building cell below)
# can be re-executed without the
# "Variable ... already exists, disallowed" ValueError caused by stale
# variables left over from a previous run.
tf.reset_default_graph()

# tf Graph input
with tf.name_scope('input'):
    X = tf.placeholder(tf.float32, shape=[None, height, width, depth])
    y = tf.placeholder(tf.int32, shape=[None])
    keep_prob = tf.placeholder(tf.float32)  # dropout (keep probability)

In [5]:
# Construct model
rcnn = rcnn.Model(X, y, output_size=2, learning_rate=learning_rate, dropout=keep_prob)

# Initializing the variables
# Note: tensorflow needs to have this initializer after
# the model constructor above in order to catch all variables 
init = tf.initialize_all_variables()

# Merge all summaries for tensorboard writer 
merged = tf.merge_all_summaries()

In [7]:
## Launch the graph
with tf.Session() as sess:
    sess.run(init)
    step = 1
    
    # Profiling tools (only on tensorflow >= 0.12.0rc0)
    # Print trainable variable parameter statistics to stdout.
    # param_stats = tf.contrib.tfprof.model_analyzer.print_model_analysis(
    #    tf.get_default_graph(),
    #    tfprof_options=tf.contrib.tfprof.model_analyzer.TRAINABLE_VARS_PARAMS_STAT_OPTIONS)
    
    # param_stats is tensorflow.tfprof.TFProfNode proto. It organize the statistics
    # of each graph node in tree scructure. Let's print the root below.
    # print('total_params: %d\n' % param_stats.total_parameters)
    
    # Add a summary writer for train steps
    train_writer = tf.train.SummaryWriter("./logs", sess.graph)
    
    # Keep training until reach max iterations
    while step * batch_size < training_iters:
        
        batch_X, batch_y = datasets.train.next_batch(batch_size)
        # Run optimization op (backprop)
        summary, _ = sess.run([merged, rcnn.train],
                              feed_dict={X: batch_X, y: batch_y,
                                         keep_prob: dropout})

        train_writer.add_summary(summary, step)
        
        # Display training steps
        if step % display_step == 0:
            # Calculate batch loss and accuracy
            summary, loss, acc = sess.run([merged, rcnn.loss, rcnn.predict], 
                                 feed_dict={X: batch_X,
                                            y: batch_y,
                                            keep_prob: dropout})
            
            train_writer.add_summary(summary, step)
            
            print("Iteration " + str(step * batch_size) + \
                  ", Minibatch Loss= " + "{:.6f}".format(loss) + \
                  ", Training Accuracy= " + "{:.5f}".format(acc))
        
        # Run a round of validation once in a while
        if step % validation_step == 0:
            print("Validation Accuracy:", \
                  sess.run(rcnn.predict, feed_dict={X: datasets.validation.X[:batch_size],
                                                      y: datasets.validation.y[:batch_size],
                                                      keep_prob: 1.}))
            
        step += 1
    print("Optimization Finished!")
    train_writer.close()

    # Calculate accuracy
    print("Testing Accuracy:", \
          sess.run(rcnn.predict, feed_dict={X: datasets.test.X[:batch_size],
                                            y: datasets.test.y[:batch_size],
                                            keep_prob: 1.}))


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-7-55f20c8c5070> in <module>()
     22         batch_X, batch_y = datasets.train.next_batch(batch_size)
     23         # Run optimization op (backprop)
---> 24         summary, _ = sess.run([merged, rcnn.train],
     25                               feed_dict={X: batch_X, y: batch_y,
     26                                          keep_prob: dropout})

/home/jovyan/work/define_scope.py in decorator(self)
     58                 if not hasattr(self, attribute):
     59                         with tf.variable_scope(name, *args, **kwargs):  # pylint: disable=undefined-variable
---> 60                                 setattr(self, attribute, function(self))
     61                 return getattr(self, attribute)
     62 

/home/jovyan/work/rcnn.py in train(self)
    163                 with tf.name_scope('train'):
    164                         optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
--> 165                         minimize = optimizer.minimize(self.loss)
    166                 return minimize
    167 

/home/jovyan/work/define_scope.py in decorator(self)
     58                 if not hasattr(self, attribute):
     59                         with tf.variable_scope(name, *args, **kwargs):  # pylint: disable=undefined-variable
---> 60                                 setattr(self, attribute, function(self))
     61                 return getattr(self, attribute)
     62 

/home/jovyan/work/rcnn.py in loss(self)
    173                 with tf.name_scope('cross_entopy'):
    174 			diff = tf.nn.sparse_softmax_cross_entropy_with_logits(
--> 175 				logits=self.inference, labels=self.y)
    176                         with tf.name_scope('total'):
    177                                 cross_entropy = tf.reduce_mean(diff)

/home/jovyan/work/define_scope.py in decorator(self)
     58                 if not hasattr(self, attribute):
     59                         with tf.variable_scope(name, *args, **kwargs):  # pylint: disable=undefined-variable
---> 60                                 setattr(self, attribute, function(self))
     61                 return getattr(self, attribute)
     62 

/home/jovyan/work/rcnn.py in inference(self)
    127                 # Convolution Layer
    128                 with tf.variable_scope('conv1'):
--> 129                         conv1 = conv_relu(X, [1, 1, 1, 32], [32], 'conv1')
    130                         # conv1 = conv2d(X, params['Wc1'], params['bc1'])
    131 

/home/jovyan/work/rcnn.py in conv_relu(input_tensor, kernel_shape, bias_shape, layer_name, act)
    110                         # Create variable named "weights".
    111 			weights = tf.get_variable("weights", kernel_shape,
--> 112 				initializer=tf.random_normal_initializer())
    113                         # Create variable named "biases".
    114 			biases = tf.get_variable("biases", bias_shape,

/opt/conda/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
   1020       collections=collections, caching_device=caching_device,
   1021       partitioner=partitioner, validate_shape=validate_shape,
-> 1022       custom_getter=custom_getter)
   1023 
   1024 

/opt/conda/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
    847           collections=collections, caching_device=caching_device,
    848           partitioner=partitioner, validate_shape=validate_shape,
--> 849           custom_getter=custom_getter)
    850 
    851   def _get_partitioned_variable(self,

/opt/conda/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
    343           reuse=reuse, trainable=trainable, collections=collections,
    344           caching_device=caching_device, partitioner=partitioner,
--> 345           validate_shape=validate_shape)
    346 
    347   def _get_partitioned_variable(

/opt/conda/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape)
    328           initializer=initializer, regularizer=regularizer, reuse=reuse,
    329           trainable=trainable, collections=collections,
--> 330           caching_device=caching_device, validate_shape=validate_shape)
    331 
    332     if custom_getter is not None:

/opt/conda/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape)
    631                          " Did you mean to set reuse=True in VarScope? "
    632                          "Originally defined at:\n\n%s" % (
--> 633                              name, "".join(traceback.format_list(tb))))
    634       found_var = self._vars[name]
    635       if not shape.is_compatible_with(found_var.get_shape()):

ValueError: Variable train/loss/inference/conv1/weights already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:

  File "/home/jovyan/work/rcnn.py", line 112, in conv_relu
    initializer=tf.random_normal_initializer())
  File "/home/jovyan/work/rcnn.py", line 129, in inference
    conv1 = conv_relu(X, [1, 1, 1, 32], [32], 'conv1')
  File "/home/jovyan/work/define_scope.py", line 60, in decorator
    setattr(self, attribute, function(self))

In [ ]:


In [ ]: