In [1]:
#!/usr/bin/env python

from PIL import Image
%matplotlib inline
#%matplotlib qt

import skimage
import skimage.io
import skimage.transform

import os
import scipy as scp
import scipy.misc

import numpy as np
import tensorflow as tf

import fcn8_vgg_train
import utils

import loss

from tensorflow.python.framework import ops

#os.environ['CUDA_VISIBLE_DEVICES'] = ''

#img1 = skimage.io.imread("./test_data/tabby_cat.png")
# Load the RGB test image and its label image from disk.
img1 = skimage.io.imread("./test_data/19.jpg")
lbl1 = skimage.io.imread("./test_data/labels.png")

# NOTE(review): FLAGS is bound here but never read in this notebook — candidate for removal.
FLAGS = tf.app.flags.FLAGS

# NOTE(review): mid-notebook import; consider moving it to the import cell at the top.
from skimage import io, img_as_ubyte

obj_array = np.zeros((lbl1.shape[0],lbl1.shape[1]))
bg_array = np.zeros((lbl1.shape[0],lbl1.shape[1]))

print lbl1.shape

obj_array[lbl1[:,:,0] > 0] = 1
bg_array[lbl1[:,:, 0] == 0] = 1

lbl_in = np.array([obj_array, bg_array]).swapaxes(0,2).swapaxes(0,1)


print lbl_in.shape
io.imshow(lbl_in[:,:,1])


(333, 500, 3)
(333, 500, 2)
Out[1]:
<matplotlib.image.AxesImage at 0x5a1a050>

In [2]:
# Open a TF session and build feed tensors for the single training example.
# NOTE(review): the body of this `with` block appears to continue in the
# following cells (notebook-export artifact); `sess` is used by later cells,
# which only works while this session remains open — confirm original layout.
with tf.Session() as sess:
    # Shape-free float placeholders for one image and its (H, W, 2) label map.
    images = tf.placeholder("float")
    labels = tf.placeholder("float")
    feed_dict = {images: img1, labels: lbl_in}

    # Add a leading batch dimension: (H, W, C) -> (1, H, W, C).
    batch_images = tf.expand_dims(images, 0)

    batch_labels = tf.expand_dims(labels, 0)


    #print type(batch_images)

    #print type(batch_labels)

    # Construct the FCN-8s/VGG wrapper (the "npy file loaded" output below
    # indicates it reads pretrained weights at construction time).
    vgg_fcn = fcn8_vgg_train.FCN8VGG()


npy file loaded

In [3]:
# Build the FCN-8s graph with a 2-class, randomly initialised score layer.
# NOTE(review): the original cell had inconsistent indentation (a notebook
# export artifact that is a syntax error as written); statements below are
# normalised to top level.
with tf.name_scope("content_vgg"):
    #vgg_fcn.build(batch_images, debug=True)
    vgg_fcn.build(batch_images, train=True, num_classes=2, random_init_fc8=True)

print('Finished building Network.')

# Create the initializer op once and run it. The original code built a second,
# identical initializer op for sess.run() and never used the first `init`.
init = tf.initialize_all_variables()
sess.run(init)

print('Running the Network')


Finished building Network.
Running the Network

In [4]:
# The per-pixel class scores from the final upsampling layer of the FCN.
logits = vgg_fcn.upscore32

# NOTE(review): this rebinds the name `labels`, shadowing the placeholder
# created earlier. feed_dict still keys on the original placeholder object, so
# feeding keeps working, but a distinct name (e.g. label_batch) would be
# clearer. Indentation normalised to top level (export artifact).
labels = batch_labels

In [5]:
loss = vgg_fcn.loss_study(logits, labels, 2, head=None)
    
    loss_get = sess.run(loss, feed_dict=feed_dict) 
    
    print loss_get


12.2698

In [ ]:


In [6]:
# One Adam step per iteration on the single training image; lr = 5e-5.
train_op = tf.train.AdamOptimizer(0.00005).minimize(loss)

# Re-initialize all variables so the optimizer slots added by AdamOptimizer()
# are initialized too. NOTE(review): this also re-randomizes the network
# weights initialized earlier — confirm that is intended.
init_op = tf.initialize_all_variables()
sess.run(init_op)

for i in range(100):
    loss_get = sess.run(loss, feed_dict=feed_dict)

    # NOTE(review): the printed value is the loss, not an accuracy; the
    # label is corrected accordingly. Indentation normalised (export artifact).
    print("step %d, training loss %g" % (i, loss_get))
    sess.run(train_op, feed_dict=feed_dict)


step 0, training accuracy 12.2736
step 1, training accuracy 12.057
step 2, training accuracy 11.8431
step 3, training accuracy 11.6322
step 4, training accuracy 11.4244
step 5, training accuracy 11.2197
step 6, training accuracy 11.0182
step 7, training accuracy 10.8199
step 8, training accuracy 10.6247
step 9, training accuracy 10.4325
step 10, training accuracy 10.2433
step 11, training accuracy 10.0571
step 12, training accuracy 9.87395
step 13, training accuracy 9.69353
step 14, training accuracy 9.5159
step 15, training accuracy 9.3409
step 16, training accuracy 9.16852
step 17, training accuracy 8.99835
step 18, training accuracy 8.83043
step 19, training accuracy 8.66461
step 20, training accuracy 8.50067
step 21, training accuracy 8.33795
step 22, training accuracy 8.17663
step 23, training accuracy 8.01603
step 24, training accuracy 7.85573
step 25, training accuracy 7.69559
step 26, training accuracy 7.5344
step 27, training accuracy 7.37169
step 28, training accuracy 7.20669
step 29, training accuracy 7.04038
step 30, training accuracy 6.88422
step 31, training accuracy 6.77192
step 32, training accuracy 6.65957
step 33, training accuracy 6.52238
step 34, training accuracy 6.38335
step 35, training accuracy 6.25632
step 36, training accuracy 6.13684
step 37, training accuracy 6.02327
step 38, training accuracy 5.91159
step 39, training accuracy 5.80146
step 40, training accuracy 5.69276
step 41, training accuracy 5.58663
step 42, training accuracy 5.48312
step 43, training accuracy 5.38225
step 44, training accuracy 5.28408
step 45, training accuracy 5.18807
step 46, training accuracy 5.09356
step 47, training accuracy 5.00053
step 48, training accuracy 4.90879
step 49, training accuracy 4.81811
step 50, training accuracy 4.72899
step 51, training accuracy 4.64193
step 52, training accuracy 4.55642
step 53, training accuracy 4.47267
step 54, training accuracy 4.39031
step 55, training accuracy 4.30817
step 56, training accuracy 4.22635
step 57, training accuracy 4.14487
step 58, training accuracy 4.06338
step 59, training accuracy 3.98234
step 60, training accuracy 3.90342
step 61, training accuracy 3.82562
step 62, training accuracy 3.74604
step 63, training accuracy 3.6684
step 64, training accuracy 3.5895
step 65, training accuracy 3.50927
step 66, training accuracy 3.42677
step 67, training accuracy 3.34339
step 68, training accuracy 3.26364
step 69, training accuracy 3.20638
step 70, training accuracy 3.13424
step 71, training accuracy 3.078
step 72, training accuracy 3.00102
step 73, training accuracy 2.93339
step 74, training accuracy 2.87739
step 75, training accuracy 2.80683
step 76, training accuracy 2.75086
step 77, training accuracy 2.69109
step 78, training accuracy 2.63531
step 79, training accuracy 2.59282
step 80, training accuracy 2.53153
step 81, training accuracy 2.47553
step 82, training accuracy 2.42576
step 83, training accuracy 2.37622
step 84, training accuracy 2.33127
step 85, training accuracy 2.28271
step 86, training accuracy 2.24035
step 87, training accuracy 2.19455
step 88, training accuracy 2.14937
step 89, training accuracy 2.10385
step 90, training accuracy 2.06524
step 91, training accuracy 2.02563
step 92, training accuracy 1.98353
step 93, training accuracy 1.9429
step 94, training accuracy 1.91665
step 95, training accuracy 1.89329
step 96, training accuracy 1.83026
step 97, training accuracy 1.81457
step 98, training accuracy 1.76305
step 99, training accuracy 1.7375

In [17]:
# Continue training for another 500 steps with the same ops and feed.
for i in range(500):
    loss_get = sess.run(loss, feed_dict=feed_dict)

    # NOTE(review): the printed value is the loss, not an accuracy.
    print("step %d, training loss %g" % (i, loss_get))
    sess.run(train_op, feed_dict=feed_dict)
    
#logits_get, labels_get, cross_entropy_get = sess.run([vgg_fcn.logits, vgg_fcn.labels, vgg_fcn.cross_entropy]) 
    

#print logits_get.shape 
#print labels_get.shape
#print cross_entropy_get.shape


step 0, training accuracy 0.0747245
step 1, training accuracy 0.0720168
step 2, training accuracy 0.0718566
step 3, training accuracy 0.0724076
step 4, training accuracy 0.0717721
step 5, training accuracy 0.0740787
step 6, training accuracy 0.0777244
step 7, training accuracy 0.0725177
step 8, training accuracy 0.0715202
step 9, training accuracy 0.0712285
step 10, training accuracy 0.0775024
step 11, training accuracy 0.0707385
step 12, training accuracy 0.0778318
step 13, training accuracy 0.0757244
step 14, training accuracy 0.0753323
step 15, training accuracy 0.0783738
step 16, training accuracy 0.0766112
step 17, training accuracy 0.0838786
step 18, training accuracy 0.0742568
step 19, training accuracy 0.076266
step 20, training accuracy 0.0732501
step 21, training accuracy 0.0763539
step 22, training accuracy 0.0706362
step 23, training accuracy 0.0724975
step 24, training accuracy 0.0713552
step 25, training accuracy 0.0738456
step 26, training accuracy 0.0698821
step 27, training accuracy 0.0744666
step 28, training accuracy 0.0789453
step 29, training accuracy 0.0730063
step 30, training accuracy 0.0738715
step 31, training accuracy 0.070561
step 32, training accuracy 0.0778562
step 33, training accuracy 0.0759961
step 34, training accuracy 0.074866
step 35, training accuracy 0.0718145
step 36, training accuracy 0.0774372
step 37, training accuracy 0.0711584
step 38, training accuracy 0.0741987
step 39, training accuracy 0.0732523
step 40, training accuracy 0.0701117
step 41, training accuracy 0.0755799
step 42, training accuracy 0.0711096
step 43, training accuracy 0.072736
step 44, training accuracy 0.0712345
step 45, training accuracy 0.0733033
step 46, training accuracy 0.0709882
step 47, training accuracy 0.0785838
step 48, training accuracy 0.0715791
step 49, training accuracy 0.0722805
step 50, training accuracy 0.0698008
step 51, training accuracy 0.0740748
step 52, training accuracy 0.0711228
step 53, training accuracy 0.0715284
step 54, training accuracy 0.0756972
step 55, training accuracy 0.0708775
step 56, training accuracy 0.0782746
step 57, training accuracy 0.0685875
step 58, training accuracy 0.0721419
step 59, training accuracy 0.0703621
step 60, training accuracy 0.075839
step 61, training accuracy 0.0684434
step 62, training accuracy 0.0725299
step 63, training accuracy 0.0712577
step 64, training accuracy 0.0734751
step 65, training accuracy 0.0691951
step 66, training accuracy 0.0803543
step 67, training accuracy 0.0696767
step 68, training accuracy 0.0778628
step 69, training accuracy 0.0729659
step 70, training accuracy 0.0754669
step 71, training accuracy 0.0699725
step 72, training accuracy 0.0790512
step 73, training accuracy 0.0743527
step 74, training accuracy 0.0710601
step 75, training accuracy 0.0730947
step 76, training accuracy 0.0707602
step 77, training accuracy 0.0719625
step 78, training accuracy 0.0715241
step 79, training accuracy 0.0706103
step 80, training accuracy 0.074312
step 81, training accuracy 0.0681208
step 82, training accuracy 0.0687178
step 83, training accuracy 0.0681645
step 84, training accuracy 0.0677982
step 85, training accuracy 0.0696272
step 86, training accuracy 0.0687518
step 87, training accuracy 0.0682976
step 88, training accuracy 0.0725616
step 89, training accuracy 0.0670812
step 90, training accuracy 0.0695062
step 91, training accuracy 0.0686915
step 92, training accuracy 0.0667892
step 93, training accuracy 0.0680172
step 94, training accuracy 0.0669181
step 95, training accuracy 0.0686438
step 96, training accuracy 0.0688651
step 97, training accuracy 0.0742926
step 98, training accuracy 0.0706309
step 99, training accuracy 0.0682976
step 100, training accuracy 0.0731287
step 101, training accuracy 0.0674456
step 102, training accuracy 0.0710741
step 103, training accuracy 0.0680579
step 104, training accuracy 0.0744874
step 105, training accuracy 0.0700864
step 106, training accuracy 0.069241
step 107, training accuracy 0.0679391
step 108, training accuracy 0.0703522
step 109, training accuracy 0.066617
step 110, training accuracy 0.0699735
step 111, training accuracy 0.0720354
step 112, training accuracy 0.0672828
step 113, training accuracy 0.0691445
step 114, training accuracy 0.0700602
step 115, training accuracy 0.0666964
step 116, training accuracy 0.0727679
step 117, training accuracy 0.0683006
step 118, training accuracy 0.0698858
step 119, training accuracy 0.0662032
step 120, training accuracy 0.0682418
step 121, training accuracy 0.0694116
step 122, training accuracy 0.0682311
step 123, training accuracy 0.0665487
step 124, training accuracy 0.0654959
step 125, training accuracy 0.0660631
step 126, training accuracy 0.0693862
step 127, training accuracy 0.0658656
step 128, training accuracy 0.0659562
step 129, training accuracy 0.0665146
step 130, training accuracy 0.0688648
step 131, training accuracy 0.0726354
step 132, training accuracy 0.067661
step 133, training accuracy 0.0775727
step 134, training accuracy 0.0690883
step 135, training accuracy 0.0750398
step 136, training accuracy 0.0664494
step 137, training accuracy 0.0778434
step 138, training accuracy 0.0724467
step 139, training accuracy 0.0681179
step 140, training accuracy 0.0741877
step 141, training accuracy 0.0678298
step 142, training accuracy 0.0745703
step 143, training accuracy 0.0673523
step 144, training accuracy 0.0706536
step 145, training accuracy 0.0670208
step 146, training accuracy 0.0751305
step 147, training accuracy 0.0662684
step 148, training accuracy 0.0790188
step 149, training accuracy 0.0680416
step 150, training accuracy 0.075159
step 151, training accuracy 0.075656
step 152, training accuracy 0.0687514
step 153, training accuracy 0.0692804
step 154, training accuracy 0.0679706
step 155, training accuracy 0.0739352
step 156, training accuracy 0.0688627
step 157, training accuracy 0.0711647
step 158, training accuracy 0.0742466
step 159, training accuracy 0.0691294
step 160, training accuracy 0.068555
step 161, training accuracy 0.073322
step 162, training accuracy 0.0708902
step 163, training accuracy 0.0678416
step 164, training accuracy 0.0741889
step 165, training accuracy 0.0711925
step 166, training accuracy 0.0799763
step 167, training accuracy 0.0708443
step 168, training accuracy 0.0765503
step 169, training accuracy 0.0705314
step 170, training accuracy 0.0682046
step 171, training accuracy 0.0725229
step 172, training accuracy 0.0707186
step 173, training accuracy 0.0675533
step 174, training accuracy 0.0736521
step 175, training accuracy 0.0665194
step 176, training accuracy 0.0697414
step 177, training accuracy 0.066671
step 178, training accuracy 0.0735521
step 179, training accuracy 0.0657187
step 180, training accuracy 0.0718168
step 181, training accuracy 0.068294
step 182, training accuracy 0.0683485
step 183, training accuracy 0.0674215
step 184, training accuracy 0.0676094
step 185, training accuracy 0.0658499
step 186, training accuracy 0.0657423
step 187, training accuracy 0.0658616
step 188, training accuracy 0.0649976
step 189, training accuracy 0.0657173
step 190, training accuracy 0.0652788
step 191, training accuracy 0.0669434
step 192, training accuracy 0.0644065
step 193, training accuracy 0.0657676
step 194, training accuracy 0.0653909
step 195, training accuracy 0.0643019
step 196, training accuracy 0.0642981
step 197, training accuracy 0.0649089
step 198, training accuracy 0.0703552
step 199, training accuracy 0.066712
step 200, training accuracy 0.0643294
step 201, training accuracy 0.0643761
step 202, training accuracy 0.0646207
step 203, training accuracy 0.0648839
step 204, training accuracy 0.0642055
step 205, training accuracy 0.0657803
step 206, training accuracy 0.0661377
step 207, training accuracy 0.0651914
step 208, training accuracy 0.0645187
step 209, training accuracy 0.067739
step 210, training accuracy 0.0632558
step 211, training accuracy 0.0641838
step 212, training accuracy 0.0648542
step 213, training accuracy 0.0644899
step 214, training accuracy 0.0718841
step 215, training accuracy 0.0657796
step 216, training accuracy 0.0664429
step 217, training accuracy 0.0654465
step 218, training accuracy 0.0679885
step 219, training accuracy 0.0694216
step 220, training accuracy 0.0664744
step 221, training accuracy 0.0701197
step 222, training accuracy 0.0642609
step 223, training accuracy 0.0665707
step 224, training accuracy 0.0686076
step 225, training accuracy 0.0672474
step 226, training accuracy 0.0676163
step 227, training accuracy 0.0685339
step 228, training accuracy 0.0653447
step 229, training accuracy 0.0655897
step 230, training accuracy 0.0685166
step 231, training accuracy 0.0669883
step 232, training accuracy 0.0652927
step 233, training accuracy 0.0648286
step 234, training accuracy 0.0641617
step 235, training accuracy 0.0632887
step 236, training accuracy 0.0641143
step 237, training accuracy 0.0642743
step 238, training accuracy 0.0633061
step 239, training accuracy 0.0633476
step 240, training accuracy 0.06447
step 241, training accuracy 0.0647891
step 242, training accuracy 0.0624176
step 243, training accuracy 0.0639883
step 244, training accuracy 0.0634851
step 245, training accuracy 0.0624455
step 246, training accuracy 0.063437
step 247, training accuracy 0.0648795
step 248, training accuracy 0.0631026
step 249, training accuracy 0.0634995
step 250, training accuracy 0.0632337
step 251, training accuracy 0.0634162
step 252, training accuracy 0.0642054
step 253, training accuracy 0.0641284
step 254, training accuracy 0.0622733
step 255, training accuracy 0.0655742
step 256, training accuracy 0.0659106
step 257, training accuracy 0.0619379
step 258, training accuracy 0.0634962
step 259, training accuracy 0.0628377
step 260, training accuracy 0.0679448
step 261, training accuracy 0.0660201
step 262, training accuracy 0.0638856
step 263, training accuracy 0.0696351
step 264, training accuracy 0.0639335
step 265, training accuracy 0.0658786
step 266, training accuracy 0.0657253
step 267, training accuracy 0.0710165
step 268, training accuracy 0.0655009
step 269, training accuracy 0.0716183
step 270, training accuracy 0.0650877
step 271, training accuracy 0.0748029
step 272, training accuracy 0.0633394
step 273, training accuracy 0.0685343
step 274, training accuracy 0.0646438
step 275, training accuracy 0.0652397
step 276, training accuracy 0.0656409
step 277, training accuracy 0.0636902
step 278, training accuracy 0.0656217
step 279, training accuracy 0.0628165
step 280, training accuracy 0.0692396
step 281, training accuracy 0.0631034
step 282, training accuracy 0.0653098
step 283, training accuracy 0.0648411
step 284, training accuracy 0.0647143
step 285, training accuracy 0.0644262
step 286, training accuracy 0.0634993
step 287, training accuracy 0.0626579
step 288, training accuracy 0.0636483
step 289, training accuracy 0.063671
step 290, training accuracy 0.061991
step 291, training accuracy 0.0670232
step 292, training accuracy 0.0610802
step 293, training accuracy 0.0698989
step 294, training accuracy 0.0621917
step 295, training accuracy 0.0625038
step 296, training accuracy 0.0640684
step 297, training accuracy 0.0634678
step 298, training accuracy 0.0634858
step 299, training accuracy 0.0667482
step 300, training accuracy 0.0635248
step 301, training accuracy 0.0648626
step 302, training accuracy 0.0684547
step 303, training accuracy 0.0631119
step 304, training accuracy 0.0642554
step 305, training accuracy 0.0634145
step 306, training accuracy 0.0625336
step 307, training accuracy 0.0657228
step 308, training accuracy 0.0642546
step 309, training accuracy 0.0624323
step 310, training accuracy 0.0674616
step 311, training accuracy 0.0631353
step 312, training accuracy 0.0672767
step 313, training accuracy 0.0647295
step 314, training accuracy 0.0665163
step 315, training accuracy 0.0631415
step 316, training accuracy 0.0647025
step 317, training accuracy 0.062213
step 318, training accuracy 0.0664465
step 319, training accuracy 0.062561
step 320, training accuracy 0.0622329
step 321, training accuracy 0.0614542
step 322, training accuracy 0.0654002
step 323, training accuracy 0.0628535
step 324, training accuracy 0.0650493
step 325, training accuracy 0.0637813
step 326, training accuracy 0.0650735
step 327, training accuracy 0.0644758
step 328, training accuracy 0.0657639
step 329, training accuracy 0.0641689
step 330, training accuracy 0.0665744
step 331, training accuracy 0.0631556
step 332, training accuracy 0.0653843
step 333, training accuracy 0.0618651
step 334, training accuracy 0.0658745
step 335, training accuracy 0.0650241
step 336, training accuracy 0.0629515
step 337, training accuracy 0.0629206
step 338, training accuracy 0.0627309
step 339, training accuracy 0.0656316
step 340, training accuracy 0.0615657
step 341, training accuracy 0.0643416
step 342, training accuracy 0.0623382
step 343, training accuracy 0.0633843
step 344, training accuracy 0.0619419
step 345, training accuracy 0.0622726
step 346, training accuracy 0.062363
step 347, training accuracy 0.0699203
step 348, training accuracy 0.0626355
step 349, training accuracy 0.0655895
step 350, training accuracy 0.061984
step 351, training accuracy 0.0648269
step 352, training accuracy 0.0660922
step 353, training accuracy 0.0636476
step 354, training accuracy 0.0617566
step 355, training accuracy 0.0626823
step 356, training accuracy 0.0646625
step 357, training accuracy 0.0615135
step 358, training accuracy 0.0640899
step 359, training accuracy 0.0617439
step 360, training accuracy 0.0615173
step 361, training accuracy 0.0628804
step 362, training accuracy 0.0640113
step 363, training accuracy 0.0611032
step 364, training accuracy 0.0658771
step 365, training accuracy 0.0656595
step 366, training accuracy 0.0659374
step 367, training accuracy 0.0597978
step 368, training accuracy 0.0657575
step 369, training accuracy 0.0643362
step 370, training accuracy 0.0652797
step 371, training accuracy 0.0672887
step 372, training accuracy 0.0635443
step 373, training accuracy 0.0654363
step 374, training accuracy 0.0661297
step 375, training accuracy 0.0649796
step 376, training accuracy 0.0644627
step 377, training accuracy 0.0671783
step 378, training accuracy 0.0641325
step 379, training accuracy 0.0614691
step 380, training accuracy 0.0633238
step 381, training accuracy 0.0630781
step 382, training accuracy 0.0631719
step 383, training accuracy 0.0630476
step 384, training accuracy 0.0649125
step 385, training accuracy 0.0614628
step 386, training accuracy 0.0611367
step 387, training accuracy 0.0623522
step 388, training accuracy 0.0618451
step 389, training accuracy 0.0616538
step 390, training accuracy 0.0605467
step 391, training accuracy 0.0633329
step 392, training accuracy 0.0626089
step 393, training accuracy 0.062164
step 394, training accuracy 0.067993
step 395, training accuracy 0.0639542
step 396, training accuracy 0.0638961
step 397, training accuracy 0.0612778
step 398, training accuracy 0.0652787
step 399, training accuracy 0.0614136
step 400, training accuracy 0.0678967
step 401, training accuracy 0.0617833
step 402, training accuracy 0.0638176
step 403, training accuracy 0.0612688
step 404, training accuracy 0.0646709
step 405, training accuracy 0.0621302
step 406, training accuracy 0.0672273
step 407, training accuracy 0.0620106
step 408, training accuracy 0.0639142
step 409, training accuracy 0.0620801
step 410, training accuracy 0.0621559
step 411, training accuracy 0.0615496
step 412, training accuracy 0.0611371
step 413, training accuracy 0.0620058
step 414, training accuracy 0.0601789
step 415, training accuracy 0.0612968
step 416, training accuracy 0.0600937
step 417, training accuracy 0.0611232
step 418, training accuracy 0.0606679
step 419, training accuracy 0.0607554
step 420, training accuracy 0.059194
step 421, training accuracy 0.0611077
step 422, training accuracy 0.0618375
step 423, training accuracy 0.0604977
step 424, training accuracy 0.0631004
step 425, training accuracy 0.0614763
step 426, training accuracy 0.0663636
step 427, training accuracy 0.065671
step 428, training accuracy 0.064199
step 429, training accuracy 0.0647803
step 430, training accuracy 0.061812
step 431, training accuracy 0.0630054
step 432, training accuracy 0.061404
step 433, training accuracy 0.0592641
step 434, training accuracy 0.0615211
step 435, training accuracy 0.0599422
step 436, training accuracy 0.0636926
step 437, training accuracy 0.0601464
step 438, training accuracy 0.06095
step 439, training accuracy 0.0599553
step 440, training accuracy 0.0600901
step 441, training accuracy 0.0611221
step 442, training accuracy 0.0632975
step 443, training accuracy 0.0604251
step 444, training accuracy 0.0637019
step 445, training accuracy 0.0611787
step 446, training accuracy 0.0626112
step 447, training accuracy 0.0661183
step 448, training accuracy 0.0611817
step 449, training accuracy 0.0660146
step 450, training accuracy 0.0596282
step 451, training accuracy 0.0610811
step 452, training accuracy 0.063094
step 453, training accuracy 0.0598964
step 454, training accuracy 0.0626493
step 455, training accuracy 0.0615528
step 456, training accuracy 0.0606991
step 457, training accuracy 0.0617111
step 458, training accuracy 0.0650235
step 459, training accuracy 0.0615658
step 460, training accuracy 0.0616955
step 461, training accuracy 0.061982
step 462, training accuracy 0.0599039
step 463, training accuracy 0.0647706
step 464, training accuracy 0.0604166
step 465, training accuracy 0.0601014
step 466, training accuracy 0.0616022
step 467, training accuracy 0.0618049
step 468, training accuracy 0.058992
step 469, training accuracy 0.0596996
step 470, training accuracy 0.0604491
step 471, training accuracy 0.0596226
step 472, training accuracy 0.0594008
step 473, training accuracy 0.0592784
step 474, training accuracy 0.0602197
step 475, training accuracy 0.0591395
step 476, training accuracy 0.060293
step 477, training accuracy 0.0601337
step 478, training accuracy 0.0584549
step 479, training accuracy 0.0591456
step 480, training accuracy 0.0591801
step 481, training accuracy 0.0600886
step 482, training accuracy 0.0584339
step 483, training accuracy 0.0586043
step 484, training accuracy 0.0583794
step 485, training accuracy 0.0599584
step 486, training accuracy 0.057238
step 487, training accuracy 0.0603744
step 488, training accuracy 0.0588798
step 489, training accuracy 0.0589182
step 490, training accuracy 0.0592779
step 491, training accuracy 0.0605474
step 492, training accuracy 0.0593422
step 493, training accuracy 0.0601422
step 494, training accuracy 0.0608739
step 495, training accuracy 0.0592518
step 496, training accuracy 0.0588606
step 497, training accuracy 0.0599164
step 498, training accuracy 0.0664997
step 499, training accuracy 0.0657896

In [ ]:


In [14]:
#print up.shape
#print labels_get.shape

In [18]:
#check project image in each layer

#v = sess.run(vgg_fcn.conv5_1, feed_dict=feed_dict) 
# Fetch the pool2 activations for the fed image; the recorded output below
# shows shape (1, 84, 125, 128).
v = sess.run(vgg_fcn.pool2, feed_dict=feed_dict)

In [19]:
# Inspect the fetched activations and pull out one feature map (channel 10).
print type(v)
print v.shape
v_temp = v[0,:,:,10]
print v_temp.shape


<type 'numpy.ndarray'>
(1, 84, 125, 128)
(84, 125)

In [20]:
# NOTE(review): PIL, %matplotlib and the skimage names were already imported
# at the top of the notebook; this cell re-imports them (redundant).
from PIL import Image
%matplotlib inline
#%matplotlib qt

from skimage import io, img_as_ubyte
# Display the selected pool2 feature map (float data, so skimage stretches contrast).
io.imshow(v_temp)


/home/irashadow/env_TensorFlow/lib/python2.7/site-packages/skimage/io/_plugins/matplotlib_plugin.py:77: UserWarning: Float image out of standard range; displaying image with stretched contrast.
  warn("Float image out of standard range; displaying "
Out[20]:
<matplotlib.image.AxesImage at 0xbcb3c50>

In [21]:
#check project image in each layer

# Fetch the coarse (pre-upsampling) score layer and display class channel 1;
# recorded output shows shape (1, 11, 16, 2).
w = sess.run(vgg_fcn.score_fr, feed_dict=feed_dict) 
print type(w)
print w.shape
w_temp = w[0,:,:,1]
print w_temp.shape
io.imshow(w_temp)


<type 'numpy.ndarray'>
(1, 11, 16, 2)
(11, 16)
Out[21]:
<matplotlib.image.AxesImage at 0xbab74d0>

In [22]:
#check project image in each layer
# Fetch the 2x-upsampled score layer and display class channel 0;
# recorded output shows shape (1, 21, 32, 2).
w = sess.run(vgg_fcn.upscore2, feed_dict=feed_dict) 
print type(w)
print w.shape
w_temp = w[0,:,:,0]
print w_temp.shape
io.imshow(w_temp)


<type 'numpy.ndarray'>
(1, 21, 32, 2)
(21, 32)
Out[22]:
<matplotlib.image.AxesImage at 0xc95fd90>

In [23]:
#check project image in each layer
# Fetch the 4x-upsampled score layer and display class channel 0;
# recorded output shows shape (1, 42, 63, 2).
w = sess.run(vgg_fcn.upscore4, feed_dict=feed_dict) 
print type(w)
print w.shape
w_temp = w[0,:,:,0]
print w_temp.shape
io.imshow(w_temp)


<type 'numpy.ndarray'>
(1, 42, 63, 2)
(42, 63)
Out[23]:
<matplotlib.image.AxesImage at 0xcd70b90>

In [35]:
#check project image in each layer
w = sess.run(vgg_fcn.upscore32, feed_dict=feed_dict) 
print type(w)
print w.shape
w_temp = w[0,:,:,0]
print w_temp.shape
io.imshow(w_temp)
 
w_norm = ((w_temp-w_temp.min())/(w_temp.max()-w_temp.min()))    

img = Image.fromarray(img_as_ubyte(w_norm)).convert('RGB')
img.save('./pred_up_8x_1000_loops.png', "PNG", quality=80)


<type 'numpy.ndarray'>
(1, 333, 500, 2)
(333, 500)

In [25]:
#check project image in each layer
# Fetch the predicted class map (pred_up); recorded output shows shape
# (1, 333, 500) of integer labels.
w = sess.run(vgg_fcn.pred_up, feed_dict=feed_dict) 
print type(w)
print w.shape
w_temp = w[0,:,:]
print w_temp.shape
io.imshow(w_temp)

# Invert the map to 0/255 and save as RGB — assumes labels are 0/1 (TODO confirm).
# NOTE(review): this overwrites './pred_up_8x_1000_loops.png' written by the
# previous cell — confirm that is intended.
img = Image.fromarray(img_as_ubyte(255-w_temp*255)).convert('RGB')
img.save('./pred_up_8x_1000_loops.png', "PNG", quality=80)


<type 'numpy.ndarray'>
(1, 333, 500)
(333, 500)

In [27]:
logits_reshape = np.reshape(w, (-1, 2))


print logits_reshape.shape

sub_data = logits_reshape[10,:]
print sub_data


(166500, 2)
[-2.65825844  2.78694844]

In [58]:
print np.exp(sub_data)

temp_sum = np.sum(np.exp(sub_data))

print np.exp(sub_data)/temp_sum


[ inf  inf]
[ nan  nan]
/home/irashadow/env_TensorFlow/lib/python2.7/site-packages/ipykernel/__main__.py:1: RuntimeWarning: overflow encountered in exp
  if __name__ == '__main__':
/home/irashadow/env_TensorFlow/lib/python2.7/site-packages/ipykernel/__main__.py:3: RuntimeWarning: overflow encountered in exp
  app.launch_new_instance()
/home/irashadow/env_TensorFlow/lib/python2.7/site-packages/ipykernel/__main__.py:5: RuntimeWarning: overflow encountered in exp
/home/irashadow/env_TensorFlow/lib/python2.7/site-packages/ipykernel/__main__.py:5: RuntimeWarning: invalid value encountered in divide

In [ ]:


In [11]:
# 3x3x3 toy integer volume used below to exercise coordinate lookup via np.where.
arr = np.array([
    6,  9,  4,   5,  2,  1,  10, 15, 30,
    9,  0,  1,   4,  6,  4,   8,  3,  9,
    6,  7,  4,   0,  1,  6,   4,  0,  1,
]).reshape(3, 3, 3)

In [12]:
print arr


[[[ 6  9  4]
  [ 5  2  1]
  [10 15 30]]

 [[ 9  0  1]
  [ 4  6  4]
  [ 8  3  9]]

 [[ 6  7  4]
  [ 0  1  6]
  [ 4  0  1]]]

In [21]:
coord = np.where((arr[:,:,0]==10) & (arr[:,:,1]==15) & (arr[:,:,2]==30))

In [22]:
print coord


(array([0]), array([2]))

In [ ]: