In [1]:
%matplotlib inline

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import tensorflow as tf
import numpy as np
import time
from resnet import generate_lenet_graph

In [2]:
# Restore the trained MNIST model and cache every variable's value as a numpy
# array, keyed by name, so the weights can be copied into a fresh graph below.
train_graph = tf.Graph()
with train_graph.as_default():
    with tf.Session() as session:
        saver = tf.train.import_meta_graph('/Users/taiyuanz/tensorflow/image_net/mnist_model/model.ckpt-19999.meta')
        saver.restore(session, '/Users/taiyuanz/tensorflow/image_net/mnist_model/model.ckpt-19999')
        graph = session.graph
        name_to_variables = {v.name: session.run(v) for v in graph.get_collection('variables')}

        # Sanity check: the first conv layer's 5x5x1x32 kernel.
        print(name_to_variables["conv1/W:0"])


[[[[ 0.05113909  0.04381812  0.02659773 -0.01993001  0.04619731 -0.00452234
     0.03183864 -0.00510896  0.06517674 -0.03519627  0.11048368  0.02240274
     0.04311677  0.03847463  0.02527412  0.01453614  0.00101281  0.05337216
     0.03393031  0.06977642  0.02160246 -0.1578251  -0.08632866  0.09886949
     0.08256903  0.08528065  0.00029364 -0.00527354  0.0232158   0.10365336
     0.01000161  0.09755064]]

  [[ 0.00959306  0.10670724  0.03894611  0.01175356  0.11329081  0.02166279
     0.06193409  0.02069947  0.11469997 -0.06113772  0.10202844  0.04823137
     0.07901468  0.04659472  0.08734411  0.06789946  0.04330951  0.01960913
     0.00328808  0.07450893  0.10065265 -0.21477632 -0.08345801  0.06573732
     0.08823162  0.09863298 -0.07746028  0.02518265  0.06701902  0.13868143
     0.009212    0.09760997]]

  [[-0.09993755  0.09136302  0.05836122  0.01631213  0.13324833  0.04169635
     0.07370796  0.03865109  0.11506147 -0.10936466  0.02597795  0.08114966
     0.04497865  0.04797432  0.12063295  0.09294423  0.04685177 -0.02363501
    -0.08022191  0.08433712  0.06525255 -0.18626404 -0.04129201  0.03134264
     0.11670529  0.11832204 -0.08663164  0.07463238  0.07007913  0.1022667
     0.0338589   0.12735097]]

  [[-0.14178178  0.0813004   0.0372788   0.03255293  0.11743278  0.05750734
     0.0589317   0.06680354  0.10660634 -0.07449882 -0.02185871  0.04866016
    -0.01874766  0.02594161  0.11311416  0.09116504  0.07582804 -0.10903096
    -0.13075703  0.08384157 -0.02772526 -0.10656078 -0.03890527 -0.05817319
     0.10311893  0.10483354  0.01581383  0.06282602  0.0526001   0.02762554
     0.04248661  0.1303947 ]]

  [[-0.03652006  0.03240858 -0.01197309 -0.03329691  0.10338894  0.00389432
     0.04606012  0.02532574  0.08913614  0.04087149 -0.06323549 -0.00351714
    -0.08179336 -0.01339449  0.08480646  0.03594404  0.04704218 -0.07114297
    -0.05443973  0.02803491 -0.15044273 -0.03562957 -0.07713007 -0.11899105
     0.03982622  0.05074707  0.12635261  0.0399579   0.01556756 -0.03771858
     0.00291886  0.10483021]]]


 [[[-0.01710313  0.07958714  0.05955618  0.0488067  -0.04600646  0.03218615
     0.03733667  0.02636613 -0.02005654 -0.09213037  0.10618817  0.03807062
    -0.03123844  0.05017262 -0.08421864  0.08312791  0.06721527 -0.03239175
    -0.05656879  0.00566369  0.08955225 -0.07872897 -0.00624498  0.06609012
    -0.00604646 -0.00066241 -0.04496131  0.01061226  0.05714187  0.11269733
     0.03983878  0.12720628]]

  [[-0.15019079  0.16675289  0.12041831  0.0885883   0.05288496  0.07876354
     0.12557828  0.06738934  0.03610377 -0.15439036  0.06632528  0.10195617
    -0.10372815  0.10952308 -0.02647684  0.16229679  0.12013195 -0.14931542
    -0.17312542  0.01837019  0.14723133 -0.11656623  0.01194822  0.04574239
     0.04722597  0.03396503 -0.13120739  0.07742392  0.15122634  0.13679898
     0.09871493  0.19382519]]

  [[-0.24366251  0.16258667  0.13061768  0.08709051  0.07747865  0.10927807
     0.14997098  0.10099871  0.05316127 -0.18502349 -0.01455842  0.13037333
    -0.21499974  0.13242629  0.01415528  0.18923542  0.15439889 -0.25063917
    -0.22766627  0.0393288   0.08303978 -0.06808363  0.06039512 -0.05402847
     0.02380821  0.00906209 -0.11269847  0.12366898  0.15149356  0.06118219
     0.1310914   0.18163671]]

  [[-0.08037397  0.1196516   0.05687911  0.07599441  0.05945102  0.0732838
     0.09269362  0.11730124  0.02396445 -0.06264293 -0.12203978  0.06885299
    -0.16191366  0.08154323  0.03433087  0.10322715  0.14412782 -0.13138673
    -0.07924806 -0.03213923 -0.06135068  0.02178899  0.03326996 -0.18900827
    -0.05363337 -0.044035    0.03260718  0.10018031  0.13792667 -0.03635912
     0.0768705   0.17753434]]

  [[ 0.04198272  0.03028418 -0.00983605  0.00674749  0.04472021  0.03089384
     0.03442661  0.03629583 -0.03219788  0.08650615 -0.04862987  0.01487449
    -0.07530534  0.05048482 -0.01800506  0.04237647  0.06763196 -0.01378168
     0.04023741 -0.1233722  -0.15875651  0.04255904 -0.04709721 -0.03099484
    -0.10367202 -0.08374816  0.12609006  0.04304848  0.06967607 -0.09540677
     0.03109578  0.1536269 ]]]


 [[[-0.08328676  0.12096848  0.07174978  0.10696555 -0.13804053  0.08162573
     0.11788385  0.03999025 -0.08335242 -0.08767219  0.11044945  0.08581778
    -0.10386361  0.09644283 -0.11774757  0.12877196  0.07858036 -0.10498963
    -0.07674682 -0.0546945   0.13319631  0.04805009  0.1121502   0.07189421
    -0.07319061 -0.08453105 -0.07681152  0.0590859   0.09665818  0.13453361
     0.10849925  0.10364635]]

  [[-0.1868979   0.21241291  0.15501802  0.19154333 -0.12476987  0.2003113
     0.20963037  0.10654492 -0.13452262 -0.09322782  0.05019849  0.1559386
    -0.18989004  0.15290706 -0.16687565  0.22220781  0.16875583 -0.20616628
    -0.15738663 -0.08303848  0.21172626  0.08134841  0.15496278  0.02928131
    -0.16336524 -0.15628457 -0.1521797   0.16898575  0.18126661  0.13084021
     0.18806149  0.14011523]]

  [[-0.09428205  0.16955595  0.20944287  0.23067383 -0.13954654  0.22618267
     0.19845597  0.18488438 -0.15773208 -0.08505955 -0.09486999  0.17204659
    -0.13234413  0.219781   -0.18611927  0.20594421  0.21460827 -0.10178959
    -0.0471821  -0.1987111   0.16158344  0.11414641  0.18439743 -0.15687652
    -0.24997689 -0.20711736 -0.08414268  0.18843934  0.19227239  0.0275877
     0.19655399  0.12736171]]

  [[ 0.03308029  0.08998708  0.15510184  0.14199111 -0.11147299  0.14560993
     0.0850753   0.15348354 -0.14163648  0.05452155 -0.17345646  0.12969591
    -0.04736472  0.14028278 -0.16642958  0.09201004  0.1650836   0.02039606
     0.04515664 -0.21454613  0.04418119  0.14245775  0.11489668 -0.13209666
    -0.1689274  -0.13785794  0.05025083  0.1214479   0.14449374 -0.12189577
     0.12915538  0.12571667]]

  [[ 0.11802106  0.00070685  0.04003727  0.08235142 -0.05854317  0.0619709
     0.01661513  0.06461407 -0.07121623  0.14398037  0.01808721  0.04408725
     0.05720664  0.07263733 -0.08451167  0.02344374  0.08132654  0.09986116
     0.11971391 -0.0829624  -0.04376516  0.11816257  0.03315683  0.0224458
    -0.04553922 -0.03656298  0.11710034  0.02794127  0.09182785 -0.13542715
     0.04411699  0.12508626]]]


 [[[-0.09349151  0.05597452  0.01483429  0.06205218 -0.04531873  0.06097434
     0.0666831  -0.01826682 -0.06424385 -0.00520072  0.09303388  0.05105842
    -0.05355279  0.05087082  0.00552758  0.07039105 -0.0052019  -0.10277692
    -0.05385529 -0.10265768  0.10694804  0.16109054  0.1564863   0.04420175
    -0.08073913 -0.06121805 -0.10359666  0.06041512  0.0497053   0.11314783
     0.04098779  0.01957988]]

  [[-0.10637543  0.14088991  0.12519257  0.12905125 -0.12837559  0.13296278
     0.14342754  0.07400575 -0.08489311 -0.00208102 -0.00119272  0.11128857
    -0.04526787  0.12140248 -0.02326046  0.14386527  0.05820928 -0.08328348
    -0.02082791 -0.15267801  0.1715439   0.19066881  0.22316188 -0.04152254
    -0.09157189 -0.08137931 -0.1066667   0.13635333  0.11010454  0.0928773
     0.10034386 -0.01614185]]

  [[ 0.00969006  0.07463883  0.14043722  0.14943233 -0.15087387  0.15640341
     0.08323708  0.11734014 -0.11126391  0.04280886 -0.17426807  0.120137
     0.04012721  0.1365625  -0.07334058  0.09846881  0.12221248  0.05088146
     0.07139176 -0.11267716  0.16204606  0.19576295  0.25217772 -0.22570142
    -0.04623355 -0.04890928 -0.02334037  0.14192222  0.08340202 -0.04247379
     0.13093287 -0.05989453]]

  [[ 0.07609256 -0.00181665  0.08531109  0.11700162 -0.0914882   0.0894709
     0.03450516  0.11287219 -0.04670208  0.11993562 -0.11095499  0.0702809
     0.07967722  0.08056248 -0.04554453  0.01096726  0.0769621   0.10445537
     0.11172354 -0.0080039   0.10189773  0.18089518  0.15070505 -0.04032745
     0.01807667  0.01131496  0.07512148  0.07138454  0.04417993 -0.17463021
     0.09239649 -0.05621932]]

  [[ 0.11991919 -0.08181913  0.07066667  0.07104202 -0.02440718  0.03831697
    -0.01521501  0.05040823  0.00099337  0.11744429  0.01960485  0.01126699
     0.10188018  0.06525297  0.00489499 -0.01324209  0.05697191  0.10267066
     0.10853492  0.05426575  0.04476413  0.11129408  0.07868291  0.04533694
     0.05189861  0.03076287  0.10352286  0.01628271  0.03972346 -0.13064116
     0.07948217  0.00602219]]]


 [[[-0.08249152  0.0279677  -0.00730905 -0.03614821  0.06569222 -0.0152329
     0.03217102 -0.05330585  0.0505589   0.05928624  0.08596385  0.00697596
    -0.00460398 -0.01738412  0.07997529  0.00944456 -0.0663009  -0.04679645
     0.01731181 -0.04580332  0.0359054   0.1416707   0.09605783  0.03039224
    -0.00168075  0.01994341 -0.09889966  0.01209727 -0.02996122  0.11587568
    -0.03265477 -0.11011814]]

  [[-0.029965    0.06776559  0.05623555  0.06118622  0.04585179  0.03647719
     0.07797436  0.01955868  0.04745406  0.07820298 -0.03789266  0.0727462
     0.0311669   0.01359381  0.07203683  0.02660164 -0.02197007 -0.00439088
     0.03649002 -0.03587619  0.07856335  0.16880679  0.14906681 -0.08281726
     0.02725069  0.05614712 -0.08579551  0.07057703  0.03414821  0.08338176
     0.0156846  -0.16459364]]

  [[ 0.06000303  0.02834164  0.06348059  0.05546321  0.03310162  0.06840307
     0.03283272  0.0601585   0.06064572  0.1180165  -0.11839718  0.07154509
     0.09567112  0.06711207  0.06662396  0.0085061   0.03371228  0.07908089
     0.10335907  0.02971686  0.07965861  0.13716234  0.15962559 -0.11946263
     0.08609921  0.09513801  0.00322406  0.08817002 -0.00124968 -0.05720126
     0.07879968 -0.18469995]]

  [[ 0.11822894 -0.07228475  0.02562424  0.05312922  0.03240464  0.06284147
    -0.03347768  0.07566753  0.05063029  0.15115249 -0.05049251  0.03452057
     0.11452428  0.03519977  0.05843813 -0.05303147  0.04680727  0.10526981
     0.12106275  0.06057413  0.06621354  0.13108227  0.12540993  0.00667213
     0.08056089  0.06178404  0.07243074  0.02542301 -0.05812596 -0.11345253
     0.04772005 -0.17494635]]

  [[ 0.09555864 -0.13761924 -0.01316747  0.03637191  0.02964703 -0.01390302
    -0.05105749  0.04882943  0.05655444  0.11843193  0.05525827  0.01939804
     0.09438848  0.00177755  0.0489265  -0.06600326  0.01641329  0.08021357
     0.08008882  0.08338504  0.04773462  0.07950671  0.09552156  0.05505574
     0.08134712  0.07225169  0.08812957 -0.02681454 -0.05139002 -0.09506686
     0.03712811 -0.1178064 ]]]]
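
The same weights can be inspected without rebuilding a session at all: a checkpoint reader lists every saved tensor directly. A minimal sketch, assuming the checkpoint path used above (note that checkpoint tensor names drop the ':0' suffix that graph variables carry):

In [ ]:
# Sketch: inspect the checkpoint directly with a reader (no graph or session).
reader = tf.train.NewCheckpointReader('/Users/taiyuanz/tensorflow/image_net/mnist_model/model.ckpt-19999')
for name, shape in sorted(reader.get_variable_to_shape_map().items()):
    print(name, shape)
w = reader.get_tensor('conv1/W')  # checkpoint name, without ':0'
print(w.shape)  # expected (5, 5, 1, 32)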

In [7]:
labels = range(10)
num_samples = 20
num_train_iter = 1000
label_to_samples = {}
alpha = 1e-4  # weight for the optional image penalty below (currently unused)

test_graph = tf.Graph()
with test_graph.as_default():
    with tf.Session() as session:
        test_session = session
        # Activation maximization: X is a batch of trainable images; the network
        # weights are frozen, and only X is optimized to fit a target label.
        X = tf.Variable(tf.truncated_normal([num_samples, 28, 28, 1], stddev=0.01), name='X')
        Y_pred_activation = generate_lenet_graph(X)
        # Optional L2 penalty on the images (one term per sample); defined but
        # deliberately left out of the loss below.
        Xsquare = tf.square(X)
        penalty = tf.reduce_sum(tf.sqrt(tf.reduce_sum(Xsquare, (1, 2))), 0)
        Y_placeholder = tf.placeholder(tf.int32, shape=[None, 10], name='Y')
        loss = tf.reduce_mean(
            tf.nn.softmax_cross_entropy_with_logits(logits=Y_pred_activation, labels=Y_placeholder))
        # To regularize, add `alpha * penalty` to the loss above.
        # var_list=[X] means the optimizer updates the images, not the weights.
        grad_op = tf.train.AdamOptimizer(1e-3).minimize(loss, var_list=[X])
        session.run(tf.global_variables_initializer())

        # Copy the checkpointed weights into this graph. Variables without a
        # checkpointed value (e.g. X and its Adam slots) are reported and skipped.
        for v in test_graph.get_collection('variables'):
            if v.name not in name_to_variables:
                print('Variable not found %s' % v.name)
                continue

            value = name_to_variables[v.name]
            print(v.name, v)
            session.run(v.assign(value))

        # Re-fetch the training op and the logits by name; 'Adam' is the op
        # minimize() created and 'pred/add:0' is the tensor returned by
        # generate_lenet_graph.
        train_op = test_graph.get_operation_by_name('Adam')
        # X = test_graph.get_tensor_by_name('X:0')
        Y_pred_activation = test_graph.get_tensor_by_name('pred/add:0')
        W = test_graph.get_tensor_by_name('conv1/W:0')
        # print(session.run(W))

        for label in labels:
            print("Running label %d" % label)
            label_to_samples[label] = []

            # One-hot target: every sample in the batch aims at the same class.
            Y_target = np.zeros((num_samples, 10), dtype=int)
            Y_target[:, label] = 1

            # Re-initialize X with standard-normal noise for this label.
            session.run(X.assign(np.random.randn(num_samples, 28, 28, 1)))
            for i in range(num_train_iter):
                session.run(train_op, feed_dict={Y_placeholder: Y_target})

                if i % 200 == 0:
                    y = session.run(Y_pred_activation)
                    print("Index: ", i, y.argmax(axis=1), y[:, label])

            label_to_samples[label].append(session.run(X))

Variable not found X:0
conv1/W:0 Tensor("conv1/W/read:0", shape=(5, 5, 1, 32), dtype=float32, device=/device:CPU:0)
conv1/b:0 Tensor("conv1/b/read:0", shape=(32,), dtype=float32, device=/device:CPU:0)
Variable not found conv2/W:0
Variable not found conv2/b:0
fc/W:0 Tensor("fc/W/read:0", shape=(3136, 1024), dtype=float32, device=/device:CPU:0)
fc/W_1:0 Tensor("fc/W_1/read:0", shape=(1024,), dtype=float32, device=/device:CPU:0)
pred/W:0 Tensor("pred/W/read:0", shape=(1024, 10), dtype=float32, device=/device:CPU:0)
pred/b:0 Tensor("pred/b/read:0", shape=(10,), dtype=float32, device=/device:CPU:0)
beta1_power:0 Tensor("beta1_power/read:0", shape=(), dtype=float32)
beta2_power:0 Tensor("beta2_power/read:0", shape=(), dtype=float32)
Variable not found X/Adam:0
Variable not found X/Adam_1:0
Running label 0
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-2.61008048 -2.81456423 -2.67557311 -2.45765305 -2.61726284 -2.77917504
 -2.50981808 -2.54539847 -2.34550285 -2.47132254 -2.220505   -2.54257083
 -2.75052476 -2.48599672 -2.6902802  -2.46926951 -2.89745998 -2.52746582
 -2.6527946  -2.80207038]
Index:  200 [5 2 5 5 5 5 2 2 2 5 5 5 5 5 5 5 5 5 5 5] [-0.51605356 -0.83309627 -0.67783403 -0.62537181 -0.59387988 -0.66792214
 -0.54723072 -0.62808985 -0.31694487 -0.64264894 -0.50559592 -0.47913131
 -0.81993294 -0.60487449 -0.80194908 -0.51879203 -0.6594739  -0.66652966
 -0.62488306 -0.8568176 ]
Index:  400 [2 2 5 5 5 5 5 2 2 5 5 5 2 5 5 5 5 2 5 5] [-0.03452366 -0.263298   -0.30714849 -0.15955478 -0.13760051 -0.1947715
 -0.05313798 -0.15583602  0.14266932 -0.20611832 -0.00690177 -0.05381273
 -0.38454667 -0.17438689 -0.33209518 -0.12281913 -0.24465907 -0.2274268
 -0.15226793 -0.39912519]
Index:  600 [0 2 5 5 5 2 5 2 0 5 0 5 2 5 5 5 2 2 0 5] [ 0.28643918  0.03703509 -0.0066537   0.15460649  0.19714287  0.10092696
  0.25357467  0.13030621  0.43175456  0.09880777  0.30508474  0.20548895
 -0.08076338  0.07311126 -0.08126279  0.11782731 -0.01795787  0.04475417
  0.15407595 -0.09651086]
Index:  800 [0 0 5 0 0 0 0 0 0 0 0 0 0 0 5 0 0 0 0 0] [ 0.52365124  0.26047316  0.20401728  0.3534252   0.42225575  0.36185509
  0.48300946  0.34589925  0.6631276   0.35229003  0.52611065  0.40977541
  0.14974019  0.28223312  0.09673987  0.3067801   0.16252443  0.23086378
  0.37396598  0.14084356]
Running label 1
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-1.05869031 -1.25837743 -1.31398201 -0.82845747 -1.09573615 -0.52041245
 -0.91189516 -1.0271666  -0.5576638  -1.08729613 -1.20357084 -1.03155303
 -0.58982897 -0.87344873 -0.69964123 -0.86760366 -0.96867549 -1.1778686
 -1.07108784 -1.31015182]
Index:  200 [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1] [ 1.10612619  0.95269728  1.17144752  1.21428776  1.16755688  1.45673275
  0.96282709  1.02325928  1.28998899  0.97377241  1.14610958  1.09559059
  1.41275191  1.24417257  1.22505343  1.21318781  1.31782293  1.00828695
  0.93774307  0.8558743 ]
Index:  400 [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1] [ 1.78295958  1.59714103  1.92067182  1.85136771  1.81204927  2.0475421
  1.60498142  1.7832315   1.9293952   1.57554483  1.8695333   1.73795867
  2.14110327  1.89245498  1.97104466  1.88636494  2.03553772  1.74359202
  1.63869309  1.60739827]
Index:  600 [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1] [ 2.22819996  2.03561807  2.37117505  2.27129292  2.26439691  2.40252471
  2.02168751  2.20542121  2.35529613  1.96307564  2.32038665  2.16982198
  2.55696678  2.31818438  2.41559696  2.30427551  2.46720266  2.17276502
  2.1166141   2.03610396]
Index:  800 [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1] [ 2.55819178  2.33167291  2.69165707  2.60651994  2.59974313  2.69318223
  2.33309412  2.52801394  2.69731307  2.28074479  2.6430819   2.50780058
  2.8595438   2.61640716  2.74586511  2.61018157  2.79080296  2.48927188
  2.49057555  2.36203194]
Running label 2
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-0.17953032  0.00510452 -0.09633357 -0.11905358 -0.18945295  0.00215998
 -0.07733098 -0.09506503  0.00218309 -0.04894567  0.19571796 -0.35215732
 -0.02590656  0.00477942 -0.27923399 -0.02712009 -0.10106459  0.05048241
 -0.07201167  0.18248489]
Index:  200 [2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2] [ 1.46817076  1.47675169  1.36997008  1.3720845   1.33574128  1.60772479
  1.49248397  1.5009681   1.58078444  1.61483014  1.63873446  1.43750691
  1.56100225  1.49042571  1.35994661  1.55667651  1.37215078  1.58741355
  1.42080033  1.55496013]
Index:  400 [2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2] [ 2.07741189  1.97352624  1.97551405  1.88851535  1.86894357  2.10653162
  2.0151341   2.03270674  2.09722281  2.15011334  2.12220883  1.99195921
  2.0694325   1.98006213  1.87507904  2.11681104  1.93450105  2.07889748
  1.92142618  2.09027696]
Index:  600 [2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2] [ 2.45388412  2.286098    2.3629427   2.22090912  2.17793298  2.42991233
  2.37438989  2.37385321  2.41938996  2.49540281  2.44074893  2.33611107
  2.38390994  2.31943965  2.20482802  2.43228698  2.28815389  2.41334677
  2.25813437  2.42343807]
Index:  800 [2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2] [ 2.72119999  2.54750013  2.63153601  2.48882055  2.41998672  2.66122055
  2.64526582  2.62740898  2.65715957  2.73660517  2.66800404  2.58751297
  2.61982822  2.5940094   2.45053101  2.67986536  2.54518652  2.66136694
  2.52294755  2.67165422]
Running label 3
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-1.33599961 -1.45468295 -1.55387926 -1.6925391  -1.40377319 -1.18523693
 -1.62213218 -1.38323665 -1.27620244 -1.5136658  -1.3084532  -1.17271554
 -1.60279691 -1.23509419 -1.29626846 -1.49163759 -1.16547239 -1.10470414
 -1.09487247 -1.07310545]
Index:  200 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 0.58768535  0.66629326  0.36247823  0.2429384   0.52633655  0.55413139
  0.29466853  0.5871973   0.60598624  0.50386214  0.58302748  0.62999225
  0.14454512  0.70003772  0.29318058  0.46401641  0.60382903  0.64260793
  0.64398968  0.66200626]
Index:  400 [3 3 3 3 3 3 3 3 3 3 3 3 5 3 3 3 3 3 3 3] [ 1.24464667  1.32889807  1.17865455  0.94409311  1.25030279  1.26123559
  0.99569929  1.26843357  1.29573333  1.19165349  1.22848296  1.28092837
  0.75271463  1.2669847   0.94036889  1.18467724  1.22864306  1.22927558
  1.25291634  1.27141869]
Index:  600 [3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3] [ 1.67669272  1.70377684  1.64589286  1.43019903  1.66342998  1.66699171
  1.45910692  1.67855656  1.7362498   1.58775008  1.63033283  1.67770255
  1.15733993  1.58485639  1.35614073  1.62136722  1.58744228  1.59604919
  1.67493296  1.64585221]
Index:  800 [3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3] [ 2.00559902  1.98363388  1.96133745  1.81454313  1.96655297  1.96811068
  1.76556623  1.96074831  2.01900244  1.87402916  1.96710813  1.95423377
  1.44379759  1.86560416  1.65663242  1.93944287  1.88661325  1.86315262
  1.99010861  1.92296839]
Running label 4
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-0.84491295 -1.00937545 -0.81464738 -1.0960021  -1.02104378 -0.90968281
 -0.78804845 -1.13607287 -0.67690355 -0.91027743 -0.84932113 -0.96160489
 -1.06731665 -0.88333023 -0.72112834 -0.86919814 -0.65840572 -0.82975423
 -0.83853322 -0.98497134]
Index:  200 [4 4 4 5 4 4 4 4 4 4 4 4 5 4 4 4 4 4 4 4] [ 0.72814739  0.60481     0.76616997  0.53323156  0.82381362  0.48843765
  0.8474474   0.54588461  0.9262237   0.64667743  0.74729985  0.47659081
  0.37935507  0.68409514  0.77831906  0.74904853  0.95345229  0.77896976
  0.74644536  0.65333265]
Index:  400 [4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4] [ 1.30641389  1.11449373  1.31392086  1.20819235  1.47784328  1.08653605
  1.45200706  1.11995053  1.52430058  1.24974298  1.26313972  1.08622742
  1.01059401  1.32390904  1.30209804  1.38487399  1.53366697  1.38077688
  1.36873102  1.25305414]
Index:  600 [4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4] [ 1.6968565   1.46755195  1.66884756  1.59281301  1.86386657  1.49558485
  1.7994349   1.54989648  1.89227843  1.67551398  1.62972939  1.47673392
  1.41519046  1.775231    1.66448903  1.76159573  1.93249297  1.77111375
  1.73979771  1.6591655 ]
Index:  800 [4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4] [ 1.98792744  1.72078967  1.93984604  1.86945462  2.13542557  1.8007915
  2.05391431  1.87558484  2.15024972  2.02136397  1.90469635  1.77927494
  1.72169399  2.10009074  1.94268978  2.05769777  2.20784664  2.07596898
  1.99707162  1.93036103]
Running label 5
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 1.04622221  1.35461283  1.08525014  1.27326369  1.47178841  0.98098385
  1.28386045  1.19743848  1.18930769  0.9834283   0.82231456  1.51167274
  1.46186233  1.11551929  1.06130791  1.30062604  1.35385776  1.49986482
  1.35258269  1.00619411]
Index:  200 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 2.28873587  2.53470254  2.25836587  2.4109211   2.42984509  2.31094742
  2.34837961  2.37306929  2.36264086  2.19901347  2.24279165  2.57490945
  2.39242625  2.23491788  2.25784755  2.47053123  2.48165298  2.44728255
  2.40134001  2.26729369]
Index:  400 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 2.75337434  2.9461143   2.69272447  2.82472157  2.81918907  2.7502768
  2.75635362  2.78851581  2.78526402  2.6636219   2.72030115  2.97553802
  2.77310324  2.69152379  2.70129704  2.90997863  2.92193294  2.82453609
  2.81648302  2.70642996]
Index:  600 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 3.02763128  3.21703792  2.98185372  3.11009908  3.08639026  3.02253318
  3.02606034  3.05105758  3.0642488   2.93800282  3.00208712  3.22554302
  3.02767324  2.9722867   2.98341703  3.19680619  3.21084642  3.09162712
  3.09043336  2.98691297]
Index:  800 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 3.23189425  3.42221427  3.20347452  3.31864142  3.30924058  3.23031187
  3.24376774  3.25933433  3.29463816  3.15480113  3.22756362  3.42802763
  3.23529029  3.19160843  3.19256997  3.42181516  3.42819333  3.29842448
  3.30653358  3.19900537]
Running label 6
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-2.80112815 -2.35845995 -2.45840979 -2.48432755 -2.21819377 -2.53663325
 -2.43865466 -2.22361922 -2.14850616 -2.44254231 -2.75064492 -2.4889276
 -2.26263595 -2.39044762 -2.40663958 -2.38404322 -2.99308991 -2.17598748
 -2.09116411 -2.70540524]
Index:  200 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [ 0.33500484  0.35415271  0.69486439  0.2326178   0.45848611  0.41004696
  0.37255818  0.51275921  0.60970086  0.61977202  0.04565006  0.58751196
  0.35927495  0.464113    0.44440576  0.50543982  0.31472191  0.68189669
  0.67405671  0.36052284]
Index:  400 [6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6] [ 1.20503068  1.16656458  1.55836201  1.0900104   1.24672222  1.2210381
  1.18937564  1.27837181  1.45986676  1.41335499  0.91575372  1.45191884
  1.13654947  1.2897712   1.3521663   1.43155241  1.23932719  1.43375754
  1.41689956  1.20511508]
Index:  600 [6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6] [ 1.70809746  1.71674788  2.03707719  1.61432791  1.75942934  1.72369957
  1.69563353  1.80965018  1.98620415  1.88035393  1.47850585  1.94818258
  1.68541718  1.79512095  1.88773072  1.89554119  1.79962254  1.90777421
  1.87107754  1.67628503]
Index:  800 [6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6] [ 2.08503175  2.08603334  2.38838148  2.01483536  2.11543489  2.10132837
  2.10609913  2.1891222   2.34627223  2.23327017  1.87759566  2.29159379
  2.07884049  2.17726374  2.2580688   2.23226523  2.16381693  2.2349093
  2.20287466  2.02380538]
Running label 7
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-1.21075654 -1.48762274 -1.12853503 -1.05213654 -1.40368521 -1.24323368
 -1.0180155  -1.02431619 -1.05378771 -1.01535118 -1.41469312 -1.63676763
 -1.1898458  -1.35823131 -1.39805186 -1.15453506 -1.18232298 -1.08370876
 -1.57186866 -1.0211153 ]
Index:  200 [7 7 7 7 7 7 7 7 7 7 7 5 7 7 7 7 7 7 7 7] [ 0.99151582  0.67254531  1.06654036  1.02066326  0.7451064   0.62328857
  1.2453531   0.95868719  1.05710506  1.04411542  0.73807526  0.69569921
  0.86210525  0.93026209  0.6886003   0.84418333  0.81308627  1.07147813
  0.71568924  0.84935629]
Index:  400 [7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7] [ 1.78095746  1.41714013  1.85261869  1.71240807  1.44734097  1.3004328
  2.00850606  1.6758883   1.78445685  1.74866581  1.57230496  1.58253503
  1.5433085   1.64450216  1.54471433  1.5905714   1.53712976  1.73300767
  1.63850522  1.6821425 ]
Index:  600 [7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7] [ 2.2254436   1.85666931  2.30524921  2.14387798  1.85491061  1.83334601
  2.39174175  2.10307741  2.21482682  2.14756799  2.04999614  2.04993963
  1.97546577  2.07936645  2.05537772  2.04741859  2.00364447  2.14345312
  2.1240294   2.13893986]
Index:  800 [7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7 7] [ 2.51605392  2.18559337  2.62335849  2.46051693  2.1542604   2.23896503
  2.6708293   2.42803264  2.5360508   2.45431781  2.38556528  2.37851882
  2.29044485  2.41349626  2.39670873  2.34064245  2.34399438  2.44854999
  2.4510572   2.4328928 ]
Running label 8
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-0.40636975 -0.18853198 -0.24308588 -0.54253256 -0.53630155 -0.37292558
 -0.02009614 -0.54190826 -0.40086341 -0.09972127 -0.97968006  0.00440179
 -0.56943858 -0.57481831 -0.40909463 -0.54434001 -0.14909361 -0.53502321
 -0.19285057 -0.47139221]
Index:  200 [8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8] [ 1.76175499  1.96064603  1.82180548  1.82853782  1.9485383   1.93613291
  1.96492434  1.71381795  2.064924    2.03265452  1.47923779  2.02159023
  1.80310798  1.9828856   1.99260187  1.75221312  2.16279483  1.97468972
  2.03132915  1.7387507 ]
Index:  400 [8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8] [ 2.42288232  2.61069727  2.40623093  2.43081474  2.58413887  2.57154393
  2.54379559  2.38681507  2.67098522  2.6168797   2.19757938  2.60639834
  2.450351    2.61205316  2.57054663  2.39467859  2.6971693   2.64913964
  2.65877247  2.41836739]
Index:  600 [8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8] [ 2.81048918  2.99192095  2.75406742  2.79993916  2.95950055  2.93376327
  2.9042871   2.77601743  3.02188063  2.97691751  2.57302046  2.95786381
  2.81698847  2.9840486   2.91435814  2.76803112  3.03408241  3.05565357
  2.98532104  2.80110645]
Index:  800 [8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8] [ 3.11423492  3.26237082  3.01931787  3.08939481  3.24173403  3.19850755
  3.16344357  3.06007147  3.2803266   3.25024605  2.84692764  3.21106672
  3.0859437   3.24892569  3.17244339  3.05923986  3.28349686  3.33688951
  3.24233413  3.07453918]
Running label 9
Index:  0 [5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5] [-0.83280087 -1.18582034 -1.20556593 -1.4794023  -1.37590718 -1.94165659
 -1.33827949 -1.0062176  -1.63063323 -1.40507567 -1.78151035 -1.05858541
 -1.34911966 -1.17791867 -1.49240351 -1.95286632 -1.39261115 -1.13362432
 -1.39162302 -1.52543247]
Index:  200 [9 5 9 5 5 5 9 9 5 9 5 9 5 9 9 5 9 9 9 9] [ 1.01277554  0.66185236  0.79285944  0.62575448  0.60623646  0.23728374
  0.77612936  0.97280335  0.58462375  0.77463245  0.28015292  1.01162469
  0.52455747  0.79201937  0.72025859  0.59820259  0.76001298  0.85260665
  1.06452346  0.65558851]
Index:  400 [9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9] [ 1.61777949  1.33210552  1.4865129   1.32814097  1.30998576  1.12829244
  1.54107237  1.62902665  1.26941133  1.42697334  1.09482121  1.6639359
  1.28340816  1.40360188  1.39847457  1.36799908  1.43199444  1.54111934
  1.73782396  1.41026592]
Index:  600 [9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9] [ 2.00279856  1.81017363  1.92096329  1.73162532  1.7638905   1.61984265
  1.95866346  2.01127362  1.67318702  1.84303498  1.55100417  2.05935574
  1.74259853  1.79439962  1.81790662  1.84727502  1.95928895  1.9574393
  2.14299607  1.85575497]
Index:  800 [9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9] [ 2.28400397  2.17268109  2.21100187  2.04206371  2.08103132  1.97219419
  2.26193213  2.30917215  2.03920722  2.14898705  1.87549853  2.33918786
  2.0669663   2.09224677  2.1124742   2.19052291  2.33615661  2.27785468
  2.45641351  2.16875052]
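
In every run the noise-initialized batch starts out classified as 5, and the target class's logit climbs steadily until all twenty samples flip to the target label. The cell above also defines an L2 penalty on X but never adds it to the loss; if regularized images are wanted, the only change is the loss line. A sketch of the modified lines, assuming the same graph setup:

In [ ]:
# Sketch: enable the image penalty defined above (same names as the cell above).
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=Y_pred_activation, labels=Y_placeholder)
) + alpha * penalty
grad_op = tf.train.AdamOptimizer(1e-3).minimize(loss, var_list=[X])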

In [68]:
from matplotlib import pyplot as plt

row = 4
col = 5
fig, axarr = plt.subplots(row, col)

imgs = np.asarray(label_to_samples[8]).squeeze()  # (20, 28, 28)
for r in range(row):
    for c in range(col):
        # Rescale each generated image independently to [0, 255] for display.
        img = imgs[r * col + c]
        img = img - img.min()
        img = img / img.max() * 255.0
        axarr[r, c].imshow(img, cmap='gray')
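
Each panel is rescaled on its own, so relative brightness across samples is not comparable. To keep the grid around as an artifact, the figure can be written to disk; a sketch, with an illustrative filename:

In [ ]:
# Sketch: tidy the panels and save the grid (filename is illustrative).
for ax in axarr.ravel():
    ax.axis('off')  # hide ticks on every panel
fig.savefig('label_8_activation_maximization.png', dpi=150, bbox_inches='tight')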



In [32]:
from tensorflow.examples.tutorials.mnist import input_data
mnist_data = input_data.read_data_sets('/Users/taiyuanz/tensorflow/image_net/mnist_data')


Extracting /Users/taiyuanz/tensorflow/image_net/mnist_data/train-images-idx3-ubyte.gz
Extracting /Users/taiyuanz/tensorflow/image_net/mnist_data/train-labels-idx1-ubyte.gz
Extracting /Users/taiyuanz/tensorflow/image_net/mnist_data/t10k-images-idx3-ubyte.gz
Extracting /Users/taiyuanz/tensorflow/image_net/mnist_data/t10k-labels-idx1-ubyte.gz
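
read_data_sets returns integer class labels by default; the one-hot form that Y_placeholder above expects is available with a flag. A sketch:

In [ ]:
# Sketch: load labels one-hot encoded instead of as integer classes.
mnist_onehot = input_data.read_data_sets('/Users/taiyuanz/tensorflow/image_net/mnist_data', one_hot=True)
_, y1 = mnist_onehot.train.next_batch(32)
print(y1.shape)  # (32, 10) rather than (32,)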

In [47]:
x, y = mnist_data.train.next_batch(32)

y


Out[47]:
array([6, 6, 9, 7, 3, 8, 9, 9, 3, 0, 8, 1, 3, 2, 0, 3, 2, 4, 9, 5, 9, 6, 4,
       7, 3, 8, 4, 9, 3, 6, 5, 2], dtype=uint8)
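
The same one-hot targets built by hand in the optimization cell can be produced from these integer labels with an identity-matrix lookup. A sketch:

In [ ]:
# Sketch: integer labels -> one-hot rows, matching the Y_target construction.
y_onehot = np.eye(10, dtype=int)[y]
print(y_onehot[0])  # y[0] is 6, so position 6 holds the 1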

In [48]:
np.where(y == 6)


Out[48]:
(array([ 0,  1, 21, 29]),)

In [49]:
x.shape


Out[49]:
(32, 784)
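
Each row is a flattened 28x28 image, so feeding this batch to the convolutional graph above would require restoring the NHWC layout. A sketch:

In [ ]:
# Sketch: reshape flat 784-vectors to the [batch, 28, 28, 1] layout used by X.
x_img = x.reshape((-1, 28, 28, 1))
print(x_img.shape)  # (32, 28, 28, 1)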

In [51]:
np.random.standard_normal(100)


Out[51]:
array([ 0.26806472,  0.06319465,  0.52474696, -1.18638785,  0.59912884,
        0.69227536, -2.08798792, -0.58684299, -0.48418262, -1.07233145,
        0.71177585,  1.18479432,  0.1048661 ,  0.00417651, -1.05382187,
       -0.73798049, -1.45192626,  0.85490937,  0.66364953,  0.35091625,
       -0.24137106, -0.75876971, -1.09456811,  0.67705033, -0.47050818,
       -1.30629606, -0.9547    , -0.33354506,  1.88022896, -2.01822284,
       -0.06525345,  0.33300746,  0.52666417, -0.90607095,  0.07840228,
        0.13005563, -0.12848024, -0.54616441,  0.0826097 ,  2.87294491,
       -2.38378805, -1.27004767,  1.69055107,  0.11561125, -0.47186767,
       -0.99939038, -0.35382354, -0.16070221,  0.74281377,  0.25086723,
        1.7264666 ,  0.94420263,  0.19889193,  0.14154871, -0.65716142,
        0.28055233, -0.37547012,  0.59067526,  0.33665478,  0.42077829,
       -0.41913629, -0.4249573 , -0.20397895,  1.42890227, -1.52487517,
       -0.20301808,  0.90406554, -0.03323424, -0.31390094, -1.06626313,
       -0.26935305, -0.7790681 , -0.12644318,  0.03279707, -1.06920417,
        0.99995912, -0.61553184, -1.77352331,  1.13359646,  0.07542602,
       -0.34942593, -1.83851927, -0.1695914 , -1.15142748,  1.39710876,
       -1.66989521,  0.55804067,  0.91229549, -0.54868102, -2.82720554,
        1.29868361, -0.51334063,  1.01657033,  1.05299042,  1.32971863,
       -1.07982121,  0.36592833, -0.64593291, -0.47831428, -0.2267618 ])

In [72]:
arr = np.asarray([[[1,2],[3,4]], [[11,12],[13,14]], [[21,22],[23,24]]])
print(arr.shape)
arr.reshape((3,4)).reshape((3, 2, 2))


(3, 2, 2)
Out[72]:
array([[[ 1,  2],
        [ 3,  4]],

       [[11, 12],
        [13, 14]],

       [[21, 22],
        [23, 24]]])
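
The round trip is lossless because reshape reads and writes elements in row-major (C) order without moving data; flattening makes that order explicit. A sketch:

In [ ]:
# Sketch: row-major flattening shows the element order reshape preserves.
print(arr.reshape(-1))  # [ 1  2  3  4 11 12 13 14 21 22 23 24]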

In [ ]: