In [8]:
import cifar10,cifar10_input
import tensorflow as tf
import numpy as np
import time

In [9]:
max_steps = 3000
batch_size = 128
data_dir = '/tmp/cifar10_data/cifar-10-batches-bin'

In [10]:
def variable_with_weight_loss(shape,stddev,wl):
    var = tf.Variable(tf.truncated_normal(shape,stddev = stddev))
    if wl is not None:
        weight_loss = tf.multiply(tf.nn.l2_loss(var),wl,name='weight_loss')
        tf.add_to_collection('losses',weight_loss)
    return var

In [11]:
cifar10.maybe_download_and_extract()


---------------------------------------------------------------------------
PermissionError                           Traceback (most recent call last)
<ipython-input-11-ddaee26bfd14> in <module>()
----> 1 cifar10.maybe_download_and_extract()

/home/wjj/TFbook/chapter5/models-master/tutorials/image/cifar10/cifar10.py in maybe_download_and_extract()
    397   extracted_dir_path = os.path.join(dest_directory, 'cifar-10-batches-bin')
    398   if not os.path.exists(extracted_dir_path):
--> 399     tarfile.open(filepath, 'r:gz').extractall(dest_directory)

/home/wjj/anaconda3/lib/python3.6/tarfile.py in extractall(self, path, members, numeric_owner)
   2001             # Do not set_attrs directories, as we will do that further down
   2002             self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
-> 2003                          numeric_owner=numeric_owner)
   2004 
   2005         # Reverse sort directories.

/home/wjj/anaconda3/lib/python3.6/tarfile.py in extract(self, member, path, set_attrs, numeric_owner)
   2043             self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
   2044                                  set_attrs=set_attrs,
-> 2045                                  numeric_owner=numeric_owner)
   2046         except OSError as e:
   2047             if self.errorlevel > 0:

/home/wjj/anaconda3/lib/python3.6/tarfile.py in _extract_member(self, tarinfo, targetpath, set_attrs, numeric_owner)
   2115             self.makefile(tarinfo, targetpath)
   2116         elif tarinfo.isdir():
-> 2117             self.makedir(tarinfo, targetpath)
   2118         elif tarinfo.isfifo():
   2119             self.makefifo(tarinfo, targetpath)

/home/wjj/anaconda3/lib/python3.6/tarfile.py in makedir(self, tarinfo, targetpath)
   2144             # Use a safe mode for the directory, the real mode is set
   2145             # later in _extract_member().
-> 2146             os.mkdir(targetpath, 0o700)
   2147         except FileExistsError:
   2148             pass

PermissionError: [Errno 13] Permission denied: '/tmp/cifar10_data/cifar-10-batches-bin'

In [6]:
images_train,labels_train = cifar10_input.distorted_inputs(data_dir=data_dir,batch_size=batch_size)


Filling queue with 20000 CIFAR images before starting to train. This will take a few minutes.

In [7]:
images_test,labels_test = cifar10_input.inputs(eval_data=True, data_dir=data_dir,batch_size=batch_size)

In [8]:
image_holder = tf.placeholder(tf.float32,[batch_size,24,24,3])

label_holder = tf.placeholder(tf.int32, [batch_size])

In [9]:
weight1 = variable_with_weight_loss(shape=[5,5,3,64],stddev=53-2,wl=0.0)
kernel1 = tf.nn.conv2d(image_holder,weight1,[1,1,1,1],padding='SAME')
bias1 = tf.Variable(tf.constant(0.0,shape=[64]))
conv1 = tf.nn.relu(tf.nn.bias_add(kernel1,bias1))
pool1 = tf.nn.max_pool(conv1,ksize=[1,3,3,1],strides=[1,2,2,1],padding='SAME')
norm1 = tf.nn.lrn(pool1,4,bias=1.0,alpha=0.001/9.0,beta=0.75)

In [10]:
weight2 = variable_with_weight_loss(shape=[5,5,64,64],stddev=5e-2,wl=0.0)
kernel2 = tf.nn.conv2d(norm1,weight2,[1,1,1,1],padding='SAME')
bias2 = tf.Variable(tf.constant(0.1,shape=[64]))
conv2 = tf.nn.relu(tf.nn.bias_add(kernel2,bias2))
norm2 = tf.nn.lrn(conv2,4,bias=1.0,alpha=0.001/9.0,beta=0.75)
pool2 = tf.nn.max_pool(norm2,ksize=[1,3,3,1],strides=[1,2,2,1],padding='SAME')

In [11]:
reshape = tf.reshape(pool2,[batch_size,-1])
dim = reshape.get_shape()[1].value
weight3 = variable_with_weight_loss(shape=[dim,384],stddev=0.04,wl=0.004)
bias3 = tf.Variable(tf.constant(0.1,shape=[384]))
local3 = tf.nn.relu(tf.matmul(reshape,weight3)+bias3)

In [12]:
weight4 = variable_with_weight_loss(shape=[384,192],stddev=0.04,wl=0.04)
bias4 = tf.Variable(tf.constant(0.1,shape=[192]))
local4 = tf.nn.relu(tf.matmul(local3,weight4)+bias4)

In [13]:
weight5 = variable_with_weight_loss(shape=[192,10],stddev=0.04,wl=0.04)
bias5 = tf.Variable(tf.constant(0.0,shape=[10]))
logits = tf.add(tf.matmul(local4,weight5),bias5)

In [14]:
def loss(logits,labels):
    labels = tf.cast(labels,tf.int64)
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits,labels=labels,name='cross_entropy_per_example')
    cross_entropy_mean = tf.reduce_mean(cross_entropy,name='cross_entropy')
    tf.add_to_collection('losses',cross_entropy_mean)
    return tf.add_n(tf.get_collection('losses'),name='total_loss')

In [15]:
loss = loss(logits,label_holder)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)

In [16]:
top_k_op = tf.nn.in_top_k(logits,label_holder,1)

In [17]:
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()

In [18]:
tf.train.start_queue_runners()


Out[18]:
[<Thread(Thread-4, started daemon 140681100121856)>,
 <Thread(Thread-5, started daemon 140681091729152)>,
 <Thread(Thread-6, started daemon 140681083336448)>,
 <Thread(Thread-7, started daemon 140681074943744)>,
 <Thread(Thread-8, started daemon 140681066551040)>,
 <Thread(Thread-9, started daemon 140680110274304)>,
 <Thread(Thread-10, started daemon 140680101881600)>,
 <Thread(Thread-11, started daemon 140680093488896)>,
 <Thread(Thread-12, started daemon 140680085096192)>,
 <Thread(Thread-13, started daemon 140680076703488)>,
 <Thread(Thread-14, started daemon 140680068310784)>,
 <Thread(Thread-15, started daemon 140680059918080)>,
 <Thread(Thread-16, started daemon 140679573403392)>,
 <Thread(Thread-17, started daemon 140679565010688)>,
 <Thread(Thread-18, started daemon 140679556617984)>,
 <Thread(Thread-19, started daemon 140679548225280)>,
 <Thread(Thread-20, started daemon 140679539832576)>,
 <Thread(Thread-21, started daemon 140679531439872)>,
 <Thread(Thread-22, started daemon 140679523047168)>,
 <Thread(Thread-23, started daemon 140679036532480)>,
 <Thread(Thread-24, started daemon 140679028139776)>,
 <Thread(Thread-25, started daemon 140679019747072)>,
 <Thread(Thread-26, started daemon 140679011354368)>,
 <Thread(Thread-27, started daemon 140679002961664)>,
 <Thread(Thread-28, started daemon 140678994568960)>,
 <Thread(Thread-29, started daemon 140678986176256)>,
 <Thread(Thread-30, started daemon 140678499661568)>,
 <Thread(Thread-31, started daemon 140678491268864)>,
 <Thread(Thread-32, started daemon 140678482876160)>,
 <Thread(Thread-33, started daemon 140678474483456)>,
 <Thread(Thread-34, started daemon 140678466090752)>,
 <Thread(Thread-35, started daemon 140678457698048)>,
 <Thread(Thread-36, started daemon 140678449305344)>,
 <Thread(Thread-37, started daemon 140677962790656)>]

In [19]:
for step in range(max_steps):
    start_time = time.time()
    image_batch,label_batch = sess.run([images_train,labels_train])
    _,loss_value = sess.run([train_op,loss],feed_dict={image_holder:image_batch,label_holder:label_batch})
    duration = time.time() - start_time
    if step%10 == 0:
        examples_per_sec = batch_size / duration
        sec_per_batch = float(duration)
        format_str=('step %d,loss%.2f(%.1f examples/sec; %.3f sec/batch)')
        print(format_str % (step,loss_value,examples_per_sec,sec_per_batch))


step 0,loss9.63(15.5 examples/sec; 8.277 sec/batch)
step 10,loss5.99(2200.9 examples/sec; 0.058 sec/batch)
step 20,loss5.62(2489.7 examples/sec; 0.051 sec/batch)
step 30,loss5.18(2489.3 examples/sec; 0.051 sec/batch)
step 40,loss4.89(2564.3 examples/sec; 0.050 sec/batch)
step 50,loss4.84(2284.5 examples/sec; 0.056 sec/batch)
step 60,loss4.64(1757.0 examples/sec; 0.073 sec/batch)
step 70,loss4.50(2351.0 examples/sec; 0.054 sec/batch)
step 80,loss4.34(2431.9 examples/sec; 0.053 sec/batch)
step 90,loss4.16(2482.4 examples/sec; 0.052 sec/batch)
step 100,loss4.11(2583.6 examples/sec; 0.050 sec/batch)
step 110,loss4.02(1316.3 examples/sec; 0.097 sec/batch)
step 120,loss3.88(2493.5 examples/sec; 0.051 sec/batch)
step 130,loss3.79(1689.0 examples/sec; 0.076 sec/batch)
step 140,loss3.61(2557.8 examples/sec; 0.050 sec/batch)
step 150,loss3.70(2344.3 examples/sec; 0.055 sec/batch)
step 160,loss3.69(847.0 examples/sec; 0.151 sec/batch)
step 170,loss3.67(2499.3 examples/sec; 0.051 sec/batch)
step 180,loss3.51(2553.5 examples/sec; 0.050 sec/batch)
step 190,loss3.45(2570.4 examples/sec; 0.050 sec/batch)
step 200,loss3.61(2453.1 examples/sec; 0.052 sec/batch)
step 210,loss3.53(2394.6 examples/sec; 0.053 sec/batch)
step 220,loss3.39(2397.5 examples/sec; 0.053 sec/batch)
step 230,loss3.33(1415.3 examples/sec; 0.090 sec/batch)
step 240,loss3.16(2342.8 examples/sec; 0.055 sec/batch)
step 250,loss3.23(2414.4 examples/sec; 0.053 sec/batch)
step 260,loss3.17(2413.0 examples/sec; 0.053 sec/batch)
step 270,loss3.14(2390.1 examples/sec; 0.054 sec/batch)
step 280,loss2.97(1523.8 examples/sec; 0.084 sec/batch)
step 290,loss3.03(2469.1 examples/sec; 0.052 sec/batch)
step 300,loss2.90(2549.5 examples/sec; 0.050 sec/batch)
step 310,loss2.90(2468.6 examples/sec; 0.052 sec/batch)
step 320,loss2.85(2456.9 examples/sec; 0.052 sec/batch)
step 330,loss2.92(1568.8 examples/sec; 0.082 sec/batch)
step 340,loss2.89(2356.7 examples/sec; 0.054 sec/batch)
step 350,loss2.75(2338.1 examples/sec; 0.055 sec/batch)
step 360,loss2.73(2339.6 examples/sec; 0.055 sec/batch)
step 370,loss2.81(2507.8 examples/sec; 0.051 sec/batch)
step 380,loss2.78(2373.4 examples/sec; 0.054 sec/batch)
step 390,loss2.83(2473.1 examples/sec; 0.052 sec/batch)
step 400,loss2.72(1271.4 examples/sec; 0.101 sec/batch)
step 410,loss2.79(2414.0 examples/sec; 0.053 sec/batch)
step 420,loss2.60(2463.6 examples/sec; 0.052 sec/batch)
step 430,loss2.74(2474.5 examples/sec; 0.052 sec/batch)
step 440,loss2.61(2485.2 examples/sec; 0.052 sec/batch)
step 450,loss2.58(1108.3 examples/sec; 0.115 sec/batch)
step 460,loss2.60(2540.7 examples/sec; 0.050 sec/batch)
step 470,loss2.47(2428.5 examples/sec; 0.053 sec/batch)
step 480,loss2.57(2530.6 examples/sec; 0.051 sec/batch)
step 490,loss2.50(2399.2 examples/sec; 0.053 sec/batch)
step 500,loss2.52(2345.5 examples/sec; 0.055 sec/batch)
step 510,loss2.55(2425.2 examples/sec; 0.053 sec/batch)
step 520,loss2.49(2010.2 examples/sec; 0.064 sec/batch)
step 530,loss2.35(2432.9 examples/sec; 0.053 sec/batch)
step 540,loss2.42(2360.1 examples/sec; 0.054 sec/batch)
step 550,loss2.32(2489.1 examples/sec; 0.051 sec/batch)
step 560,loss2.48(2486.0 examples/sec; 0.051 sec/batch)
step 570,loss2.43(2518.5 examples/sec; 0.051 sec/batch)
step 580,loss2.40(2540.6 examples/sec; 0.050 sec/batch)
step 590,loss2.31(2505.1 examples/sec; 0.051 sec/batch)
step 600,loss2.36(2415.5 examples/sec; 0.053 sec/batch)
step 610,loss2.26(2372.4 examples/sec; 0.054 sec/batch)
step 620,loss2.42(1814.4 examples/sec; 0.071 sec/batch)
step 630,loss2.22(2505.0 examples/sec; 0.051 sec/batch)
step 640,loss2.39(2434.8 examples/sec; 0.053 sec/batch)
step 650,loss2.31(2662.2 examples/sec; 0.048 sec/batch)
step 660,loss2.34(2480.8 examples/sec; 0.052 sec/batch)
step 670,loss2.26(1975.7 examples/sec; 0.065 sec/batch)
step 680,loss2.16(2468.1 examples/sec; 0.052 sec/batch)
step 690,loss2.19(2438.6 examples/sec; 0.052 sec/batch)
step 700,loss2.30(2581.7 examples/sec; 0.050 sec/batch)
step 710,loss2.29(2520.8 examples/sec; 0.051 sec/batch)
step 720,loss2.17(727.4 examples/sec; 0.176 sec/batch)
step 730,loss2.20(2435.5 examples/sec; 0.053 sec/batch)
step 740,loss2.10(2431.2 examples/sec; 0.053 sec/batch)
step 750,loss2.01(2528.3 examples/sec; 0.051 sec/batch)
step 760,loss2.07(2063.0 examples/sec; 0.062 sec/batch)
step 770,loss2.04(1243.8 examples/sec; 0.103 sec/batch)
step 780,loss2.08(2482.6 examples/sec; 0.052 sec/batch)
step 790,loss2.10(2444.2 examples/sec; 0.052 sec/batch)
step 800,loss2.09(2509.3 examples/sec; 0.051 sec/batch)
step 810,loss2.02(2413.9 examples/sec; 0.053 sec/batch)
step 820,loss2.00(2538.0 examples/sec; 0.050 sec/batch)
step 830,loss2.01(2404.9 examples/sec; 0.053 sec/batch)
step 840,loss1.98(2111.9 examples/sec; 0.061 sec/batch)
step 850,loss2.16(2456.5 examples/sec; 0.052 sec/batch)
step 860,loss1.95(2583.3 examples/sec; 0.050 sec/batch)
step 870,loss1.96(2479.7 examples/sec; 0.052 sec/batch)
step 880,loss2.14(2481.0 examples/sec; 0.052 sec/batch)
step 890,loss2.00(2073.6 examples/sec; 0.062 sec/batch)
step 900,loss1.88(2567.1 examples/sec; 0.050 sec/batch)
step 910,loss1.92(2457.0 examples/sec; 0.052 sec/batch)
step 920,loss2.05(2333.3 examples/sec; 0.055 sec/batch)
step 930,loss1.85(2498.5 examples/sec; 0.051 sec/batch)
step 940,loss2.06(2107.1 examples/sec; 0.061 sec/batch)
step 950,loss1.86(2534.9 examples/sec; 0.050 sec/batch)
step 960,loss1.90(2423.3 examples/sec; 0.053 sec/batch)
step 970,loss1.98(2485.9 examples/sec; 0.051 sec/batch)
step 980,loss1.94(2490.4 examples/sec; 0.051 sec/batch)
step 990,loss1.92(1861.2 examples/sec; 0.069 sec/batch)
step 1000,loss2.07(2606.7 examples/sec; 0.049 sec/batch)
step 1010,loss1.97(2346.1 examples/sec; 0.055 sec/batch)
step 1020,loss2.01(2474.3 examples/sec; 0.052 sec/batch)
step 1030,loss1.80(2453.3 examples/sec; 0.052 sec/batch)
step 1040,loss1.92(2071.8 examples/sec; 0.062 sec/batch)
step 1050,loss1.96(2473.6 examples/sec; 0.052 sec/batch)
step 1060,loss2.07(2481.8 examples/sec; 0.052 sec/batch)
step 1070,loss2.01(2412.1 examples/sec; 0.053 sec/batch)
step 1080,loss1.83(2482.2 examples/sec; 0.052 sec/batch)
step 1090,loss2.04(1981.4 examples/sec; 0.065 sec/batch)
step 1100,loss1.74(2386.2 examples/sec; 0.054 sec/batch)
step 1110,loss1.96(2484.0 examples/sec; 0.052 sec/batch)
step 1120,loss1.83(2416.2 examples/sec; 0.053 sec/batch)
step 1130,loss1.83(2305.8 examples/sec; 0.056 sec/batch)
step 1140,loss1.83(797.5 examples/sec; 0.161 sec/batch)
step 1150,loss1.88(2410.4 examples/sec; 0.053 sec/batch)
step 1160,loss1.71(2459.8 examples/sec; 0.052 sec/batch)
step 1170,loss1.77(2296.9 examples/sec; 0.056 sec/batch)
step 1180,loss1.90(2098.3 examples/sec; 0.061 sec/batch)
step 1190,loss1.92(1433.6 examples/sec; 0.089 sec/batch)
step 1200,loss1.83(2647.5 examples/sec; 0.048 sec/batch)
step 1210,loss1.82(2333.8 examples/sec; 0.055 sec/batch)
step 1220,loss1.83(2552.6 examples/sec; 0.050 sec/batch)
step 1230,loss1.75(2522.4 examples/sec; 0.051 sec/batch)
step 1240,loss1.83(1185.3 examples/sec; 0.108 sec/batch)
step 1250,loss1.71(2553.8 examples/sec; 0.050 sec/batch)
step 1260,loss1.79(2420.9 examples/sec; 0.053 sec/batch)
step 1270,loss1.82(2441.6 examples/sec; 0.052 sec/batch)
step 1280,loss1.82(2589.8 examples/sec; 0.049 sec/batch)
step 1290,loss1.55(2422.9 examples/sec; 0.053 sec/batch)
step 1300,loss1.63(2394.5 examples/sec; 0.053 sec/batch)
step 1310,loss1.80(2178.2 examples/sec; 0.059 sec/batch)
step 1320,loss1.71(2425.9 examples/sec; 0.053 sec/batch)
step 1330,loss1.81(2516.1 examples/sec; 0.051 sec/batch)
step 1340,loss1.69(2533.4 examples/sec; 0.051 sec/batch)
step 1350,loss1.66(2388.6 examples/sec; 0.054 sec/batch)
step 1360,loss1.72(1629.2 examples/sec; 0.079 sec/batch)
step 1370,loss1.78(2516.5 examples/sec; 0.051 sec/batch)
step 1380,loss1.70(2378.4 examples/sec; 0.054 sec/batch)
step 1390,loss1.73(2341.3 examples/sec; 0.055 sec/batch)
step 1400,loss1.69(2467.4 examples/sec; 0.052 sec/batch)
step 1410,loss1.75(1758.3 examples/sec; 0.073 sec/batch)
step 1420,loss1.73(2458.7 examples/sec; 0.052 sec/batch)
step 1430,loss1.57(2048.2 examples/sec; 0.062 sec/batch)
step 1440,loss1.74(2462.0 examples/sec; 0.052 sec/batch)
step 1450,loss1.74(2554.5 examples/sec; 0.050 sec/batch)
step 1460,loss1.67(2550.2 examples/sec; 0.050 sec/batch)
step 1470,loss1.65(2461.4 examples/sec; 0.052 sec/batch)
step 1480,loss1.85(1839.0 examples/sec; 0.070 sec/batch)
step 1490,loss1.64(2447.0 examples/sec; 0.052 sec/batch)
step 1500,loss1.69(2417.1 examples/sec; 0.053 sec/batch)
step 1510,loss1.62(2520.2 examples/sec; 0.051 sec/batch)
step 1520,loss1.82(2234.9 examples/sec; 0.057 sec/batch)
step 1530,loss1.77(931.3 examples/sec; 0.137 sec/batch)
step 1540,loss1.64(2463.1 examples/sec; 0.052 sec/batch)
step 1550,loss1.68(2630.3 examples/sec; 0.049 sec/batch)
step 1560,loss1.64(2588.3 examples/sec; 0.049 sec/batch)
step 1570,loss1.70(2611.5 examples/sec; 0.049 sec/batch)
step 1580,loss1.80(1240.2 examples/sec; 0.103 sec/batch)
step 1590,loss1.68(2461.2 examples/sec; 0.052 sec/batch)
step 1600,loss1.70(2470.7 examples/sec; 0.052 sec/batch)
step 1610,loss1.67(2415.5 examples/sec; 0.053 sec/batch)
step 1620,loss1.77(2409.6 examples/sec; 0.053 sec/batch)
step 1630,loss1.62(2362.3 examples/sec; 0.054 sec/batch)
step 1640,loss1.47(2477.3 examples/sec; 0.052 sec/batch)
step 1650,loss1.57(1827.6 examples/sec; 0.070 sec/batch)
step 1660,loss1.78(2401.3 examples/sec; 0.053 sec/batch)
step 1670,loss1.56(2409.0 examples/sec; 0.053 sec/batch)
step 1680,loss1.72(2403.2 examples/sec; 0.053 sec/batch)
step 1690,loss1.68(2242.0 examples/sec; 0.057 sec/batch)
step 1700,loss1.64(1716.2 examples/sec; 0.075 sec/batch)
step 1710,loss1.74(2402.1 examples/sec; 0.053 sec/batch)
step 1720,loss1.72(2407.6 examples/sec; 0.053 sec/batch)
step 1730,loss1.72(2427.6 examples/sec; 0.053 sec/batch)
step 1740,loss1.73(2443.3 examples/sec; 0.052 sec/batch)
step 1750,loss1.61(1010.6 examples/sec; 0.127 sec/batch)
step 1760,loss1.59(2433.5 examples/sec; 0.053 sec/batch)
step 1770,loss1.48(2449.1 examples/sec; 0.052 sec/batch)
step 1780,loss1.51(2642.2 examples/sec; 0.048 sec/batch)
step 1790,loss1.50(2516.3 examples/sec; 0.051 sec/batch)
step 1800,loss1.58(2156.6 examples/sec; 0.059 sec/batch)
step 1810,loss1.66(2486.3 examples/sec; 0.051 sec/batch)
step 1820,loss1.54(2435.0 examples/sec; 0.053 sec/batch)
step 1830,loss1.48(2497.4 examples/sec; 0.051 sec/batch)
step 1840,loss1.65(2528.4 examples/sec; 0.051 sec/batch)
step 1850,loss1.61(2557.4 examples/sec; 0.050 sec/batch)
step 1860,loss1.56(2461.9 examples/sec; 0.052 sec/batch)
step 1870,loss1.52(2146.1 examples/sec; 0.060 sec/batch)
step 1880,loss1.52(2384.9 examples/sec; 0.054 sec/batch)
step 1890,loss1.54(2491.4 examples/sec; 0.051 sec/batch)
step 1900,loss1.68(2451.0 examples/sec; 0.052 sec/batch)
step 1910,loss1.53(2436.8 examples/sec; 0.053 sec/batch)
step 1920,loss1.67(1819.5 examples/sec; 0.070 sec/batch)
step 1930,loss1.75(2369.3 examples/sec; 0.054 sec/batch)
step 1940,loss1.64(2560.9 examples/sec; 0.050 sec/batch)
step 1950,loss1.59(2447.0 examples/sec; 0.052 sec/batch)
step 1960,loss1.60(2457.6 examples/sec; 0.052 sec/batch)
step 1970,loss1.56(1152.5 examples/sec; 0.111 sec/batch)
step 1980,loss1.63(2479.6 examples/sec; 0.052 sec/batch)
step 1990,loss1.56(2367.6 examples/sec; 0.054 sec/batch)
step 2000,loss1.64(2594.2 examples/sec; 0.049 sec/batch)
step 2010,loss1.56(2411.6 examples/sec; 0.053 sec/batch)
step 2020,loss1.56(2383.7 examples/sec; 0.054 sec/batch)
step 2030,loss1.60(2421.3 examples/sec; 0.053 sec/batch)
step 2040,loss1.51(2406.0 examples/sec; 0.053 sec/batch)
step 2050,loss1.83(2370.3 examples/sec; 0.054 sec/batch)
step 2060,loss1.54(2589.9 examples/sec; 0.049 sec/batch)
step 2070,loss1.70(2382.2 examples/sec; 0.054 sec/batch)
step 2080,loss1.56(2467.3 examples/sec; 0.052 sec/batch)
step 2090,loss1.78(1740.5 examples/sec; 0.074 sec/batch)
step 2100,loss1.63(2543.4 examples/sec; 0.050 sec/batch)
step 2110,loss1.52(2535.1 examples/sec; 0.050 sec/batch)
step 2120,loss1.64(2369.1 examples/sec; 0.054 sec/batch)
step 2130,loss1.58(2520.5 examples/sec; 0.051 sec/batch)
step 2140,loss1.68(1989.7 examples/sec; 0.064 sec/batch)
step 2150,loss1.65(2404.0 examples/sec; 0.053 sec/batch)
step 2160,loss1.65(1984.3 examples/sec; 0.065 sec/batch)
step 2170,loss1.63(2441.0 examples/sec; 0.052 sec/batch)
step 2180,loss1.63(2545.6 examples/sec; 0.050 sec/batch)
step 2190,loss1.50(2292.8 examples/sec; 0.056 sec/batch)
step 2200,loss1.52(2732.0 examples/sec; 0.047 sec/batch)
step 2210,loss1.60(2404.4 examples/sec; 0.053 sec/batch)
step 2220,loss1.57(2203.4 examples/sec; 0.058 sec/batch)
step 2230,loss1.73(2573.6 examples/sec; 0.050 sec/batch)
step 2240,loss1.59(2450.1 examples/sec; 0.052 sec/batch)
step 2250,loss1.59(2565.3 examples/sec; 0.050 sec/batch)
step 2260,loss1.63(2512.5 examples/sec; 0.051 sec/batch)
step 2270,loss1.80(2447.6 examples/sec; 0.052 sec/batch)
step 2280,loss1.37(2497.6 examples/sec; 0.051 sec/batch)
step 2290,loss1.44(1956.5 examples/sec; 0.065 sec/batch)
step 2300,loss1.53(2359.4 examples/sec; 0.054 sec/batch)
step 2310,loss1.45(2437.5 examples/sec; 0.053 sec/batch)
step 2320,loss1.59(2443.9 examples/sec; 0.052 sec/batch)
step 2330,loss1.64(2473.8 examples/sec; 0.052 sec/batch)
step 2340,loss1.82(1809.6 examples/sec; 0.071 sec/batch)
step 2350,loss1.57(2517.8 examples/sec; 0.051 sec/batch)
step 2360,loss1.59(2536.2 examples/sec; 0.050 sec/batch)
step 2370,loss1.46(2529.4 examples/sec; 0.051 sec/batch)
step 2380,loss1.60(2308.0 examples/sec; 0.055 sec/batch)
step 2390,loss1.63(1630.2 examples/sec; 0.079 sec/batch)
step 2400,loss1.56(2457.1 examples/sec; 0.052 sec/batch)
step 2410,loss1.53(2488.0 examples/sec; 0.051 sec/batch)
step 2420,loss1.47(2503.9 examples/sec; 0.051 sec/batch)
step 2430,loss1.65(2565.7 examples/sec; 0.050 sec/batch)
step 2440,loss1.58(1121.7 examples/sec; 0.114 sec/batch)
step 2450,loss1.47(2429.9 examples/sec; 0.053 sec/batch)
step 2460,loss1.53(2558.5 examples/sec; 0.050 sec/batch)
step 2470,loss1.40(2516.8 examples/sec; 0.051 sec/batch)
step 2480,loss1.55(2421.8 examples/sec; 0.053 sec/batch)
step 2490,loss1.51(2546.1 examples/sec; 0.050 sec/batch)
step 2500,loss1.62(2426.9 examples/sec; 0.053 sec/batch)
step 2510,loss1.53(2362.1 examples/sec; 0.054 sec/batch)
step 2520,loss1.64(2498.0 examples/sec; 0.051 sec/batch)
step 2530,loss1.48(2536.4 examples/sec; 0.050 sec/batch)
step 2540,loss1.55(983.4 examples/sec; 0.130 sec/batch)
step 2550,loss1.59(2459.6 examples/sec; 0.052 sec/batch)
step 2560,loss1.58(2614.9 examples/sec; 0.049 sec/batch)
step 2570,loss1.40(2429.8 examples/sec; 0.053 sec/batch)
step 2580,loss1.53(2543.6 examples/sec; 0.050 sec/batch)
step 2590,loss1.58(1361.0 examples/sec; 0.094 sec/batch)
step 2600,loss1.66(2394.3 examples/sec; 0.053 sec/batch)
step 2610,loss1.50(2147.7 examples/sec; 0.060 sec/batch)
step 2620,loss1.57(2526.4 examples/sec; 0.051 sec/batch)
step 2630,loss1.53(2399.4 examples/sec; 0.053 sec/batch)
step 2640,loss1.51(2467.2 examples/sec; 0.052 sec/batch)
step 2650,loss1.60(2373.9 examples/sec; 0.054 sec/batch)
step 2660,loss1.49(1424.0 examples/sec; 0.090 sec/batch)
step 2670,loss1.59(2408.7 examples/sec; 0.053 sec/batch)
step 2680,loss1.52(2594.0 examples/sec; 0.049 sec/batch)
step 2690,loss1.50(2394.8 examples/sec; 0.053 sec/batch)
step 2700,loss1.61(2207.8 examples/sec; 0.058 sec/batch)
step 2710,loss1.52(1492.9 examples/sec; 0.086 sec/batch)
step 2720,loss1.59(2412.9 examples/sec; 0.053 sec/batch)
step 2730,loss1.54(2452.3 examples/sec; 0.052 sec/batch)
step 2740,loss1.60(2518.7 examples/sec; 0.051 sec/batch)
step 2750,loss1.63(2284.8 examples/sec; 0.056 sec/batch)
step 2760,loss1.44(1665.9 examples/sec; 0.077 sec/batch)
step 2770,loss1.57(2506.1 examples/sec; 0.051 sec/batch)
step 2780,loss1.44(2464.1 examples/sec; 0.052 sec/batch)
step 2790,loss1.55(2580.4 examples/sec; 0.050 sec/batch)
step 2800,loss1.39(2498.5 examples/sec; 0.051 sec/batch)
step 2810,loss1.52(2521.3 examples/sec; 0.051 sec/batch)
step 2820,loss1.69(2450.6 examples/sec; 0.052 sec/batch)
step 2830,loss1.48(2411.7 examples/sec; 0.053 sec/batch)
step 2840,loss1.29(2493.8 examples/sec; 0.051 sec/batch)
step 2850,loss1.64(2442.8 examples/sec; 0.052 sec/batch)
step 2860,loss1.64(1503.7 examples/sec; 0.085 sec/batch)
step 2870,loss1.56(2554.2 examples/sec; 0.050 sec/batch)
step 2880,loss1.57(2526.0 examples/sec; 0.051 sec/batch)
step 2890,loss1.38(2378.1 examples/sec; 0.054 sec/batch)
step 2900,loss1.45(2538.9 examples/sec; 0.050 sec/batch)
step 2910,loss1.51(2400.8 examples/sec; 0.053 sec/batch)
step 2920,loss1.48(2351.3 examples/sec; 0.054 sec/batch)
step 2930,loss1.40(2010.4 examples/sec; 0.064 sec/batch)
step 2940,loss1.43(2432.9 examples/sec; 0.053 sec/batch)
step 2950,loss1.52(2453.2 examples/sec; 0.052 sec/batch)
step 2960,loss1.36(2467.6 examples/sec; 0.052 sec/batch)
step 2970,loss1.67(2510.7 examples/sec; 0.051 sec/batch)
step 2980,loss1.39(1155.1 examples/sec; 0.111 sec/batch)
step 2990,loss1.52(2515.8 examples/sec; 0.051 sec/batch)

In [20]:
num_examples = 10000
import math
num_iter = int(math.ceil(num_examples / batch_size))
true_count = 0
total_sample_out = num_iter * batch_size
step = 0
while step < num_iter:
    image_batch,label_batch = sess.run([images_test,labels_test])
    predictions = sess.run([top_k_op],feed_dict={image_holder:image_batch,label_holder:label_batch})
    true_count += np.sum(predictions)
    step += 1

In [21]:
precision = true_count / total_sample_out
print('precision @ 1 = %.3f'%precision)


precision @ 1 = 0.531

In [22]:
1018*0.002


Out[22]:
2.036

In [ ]: