In [ ]:
import tensorflow as tf

In [ ]:
from tensorflow.contrib.layers import batch_norm

In [ ]:
n_inputs = 28 * 28   # each MNIST image is 28x28 pixels, flattened to 784 features
n_hidden1 = 300
n_hidden2 = 100
n_outputs = 10       # one output per digit class (0-9)

In [ ]:
X = tf.placeholder(tf.float32, shape=(None, n_inputs), name='X')

In [ ]:
y = tf.placeholder(tf.int64, shape=(None,), name='y')  # 1-D vector of integer class labels

In [ ]:
is_training = tf.placeholder(tf.bool, shape=(), name='is_training')  # fed True during training, False at test time

In [ ]:
bn_params = {
    'is_training': is_training,   # use batch statistics when training, moving averages otherwise
    'decay': 0.99,                # decay rate for the moving mean and variance
    'updates_collections': None,  # update the moving averages in place during training
}
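
For reference, a minimal sketch (commented out, not part of the graph built below) of how `batch_norm` would consume these parameters if called directly on a hypothetical pre-activation tensor `z`; `fully_connected` does the equivalent internally when given `normalizer_fn=batch_norm` and `normalizer_params=bn_params`.

In [ ]:
# Sketch only -- `z` is a hypothetical pre-activation tensor:
# z_bn = batch_norm(z, **bn_params)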

In [ ]:
from tensorflow.contrib.layers import fully_connected

In [ ]:
# Equivalent explicit per-layer calls, kept for reference; the arg_scope block
# below sets normalizer_fn and normalizer_params once for all three layers.
# hidden1 = fully_connected(X, n_hidden1, scope='hidden1', normalizer_fn=batch_norm, normalizer_params=bn_params)
# hidden2 = fully_connected(hidden1, n_hidden2, scope='hidden2', normalizer_fn=batch_norm, normalizer_params=bn_params)
# logits = fully_connected(hidden2, n_outputs, scope='outputs', activation_fn=None, normalizer_fn=batch_norm, normalizer_params=bn_params)

In [ ]:
# Apply batch normalization to every fully_connected layer created in this scope.
with tf.contrib.framework.arg_scope(
        [fully_connected],
        normalizer_fn=batch_norm,
        normalizer_params=bn_params):
    hidden1 = fully_connected(X, n_hidden1, scope='hidden1')
    hidden2 = fully_connected(hidden1, n_hidden2, scope='hidden2')
    logits = fully_connected(hidden2, n_outputs, scope='outputs', activation_fn=None)

In [ ]:
with tf.name_scope('loss'):
    # expects integer class labels and unscaled logits; applies softmax internally
    xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits)
    loss = tf.reduce_mean(xentropy, name='loss')

In [ ]:
learning_rate = 0.01
with tf.name_scope('train'):
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    training_op = optimizer.minimize(loss)
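
Because bn_params sets `updates_collections` to None, batch_norm updates its moving mean and variance in place at each training step, so no extra update ops are needed here. A minimal sketch (commented out, hypothetical alternative) of the wiring that would be required if `updates_collections` were left at its default of `tf.GraphKeys.UPDATE_OPS`:

In [ ]:
# update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
# with tf.control_dependencies(update_ops):
#     training_op = optimizer.minimize(loss)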

In [ ]:
with tf.name_scope('eval'):
    correct = tf.nn.in_top_k(logits, y, 1)   # True where the top-scoring class matches the label
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))

In [ ]:
init = tf.global_variables_initializer()
saver = tf.train.Saver()

In [ ]:
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('/tmp/data/')   # downloads MNIST to /tmp/data/ on first run

In [ ]:
n_epochs = 10
batch_size = 50

In [ ]:
with tf.Session() as sess:
    init.run()
    for epoch in range(n_epochs):
        for iteration in range(mnist.train.num_examples // batch_size):
            X_batch, y_batch = mnist.train.next_batch(batch_size=batch_size)
            sess.run(training_op, feed_dict={is_training: True, X: X_batch, y: y_batch})
        # accuracy on the last mini-batch of the epoch (not the full training set)
        acc_train = accuracy.eval(feed_dict={is_training: False, X: X_batch, y: y_batch})
        acc_test = accuracy.eval(feed_dict={is_training: False, X: mnist.test.images, y: mnist.test.labels})
        print(epoch, "Train accuracy:", acc_train, "Test accuracy:", acc_test)

    save_path = saver.save(sess, './my_model_final.ckpt')
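
As a usage sketch (not in the original notebook), the saved checkpoint can be restored later for inference; `is_training` must still be fed as False so batch norm uses its moving averages.

In [ ]:
# Sketch: restore the trained parameters and re-evaluate on the test set.
with tf.Session() as sess:
    saver.restore(sess, './my_model_final.ckpt')
    acc_test = accuracy.eval(feed_dict={is_training: False,
                                        X: mnist.test.images,
                                        y: mnist.test.labels})
    print('Test accuracy (restored model):', acc_test)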

In [ ]: