In [66]:
from antk.core import config, node_ops
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import time, sys

In [67]:
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)


Extracting MNIST_data/train-images-idx3-ubyte.gz
Extracting MNIST_data/train-labels-idx1-ubyte.gz
Extracting MNIST_data/t10k-images-idx3-ubyte.gz
Extracting MNIST_data/t10k-labels-idx1-ubyte.gz
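
The loader returns train, validation, and test splits with flattened 28x28 images and one-hot labels. A quick shape check (a sketch; the 55,000/5,000/10,000 split is the loader's default partition):

print(mnist.train.images.shape)       # (55000, 784)
print(mnist.train.labels.shape)       # (55000, 10)
print(mnist.validation.images.shape)  # (5000, 784)
print(mnist.test.images.shape)        # (10000, 784)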

In [68]:
x = tf.placeholder(tf.float32, [None, 784])  # flattened 28x28 MNIST images

In [ ]:
# Three 100-unit hidden layers with ANTK's 'tanhlecun' activation, followed by a
# multinomial logistic regression (softmax) output over the 10 digit classes.
y = node_ops.mult_log_reg(node_ops.dnn(x, [100, 100, 100], activation='tanhlecun'), numclasses=10)
y_ = tf.placeholder(tf.float32, [None, 10])  # one-hot target labels

# Cross-entropy between the predicted class probabilities and the one-hot targets.
cross_entropy = -tf.reduce_sum(y_*tf.log(y))

train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)

# Fraction of examples whose argmax prediction matches the true label.
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

# TensorBoard summaries (pre-1.0 TensorFlow summary API; in TF >= 1.0 these calls
# are tf.summary.scalar and tf.summary.FileWriter).
accuracy_summary = tf.scalar_summary('Accuracy', accuracy)
session = tf.Session()
summary_writer = tf.train.SummaryWriter('log/logistic_regression', session.graph)
session.run(tf.global_variables_initializer())

for i in range(1000):
    # One SGD step on a minibatch of 100 training images.
    batch_xs, batch_ys = mnist.train.next_batch(100)
    session.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})

    # Evaluate on the full test set and log the accuracy summary for TensorBoard.
    acc, summary_str = session.run([accuracy, accuracy_summary],
                                   feed_dict={x: mnist.test.images, y_: mnist.test.labels})
    summary_writer.add_summary(summary_str, i)

    # Fractional epoch = (examples seen so far) / (training-set size).
    sys.stdout.write('\repoch: %f acc: %f' % (i*100.0/mnist.train.images.shape[0], acc))
    sys.stdout.flush()
    time.sleep(0.5)  # pause so the carriage-returned progress line stays readable


epoch: 0.887273 acc: 0.739300
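
For reference, the ANTK call above stands in for the usual softmax output layer. A minimal hand-rolled sketch of that layer (illustrative only; node_ops.mult_log_reg's actual weight initialization and variable naming may differ):

hidden = node_ops.dnn(x, [100, 100, 100], activation='tanhlecun')
W = tf.Variable(tf.zeros([100, 10]))   # last hidden layer has 100 units -> 10 classes
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(hidden, W) + b)

The accuracy curve written during training can be inspected by running tensorboard --logdir log/logistic_regression.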

In [ ]: