In [1]:
# Standard-library, numeric, and TensorFlow imports for the whole notebook.
import os
import time
import numpy as np
import tensorflow as tf
# NOTE(review): tf.contrib was removed in TF 2.x — this notebook targets TF 1.x.
# `learn` is imported here but never used below; candidate for removal.
from tensorflow.contrib import learn
# Show INFO-level log messages (training progress, checkpoint activity).
tf.logging.set_verbosity(tf.logging.INFO)
In [2]:
In [3]:
# Download (if not already cached) and load MNIST into ./MNIST_data.
# one_hot=True makes labels [N, 10] one-hot vectors, matching the
# softmax_cross_entropy_with_logits loss used in the cells below.
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True)
In [9]:
# Train a single-layer softmax classifier on MNIST with feed_dict batches,
# save the parameters to a checkpoint, then report test accuracy and loss.
sess = tf.InteractiveSession()

# Inputs: flattened 28x28 images and one-hot labels.
x = tf.placeholder(tf.float32, shape=[None, 784])
y_ = tf.placeholder(tf.float32, shape=[None, 10])

# Model parameters. Zero init is fine for a linear softmax model (no
# symmetry-breaking needed, unlike a multi-layer net).
W = tf.Variable(tf.zeros([784, 10]), name="W")
b = tf.Variable(tf.zeros([10]), name="b")
y = tf.matmul(x, W) + b  # raw logits; softmax is applied inside the loss op

cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

# Save only the model parameters under stable names so the restore cell
# below can reload them into a freshly built graph.
saver = tf.train.Saver({"W": W, "b": b})

sess.run(tf.global_variables_initializer())
for _ in range(10000):
    batch = mnist.train.next_batch(100)
    train_step.run(feed_dict={x: batch[0], y_: batch[1]})
# Checkpoint once, after training completes.
saver.save(sess, "model.ckpt")

# Evaluate on the held-out test set.
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
print(cross_entropy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
sess.close()
In [14]:
# Rebuild the same linear model, restore W/b from the checkpoint written by
# the training cell, and evaluate on the test set — no retraining here.
sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, shape=[None, 784], name="x")
y_ = tf.placeholder(tf.float32, shape=[None, 10], name="y_")
W = tf.Variable(tf.zeros([784, 10]), name="W")
b = tf.Variable(tf.zeros([10]), name="b")
y = tf.matmul(x, W) + b
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

# Must use the same name->variable mapping as the saving cell.
saver = tf.train.Saver({"W": W, "b": b})

# The initializer is redundant for W and b (restore immediately overwrites
# them) but harmless, and it keeps any other graph variables initialized.
sess.run(tf.global_variables_initializer())
saver.restore(sess, "model.ckpt")

correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
print(cross_entropy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
sess.close()
In [12]:
# Continue training for 1000 more steps and re-evaluate test accuracy.
# NOTE(review): this cell reuses `sess`, `train_step`, `x`, `y_`, `y` from an
# earlier cell, but both earlier cells end with sess.close() — run in order,
# this raises "Attempted to use a closed Session". Either drop the earlier
# sess.close() calls or rebuild the session/graph before running this cell.
# Also note the out-of-order execution count (In[12] after In[14]) — this
# notebook does not survive Restart Kernel -> Run All as written.
for _ in range(1000):
    batch = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch[0], y_: batch[1]})
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
In [39]:
# Collect the full paths of every TFRecord file in the input directory.
# NOTE: hardcoded absolute local path — adjust for your machine.
#input_dir_path = r'C:\Users\ericd\Desktop\Projects\playgrounds\python-playground\mnist_tfrecords'
input_dir_path = r'C:\Users\ericd\Desktop\Projects\playgrounds\python-playground\mnist'
filenames = [
    os.path.join(input_dir_path, name)
    for name in os.listdir(input_dir_path)
    if name.lower().endswith(('.tfrecord', '.tfrecords'))
]
print("{} files found.".format(len(filenames)))
In [55]:
# Train the same linear softmax model, but stream examples from TFRecord
# files via the TF1 queue-runner input pipeline instead of feed_dict.
with tf.Graph().as_default():
    # File queue cycled for 10 epochs; when exhausted, dequeue raises
    # OutOfRangeError, which terminates the training loop below.
    filename_queue = tf.train.string_input_producer(filenames, num_epochs=10)
    reader = tf.TFRecordReader()
    _, serialized_example = reader.read(filename_queue)
    features = tf.parse_single_example(
        serialized_example,
        features={
            'image': tf.FixedLenFeature([], tf.string),
            'label': tf.FixedLenFeature([], tf.int64),
        })

    # Decode raw bytes to a flat 784 vector and scale pixels to [-0.5, 0.5].
    image = tf.decode_raw(features['image'], tf.uint8)
    image.set_shape([28 * 28])
    image = tf.cast(image, tf.float32) * (1. / 255) - 0.5
    label = tf.cast(features['label'], tf.int32)

    batch_size = 100
    images, labels = tf.train.shuffle_batch(
        [image, label], batch_size=batch_size, num_threads=2,
        capacity=1000 + 3 * batch_size,
        min_after_dequeue=1000)

    # Linear softmax model on the batched pipeline tensors.
    W = tf.Variable(tf.zeros([784, 10]))
    b = tf.Variable(tf.zeros([10]))
    y = tf.matmul(images, W) + b
    # labels is already int32, so the inner cast is a harmless no-op.
    onehot_labels = tf.one_hot(indices=tf.cast(labels, tf.int32), depth=10)
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=onehot_labels, logits=y))
    train_op = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

    # string_input_producer's epoch counter is a *local* variable, so the
    # local initializer is required in addition to the global one.
    init_op = tf.group(tf.global_variables_initializer(),
                       tf.local_variables_initializer())

    sess = tf.Session()
    sess.run(init_op)
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    try:
        step = 0
        while not coord.should_stop():
            start_time = time.time()
            _, labels_values, loss_value = sess.run([train_op, labels, loss])
            duration = time.time() - start_time
            if step % 100 == 0:
                print('Step %d: loss = %.2f (%.3f sec)' % (step, loss_value, duration))
            step += 1
    except tf.errors.OutOfRangeError:
        # Normal termination: the input queue ran out after num_epochs.
        print('Done training for %d epochs, %d steps.' % (10, step))
    finally:
        print('Finished.')
        # Stop the queue-runner threads, wait for them, and release the
        # session even if an unexpected exception escapes the loop.
        coord.request_stop()
        coord.join(threads)
        sess.close()
In [ ]: