In [ ]:
import tensorflow as tf
#import tensorflow.contrib.learn.python.learn as learn
import tflearn
import scipy as sp
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import sklearn
import random
%matplotlib inline
In [ ]:
def importMNIST(filename):
    """
    Import MNIST data from a CSV file and separate the labels from the pixel data.
    """
    MNIST_raw = np.genfromtxt(filename, delimiter=",")
    MNIST_labels = np.zeros(MNIST_raw.shape[0])
    MNIST_data = []
    for i in range(MNIST_raw.shape[0]):
        MNIST_labels[i] = MNIST_raw[i, 0]    # first column holds the digit label
        MNIST_data.append(MNIST_raw[i, 1:])  # remaining 784 columns are the pixels
    return MNIST_labels, MNIST_data
def processLabels(MNIST_labels):
    """
    One-hot encode the MNIST labels.
    """
    n = len(MNIST_labels)
    MNIST_labelsOH = np.zeros((n, 10))
    MNIST_labelsOH[np.arange(n), MNIST_labels.astype(int)] = 1
    MNIST_labelsLst = []
    for i in range(MNIST_labelsOH.shape[0]):
        MNIST_labelsLst.append(MNIST_labelsOH[i])
    return MNIST_labelsLst
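As a quick sanity check (not part of the original pipeline), each one-hot row should sum to 1 and its argmax should recover the original digit; the small demo below assumes the generalized `processLabels` above.
In [ ]:
# Hypothetical sanity check: one-hot rows sum to 1 and argmax recovers the label.
demo_labels = np.array([3., 0., 7.])
demo_oh = np.array(processLabels(demo_labels))
print(demo_oh.sum(axis=1))     # expect [1. 1. 1.]
print(demo_oh.argmax(axis=1))  # expect [3 0 7]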
In [ ]:
MNIST_labels, MNIST_data = importMNIST("./inData/mnist_test.csv")
MNIST_labelsOH = processLabels(MNIST_labels)
MNIST_dataRS = np.reshape(MNIST_data, [-1, 784, 1])  # each digit as a length-784 "signal" with one channel
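The notebook fits on the full 10,000-example test CSV; holding out a validation split gives a rough check of generalization. A minimal sketch using `sklearn.model_selection.train_test_split` (the split itself is an assumption, not part of the original workflow):
In [ ]:
# Hypothetical train/validation split; the original notebook trains on all examples.
from sklearn.model_selection import train_test_split
X_train, X_val, Y_train, Y_val = train_test_split(
    MNIST_dataRS, np.array(MNIST_labelsOH), test_size=0.2, random_state=0)
print(X_train.shape, X_val.shape)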
In [ ]:
rand0 = random.randint(0, len(MNIST_data) - 1)  # pick a random example to inspect
print(rand0, MNIST_labelsOH[rand0], MNIST_labels[rand0])
plt.ylabel("Amplitude")
plt.xlabel("Index (flattened)")
plt.title("Flattened MNIST number example")
plt.plot(MNIST_data[rand0])
plt.savefig("/tmp/mnistflat.pdf")
In [ ]:
MNIST_dataTest = np.reshape(MNIST_data[rand0],[28,28])
plt.imshow(MNIST_dataTest, cmap='gray_r', interpolation='nearest')
plt.xlabel("x index")
plt.ylabel("y index")
plt.title("MNIST number example")
plt.savefig("/tmp/mnistexample.pdf")
In [ ]:
# tflearn.DNN manages its own session, so no explicit InteractiveSession is needed;
# just reset the default graph before building the network.
tf.reset_default_graph()
# Input layer:
net = tflearn.layers.core.input_data(shape=[None, 784, 1])
# First layer:
net = tflearn.layers.conv.conv_1d(net, 32, 6, activation="relu")
net = tflearn.layers.conv.max_pool_1d(net, 2)
# Second layer:
net = tflearn.layers.conv.conv_1d(net, 64, 6, activation="relu")
net = tflearn.layers.conv.max_pool_1d(net, 2)
# Fully connected layer 1:
net = tflearn.layers.core.fully_connected(net, 1024, activation="relu")
# Output layer:
net = tflearn.layers.core.fully_connected(net, 10, activation="softmax")
net = tflearn.layers.estimator.regression(net, optimizer='adam',
                                          loss='categorical_crossentropy',
                                          learning_rate=0.00001)
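For reference, assuming TFLearn's defaults ('same' padding for conv/pool and a pool stride equal to the kernel size), the tensor shapes through the network work out as sketched below; this is a back-of-the-envelope check, not output from the notebook.
In [ ]:
# Approximate shape walk-through (assumes tflearn defaults: padding='same',
# pool stride = kernel size); batch dimension shown as None.
#   input_data            -> (None, 784, 1)
#   conv_1d(32, 6)        -> (None, 784, 32)
#   max_pool_1d(2)        -> (None, 392, 32)
#   conv_1d(64, 6)        -> (None, 392, 64)
#   max_pool_1d(2)        -> (None, 196, 64)
#   fully_connected(1024) -> (None, 1024)
#   fully_connected(10)   -> (None, 10)   # softmax over the 10 digit classes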
In [ ]:
model = tflearn.DNN(net, tensorboard_verbose=3)
model.fit(MNIST_dataRS, MNIST_labelsOH, show_metric=True)
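Once training finishes, TFLearn's `DNN` object also exposes `evaluate`, `predict` and `save`; a minimal sketch follows, where reusing the training arrays and the file path are placeholder assumptions rather than part of the original notebook.
In [ ]:
# Minimal post-training sketch; the reuse of the training data and the save path
# below are placeholder assumptions.
print(model.evaluate(MNIST_dataRS, np.array(MNIST_labelsOH)))  # mean accuracy
pred = model.predict(MNIST_dataRS[rand0:rand0 + 1])            # class probabilities
print(np.argmax(pred[0]), MNIST_labels[rand0])                 # predicted vs. true digit
model.save("/tmp/mnist_conv1d.tflearn")                        # checkpoint the weights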