In [1]:
from __future__ import division
import tensorflow as tf
import numpy as np
import tarfile
import os
import matplotlib.pyplot as plt
import time
%matplotlib inline

# READ AND DEFINE DATA SET
def csv_to_numpy_array(filePath, delimiter):
    return np.genfromtxt(filePath, delimiter=delimiter, dtype=None)

def import_data():
    if "data" not in os.listdir(os.getcwd()):
        # Untar directory of data if we haven't already
        tarObject = tarfile.open("data.tar.gz")
        tarObject.extractall()
        tarObject.close()
        print("Extracted tar to current directory")
    else:
        # we've already extracted the files
        pass

    print("loading training data")
    trainX = csv_to_numpy_array("data/trainX.csv", delimiter="\t")
    trainY = csv_to_numpy_array("data/trainY.csv", delimiter="\t")
    print("loading test data")
    testX = csv_to_numpy_array("data/testX.csv", delimiter="\t")
    testY = csv_to_numpy_array("data/testY.csv", delimiter="\t")
    return trainX,trainY,testX,testY

trainX,trainY,testX,testY = import_data()
# numFeatures = the number of words extracted from each email = number of input nodes
numFeatures = trainX.shape[1]
# numLabels = number of classes we are predicting (here just 2: Ham or Spam) = number of output nodes
numLabels = trainY.shape[1]


loading training data
loading test data

In [2]:
# DEFINE PLACEHOLDERS
n_i = numFeatures
n_o = numLabels
n_samples = trainX.shape[0]
n_h = 5
n_h_layers = 2
x = tf.placeholder('float32', [None, n_i]) # input placeholder
y_ = tf.placeholder('float32', [None, n_o]) # label placeholder

# DEFINE VARIABLES
def init_weights(shape):
    return tf.Variable(tf.random_normal(shape, stddev=0.01))

W1 = init_weights([n_i, n_h])
W2 = init_weights([n_h, n_o])
b = init_weights([n_h])

# DEFINE MODEL WITH SIGMOID ACTIVATION
def model(X, w_h, w_o, bias):
    h = tf.nn.sigmoid(tf.add(tf.matmul(X, w_h), bias[0]))
    return tf.matmul(h, w_o)

y = model(x, W1, W2, b)

# DEFINE COST FUNCTION
cost = tf.reduce_sum(tf.pow(tf.nn.sigmoid(y) - y_, 2) / 2)

# DEFINE OPTIMIZER AND PREDICTION 
train_op = tf.train.MomentumOptimizer(0.01, 0.95).minimize(cost)

predict_op = tf.nn.sigmoid(y)

# INITIALIZE SESSION
init = tf.initialize_all_variables()

sess = tf.Session()
sess.run(init)

# print inital weights and output bias
print("initial input weights:", W1.eval(session=sess))
print("initial hidden:", W2.eval(session=sess))
print("initial output bias:", b.eval(session=sess))

# GRAPH LIVE UPDATING
epoch_values = []
accuracy_values = []
# Turn on interactive plotting
plt.ion()
# Create the main, super plot
fig = plt.figure()
# Create two subplots on their own axes and give titles
ax1 = plt.subplot("211")
ax1.set_title("TRAINING ACCURACY", fontsize=18)

# iterate 1000 times
for i in range(1000):
    _, loss_value = sess.run([train_op, cost], feed_dict={x: trainX, y_: trainY})
    if i%100 == 0:
        #loss_value = sess.run(accuracy_op, feed_dict={x: trainX, y_: trainY})
        print("\nmse:", loss_value)
        epoch_values.append(i)
        accuracy_values.append(loss_value)
        accuracyLine, = ax1.plot(epoch_values, accuracy_values)
        fig.canvas.draw()
        
# RUN THROUGH TEST DATA 
predictions = sess.run(predict_op, feed_dict={x: testX, y_: testY})
correct = 0
for i in range(predictions.shape[0]):
    test_result = predictions[i]
    label_result = testY[i]
    test = 0
    label = 0
    if (test_result[0] > test_result[1]):
        test = 0
    else:
        test = 1
    if (label_result[0] > label_result[1]):
        label = 0
    else:
        label = 1
    if test == label:
        correct = correct + 1
        
correct = (correct / predictions.shape[0]) * 100
print(sess.run(predict_op, feed_dict={x: testX, y_: testY}))  
print(testY)
print("final accuracy on test set: %s" %str(correct))


initial input weights: [[ 0.00409399 -0.00457005 -0.00469943  0.00737868  0.00572969]
 [ 0.01690714  0.00653632  0.00218543 -0.00697609 -0.01825775]
 [ 0.0018333   0.00182021 -0.00391219 -0.00595224 -0.0086149 ]
 ..., 
 [-0.01350973 -0.01200246 -0.00776011 -0.00306294 -0.00500573]
 [-0.00501641 -0.01263148 -0.01286929 -0.00691761  0.00603941]
 [-0.01088441 -0.00421671  0.00660999  0.00272577  0.00030339]]
initial hidden: [[ 0.01425428 -0.00882506]
 [-0.00220674  0.00116308]
 [-0.00986179 -0.00816871]
 [-0.01040369 -0.01192335]
 [-0.00293965 -0.00478951]]
initial output bias: [-0.01274253 -0.00274669 -0.00163882  0.00071533  0.00019768]

mse: 238.343

mse: 235.975

mse: 18.9846

mse: 5.38242

mse: 3.92926

mse: 2.8145

mse: 2.22301

mse: 1.64663

mse: 1.47858

mse: 1.38405
[[  9.99873996e-01   1.25998617e-04]
 [  5.72291261e-04   9.99427617e-01]
 [  1.21487770e-04   9.99878526e-01]
 [  9.99503970e-01   4.95960296e-04]
 [  1.79745420e-03   9.98202562e-01]
 [  1.61107816e-03   9.98388886e-01]
 [  1.12894792e-02   9.88710582e-01]
 [  9.98741567e-01   1.25842425e-03]
 [  9.99946475e-01   5.35040344e-05]
 [  9.99279320e-01   7.20720622e-04]
 [  9.99511719e-01   4.88338701e-04]
 [  9.98666286e-01   1.33374077e-03]
 [  9.99635458e-01   3.64557403e-04]
 [  9.99840140e-01   1.59861302e-04]
 [  4.62899776e-03   9.95370924e-01]
 [  4.27253108e-04   9.99572694e-01]
 [  9.99743283e-01   2.56711093e-04]
 [  9.52210605e-01   4.77913022e-02]
 [  4.36646631e-04   9.99563396e-01]
 [  1.42173795e-02   9.85782385e-01]
 [  4.90219209e-05   9.99951005e-01]
 [  1.15279888e-03   9.98847246e-01]
 [  8.35957192e-03   9.91640449e-01]
 [  9.96648848e-01   3.35107627e-03]
 [  1.36715516e-01   8.63285184e-01]
 [  9.98415351e-01   1.58455083e-03]
 [  7.31389737e-04   9.99268591e-01]
 [  1.55557267e-04   9.99844432e-01]
 [  3.57285258e-03   9.96427119e-01]
 [  9.99972343e-01   2.76274641e-05]
 [  1.37891584e-05   9.99986172e-01]
 [  1.11001981e-07   9.99999881e-01]
 [  7.20655153e-05   9.99927878e-01]
 [  1.50656313e-04   9.99849319e-01]
 [  1.66405016e-03   9.98336017e-01]
 [  9.99145985e-01   8.53978097e-04]
 [  1.77302271e-01   8.22704136e-01]
 [  4.84214118e-03   9.95157897e-01]
 [  6.67013461e-03   9.93329942e-01]
 [  9.98921394e-01   1.07873196e-03]
 [  3.86103711e-05   9.99961376e-01]
 [  9.67594624e-01   3.24050076e-02]
 [  2.14149593e-03   9.97858584e-01]
 [  9.99254525e-01   7.45486701e-04]
 [  1.01726361e-01   8.98271501e-01]
 [  9.77847755e-01   2.21521147e-02]
 [  2.16199536e-04   9.99783814e-01]
 [  6.27777772e-03   9.93722260e-01]
 [  9.95302677e-01   4.69743786e-03]
 [  6.49473146e-02   9.35052037e-01]
 [  9.17060256e-01   8.29445198e-02]
 [  9.99961257e-01   3.87957261e-05]
 [  9.97223854e-01   2.77629797e-03]
 [  9.99348819e-01   6.51172653e-04]
 [  4.02007456e-04   9.99597967e-01]
 [  4.06889158e-05   9.99959350e-01]
 [  2.42474372e-03   9.97575223e-01]
 [  9.99761760e-01   2.38258945e-04]
 [  9.91679847e-01   8.32021516e-03]
 [  9.99476135e-01   5.23936236e-04]
 [  9.96153772e-01   3.84620717e-03]
 [  9.99982595e-01   1.74389461e-05]
 [  1.69388130e-02   9.83061254e-01]
 [  9.99958515e-01   4.14287679e-05]
 [  7.34866990e-06   9.99992609e-01]
 [  9.98356640e-01   1.64337421e-03]
 [  9.99473989e-01   5.25984738e-04]
 [  9.99410629e-01   5.89343312e-04]
 [  9.99735057e-01   2.64931819e-04]
 [  4.21281642e-04   9.99578774e-01]
 [  9.91490185e-01   8.50996654e-03]
 [  2.97006965e-01   7.02992618e-01]
 [  9.79978144e-01   2.00215876e-02]
 [  2.28837423e-04   9.99771178e-01]
 [  9.91490185e-01   8.50996654e-03]
 [  9.99955177e-01   4.48165629e-05]
 [  9.99821961e-01   1.78086950e-04]
 [  6.60213345e-06   9.99993443e-01]
 [  9.99988198e-01   1.17566142e-05]
 [  9.99878407e-01   1.21628495e-04]
 [  9.99690056e-01   3.09983938e-04]
 [  2.43172559e-04   9.99756873e-01]
 [  6.42821396e-05   9.99935746e-01]
 [  6.62971437e-01   3.37027729e-01]
 [  6.43798010e-03   9.93561864e-01]
 [  9.95658398e-01   4.34157811e-03]
 [  9.97262478e-01   2.73755635e-03]
 [  7.75361841e-05   9.99922514e-01]
 [  9.81366932e-01   1.86327938e-02]
 [  9.90298748e-01   9.70102474e-03]
 [  1.38798375e-02   9.86119986e-01]
 [  1.30299421e-03   9.98696983e-01]
 [  9.99893665e-01   1.06316475e-04]
 [  1.86193716e-02   9.81380939e-01]
 [  9.99734461e-01   2.65565817e-04]
 [  9.99791205e-01   2.08866957e-04]
 [  9.97056723e-01   2.94324639e-03]
 [  9.99835014e-01   1.64954210e-04]
 [  9.98480618e-01   1.51936733e-03]
 [  4.69356962e-03   9.95306432e-01]
 [  5.35400212e-01   4.64605272e-01]
 [  9.99583423e-01   4.16543102e-04]
 [  4.33330797e-03   9.95666742e-01]
 [  7.16476282e-03   9.92835104e-01]
 [  9.99981165e-01   1.87977985e-05]]
[[ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 1.  0.]
 [ 1.  0.]
 [ 0.  1.]
 [ 0.  1.]
 [ 1.  0.]]
final accuracy on test set: 100.0