In [1]:
from six.moves import cPickle as pickle
import matplotlib.pyplot as plt
import os
import tensorflow as tf
import numpy as np

In [2]:
wd = os.getcwd()
file_name = wd+'/front_dist_data'
with open(file_name, 'rb') as f:
    save = pickle.load(f)
    pos_data = save['pos_data']
    neg_data = save['neg_data']
    del save

print('pos_data: ',pos_data.shape)
print('neg_data: ',neg_data.shape)
pos_nm, pos_dim = pos_data.shape
neg_nm, neg_dim = neg_data.shape
assert pos_dim == neg_dim
data_dim = pos_dim

dataset = np.concatenate((pos_data, neg_data), axis=0)
labels = np.concatenate((np.ones(pos_nm),np.zeros(neg_nm)), axis=0)


('pos_data: ', (10000, 25))
('neg_data: ', (10000, 25))

In [3]:
'''Randomize Data'''
def randomize(dataset, labels):
    permutation = np.random.permutation(labels.shape[0])
    shuffled_dataset = dataset[permutation,:]
    shuffled_labels = labels[permutation].reshape(-1,1)
    return shuffled_dataset, shuffled_labels

dataset, labels = randomize(dataset, labels)

for i in range(3):
	index = np.random.randint(dataset.shape[0])
	plt.plot(dataset[index,:])
	print('label',labels[index])
	plt.show()

'''Assign Dataset'''
data_nm = dataset.shape[0]
train_dataset = dataset[0:int(0.6*data_nm),:].astype(np.float32)
train_labels = labels[0:int(0.6*data_nm),:].astype(np.float32)
valid_dataset = dataset[int(0.6*data_nm):int(0.8*data_nm),:].astype(np.float32)
valid_labels = labels[int(0.6*data_nm):int(0.8*data_nm),:].astype(np.float32)
test_dataset =  dataset[int(0.8*data_nm):,:].astype(np.float32)
test_labels = labels[int(0.8*data_nm):,:].astype(np.float32)


('label', array([ 0.]))
('label', array([ 0.]))
('label', array([ 1.]))

In [4]:
'''Define MSE'''
def MSE(predictions, labels):
    return np.mean((predictions-labels)**2)

In [16]:
batch_size = 20
hidden_nm = 10

graph = tf.Graph()
with graph.as_default():

    tf_train_dataset = tf.placeholder(tf.float32, shape=(batch_size,data_dim))
    tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size,1))
    tf_valid_dataset = tf.constant(valid_dataset)
    tf_test_dataset = tf.constant(test_dataset)

    weights1 = tf.Variable(tf.truncated_normal([data_dim,hidden_nm], stddev=1.0))
    biases1 = tf.Variable(tf.zeros([hidden_nm]))
    
    weights2 = tf.Variable(tf.truncated_normal([hidden_nm,1], stddev=1.0))
    biases2 = tf.Variable(tf.zeros([1]))
    
    def model(data):
        hidden_in = tf.matmul(data, weights1) + biases1
        hidden_out = tf.nn.relu(hidden_in)
        
        o = tf.matmul(hidden_out, weights2) + biases2
        return o

    train_prediction = model(tf_train_dataset)
    loss = tf.reduce_mean(tf.square(tf_train_labels - train_prediction))
    optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(loss)

    valid_prediction = model(tf_valid_dataset)
    test_prediction = model(tf_test_dataset)

In [17]:
nm_steps = 10000
with tf.Session(graph=graph) as session:
    tf.initialize_all_variables().run()
    print('Initialized')
    for step in range(nm_steps):
        offset = (step * batch_size) % (train_labels.shape[0] - batch_size)
        batch_data = train_dataset[offset:(offset + batch_size), :]
        batch_labels = train_labels[offset:(offset + batch_size), :]
        feed_dict = {tf_train_dataset : batch_data, tf_train_labels : batch_labels}
        _, l, predictions = session.run([optimizer, loss, train_prediction], feed_dict=feed_dict)
        if (step % 1000 == 0):
            print('*'*40)
            print('Minibatch loss at step %d: %f' % (step, l))
            print('Minibatch MSE: %.3f' % MSE(predictions, batch_labels))
            print('Validation MSE: %.3f' % MSE(valid_prediction.eval(), valid_labels))
    print('Test MSE: %.3f' % MSE(test_prediction.eval(), test_labels))
    i_test = 0
    while(i_test!=''):
        i_test = input("Input an index of test data (or Enter to quit): ")
        label = test_labels[int(i_test),:]
        print(label)
        prd = model(test_dataset[i_test,:].reshape(1,25)).eval()
        print(prd)


Initialized
****************************************
Minibatch loss at step 0: 0.542610
Minibatch MSE: 0.543
Validation MSE: 0.534
****************************************
Minibatch loss at step 1000: 0.126708
Minibatch MSE: 0.127
Validation MSE: 0.136
****************************************
Minibatch loss at step 2000: 0.064544
Minibatch MSE: 0.065
Validation MSE: 0.130
****************************************
Minibatch loss at step 3000: 0.103932
Minibatch MSE: 0.104
Validation MSE: 0.127
****************************************
Minibatch loss at step 4000: 0.128556
Minibatch MSE: 0.129
Validation MSE: 0.122
****************************************
Minibatch loss at step 5000: 0.122910
Minibatch MSE: 0.123
Validation MSE: 0.118
****************************************
Minibatch loss at step 6000: 0.071682
Minibatch MSE: 0.072
Validation MSE: 0.115
****************************************
Minibatch loss at step 7000: 0.112688
Minibatch MSE: 0.113
Validation MSE: 0.113
****************************************
Minibatch loss at step 8000: 0.107181
Minibatch MSE: 0.107
Validation MSE: 0.107
****************************************
Minibatch loss at step 9000: 0.173820
Minibatch MSE: 0.174
Validation MSE: 0.105
Test MSE: 0.102
Input an index of test data (or Enter to quit): 1
[ 0.]
[[-0.02340201]]
Input an index of test data (or Enter to quit): 23
[ 0.]
[[ 0.10225669]]
Input an index of test data (or Enter to quit): 4
[ 1.]
[[ 0.61959016]]
Input an index of test data (or Enter to quit): 3
[ 0.]
[[ 0.46969333]]
Input an index of test data (or Enter to quit): 54
[ 1.]
[[ 0.73967028]]
Input an index of test data (or Enter to quit): 12
[ 1.]
[[ 0.77364874]]
Input an index of test data (or Enter to quit): 56
[ 0.]
[[ 0.04564634]]
Input an index of test data (or Enter to quit): 78
[ 1.]
[[ 0.74282873]]
Input an index of test data (or Enter to quit): 
  File "<string>", line unknown
    
    ^
SyntaxError: unexpected EOF while parsing