In [325]:
import tensorflow as tf
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed in newer scikit-learn
In [326]:
data = pd.read_csv("https://raw.githubusercontent.com/ml6973/Course/master/code/data/intro_to_ann.csv")
X, y = np.array(data.iloc[:, 0:2]), np.array(data.iloc[:, 2:3])  # .ix is deprecated; .iloc does positional slicing
print(X.shape, y.shape)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)  # test_size=1 would hold out a single sample; use a 20% split
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)
plt.scatter(X[:, 0], X[:, 1], s=40, c=y.ravel(), cmap=plt.cm.BuGn)  # ravel() gives matplotlib a 1-D color array
Out[326]: [scatter plot of the two input features, colored by class label]
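Before building the model, it can help to sanity-check the loaded data. A minimal sketch, assuming the labels are binary 0/1 as the scatter plot suggests:
In [ ]:
# Quick sanity check (assumes 0/1 labels in the third CSV column)
print(data.head())
print(np.bincount(y.ravel().astype(int)))  # samples per class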
In [327]:
epochs = 1000
learning_rate = 0.01
hl_nodes = 5
num_features = 2
num_output = 1
In [328]:
x = tf.placeholder(tf.float32, [None, num_features])
y_ = tf.placeholder(tf.float32, [None, num_output])
# Create model
def multilayer_perceptron(x, weights, biases):
    # Hidden layer with sigmoid activation
    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
    layer_1 = tf.nn.sigmoid(layer_1)  # original applied softmax here, contradicting its own comment; sigmoid matches the stated design
    # Output layer with sigmoid activation
    out_layer = tf.matmul(layer_1, weights['out']) + biases['out']
    out_layer = tf.nn.sigmoid(out_layer)
    return out_layer
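For comparison, the same two-layer architecture can be written more compactly with the TF 1.x tf.layers API. This is a sketch only; the dictionary-based version above is what the rest of the notebook actually uses:
In [ ]:
# Equivalent network via tf.layers (sketch; not wired into the graph below)
def mlp_layers(x):
    hidden = tf.layers.dense(x, hl_nodes, activation=tf.nn.sigmoid)
    return tf.layers.dense(hidden, num_output, activation=tf.nn.sigmoid)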
In [329]:
# Store layers weight & bias
weights = {
    'h1': tf.Variable(tf.random_normal([num_features, hl_nodes])),
    'out': tf.Variable(tf.random_normal([hl_nodes, num_output]))
}
biases = {
    'b1': tf.Variable(tf.random_normal([hl_nodes])),
    'out': tf.Variable(tf.random_normal([num_output]))
}
# Construct model
pred = multilayer_perceptron(x, weights, biases)
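Unit-variance random_normal draws can push the sigmoid units into saturation on some runs. A smaller stddev is a common heuristic (not part of the original notebook); it would have to replace the dicts above before pred is built:
In [ ]:
# Alternative initialization (sketch): smaller spread keeps sigmoids out of saturation
small_weights = {
    'h1': tf.Variable(tf.random_normal([num_features, hl_nodes], stddev=0.1)),
    'out': tf.Variable(tf.random_normal([hl_nodes, num_output], stddev=0.1))
}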
In [330]:
# Define loss and optimizer
# Binary cross-entropy; clip to avoid log(0). The original dropped the (1 - y) term,
# which yields no gradient on negative examples with a single sigmoid output.
pred_clipped = tf.clip_by_value(pred, 1e-7, 1.0 - 1e-7)
cross_entropy = tf.reduce_mean(-tf.reduce_sum(
    y_ * tf.log(pred_clipped) + (1.0 - y_) * tf.log(1.0 - pred_clipped), axis=1))
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)
# Initializing the variables
init = tf.global_variables_initializer()  # initialize_all_variables is deprecated
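A numerically safer option is to let TensorFlow fuse the sigmoid into the loss. The sketch below assumes the model is changed to return pre-sigmoid logits, which it does not as written, so it is left commented out:
In [ ]:
# Stable fused loss (sketch). 'logits' is hypothetical: the model above returns
# sigmoid outputs, so the final tf.nn.sigmoid would have to be dropped first.
# logits = tf.matmul(layer_1, weights['out']) + biases['out']
# stable_loss = tf.reduce_mean(
#     tf.nn.sigmoid_cross_entropy_with_logits(labels=y_, logits=logits))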
In [331]:
# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    for i in range(epochs):
        sess.run(optimizer, feed_dict={x: X_train, y_: y_train})
    print(sess.run(cross_entropy, feed_dict={x: X_train, y_: y_train}))
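The loop above reports only the final training loss. A held-out check is a natural next step; a minimal sketch, thresholding the sigmoid output at 0.5. The ops can be defined anywhere, but the sess.run call would need to execute inside the with-block above, before the session closes:
In [ ]:
# Test-set accuracy (sketch): round predictions and compare against labels
correct = tf.equal(tf.cast(pred > 0.5, tf.float32), y_)
accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
# sess.run(accuracy, feed_dict={x: X_test, y_: y_test})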