In [1]:
# Import Necessary Modules
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
In [2]:
# Set up the random seed
tf.set_random_seed(111)  # returns None, so there is nothing to store
# Display plots inline
%matplotlib inline
# Plots Figure Size
matplotlib.rcParams['figure.figsize']=(10.0,10.0)
In [3]:
# Read the dataset
Data = pd.read_csv("intro_to_ann.csv")
print(Data.head())
# Divide the data into train and test splits (first 350 rows for training, the rest for testing)
Xtrain, ytrain = np.array(Data.iloc[0:350, 0:2]), np.array(Data.iloc[0:350, 2:3])
Xtest, ytest = np.array(Data.iloc[350:, 0:2]), np.array(Data.iloc[350:, 2:3])
print('Training Set:', Xtrain.shape, ytrain.shape, '\nTesting Set: ', Xtest.shape, ytest.shape)
print('Total Data Size:', Data.shape)
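Since the first two columns are treated as planar features and the third as a binary class label (an assumption based on the slicing above), a quick scatter plot is a useful sanity check before training:

# Optional sanity check: plot the two features coloured by class label (assumes a 0/1 label column)
plt.scatter(Xtrain[:, 0], Xtrain[:, 1], c=ytrain.ravel(), cmap='bwr', s=20)
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.title('Training data coloured by class')
plt.show()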
In [4]:
# Function modules
# Initialise weights with small random numbers
def weights(size):
    wt = tf.random_normal(size, stddev=0.1)
    var = tf.Variable(wt)
    return var

# Forward propagation: softmax hidden layer, sigmoid output layer
def ForwardPropagation(X, wih, who):
    h = tf.nn.softmax(tf.matmul(X, wih))     # hidden layer activations
    yhat = tf.nn.sigmoid(tf.matmul(h, who))  # output layer activation
    return yhat
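To make the shapes concrete, here is a minimal NumPy sketch of the same forward pass (row-wise softmax over the hidden layer, sigmoid on the single output); the array sizes in the demo call are illustrative assumptions, not values taken from the dataset:

# Illustrative NumPy version of the forward pass (not used in training)
def np_forward(X, wih, who):
    z = X @ wih                              # (n, ni) x (ni, nh) -> (n, nh)
    z = z - z.max(axis=1, keepdims=True)     # shift for numerical stability
    h = np.exp(z) / np.exp(z).sum(axis=1, keepdims=True)   # row-wise softmax
    return 1.0 / (1.0 + np.exp(-(h @ who)))  # sigmoid output, shape (n, no)

demo = np_forward(np.random.randn(5, 2), np.random.randn(2, 4), np.random.randn(4, 1))
print(demo.shape)  # (5, 1)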
In [5]:
# Network Parameters
ni=Xtrain.shape[1] # Nodes in Input Layer
nh=4 # Nodes in Hidden Layer
no=1 # Nodes in output layer
print('Input Nodes:',ni,'\nHidden Nodes:',nh,'\nOutput Nodes:',no)
In [6]:
# Placeholders for feeding the data into the graph
X=tf.placeholder("float",[None,ni])
y=tf.placeholder("float",[None,no])
In [7]:
# Weight Initialization
wih = weights((ni,nh))
who = weights((nh,no))
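Since the forward pass uses only these two weight matrices and no bias terms, the parameter count of the whole network is small:

# Count trainable parameters: ni*nh into the hidden layer plus nh*no into the output layer
print('Trainable parameters:', ni * nh + nh * no)  # 2*4 + 4*1 = 12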
In [8]:
# Forward propagation using TensorFlow
yhat = ForwardPropagation(X, wih, who)
predict = tf.argmax(yhat, axis=1)
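Note that with a single output node, tf.argmax over axis 1 always returns 0, so predict carries no information here. For a one-unit sigmoid output, thresholding at 0.5 is more useful; the line below is an alternative sketch, not part of the original graph:

# Alternative: classify as 1 when the sigmoid output exceeds 0.5
predict_threshold = tf.cast(tf.greater(yhat, 0.5), tf.float32)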
In [9]:
# Backward propagation (loss and optimizer) using TensorFlow
cost = tf.reduce_mean(-tf.reduce_sum(y * tf.log(yhat), axis=1))
updates = tf.train.GradientDescentOptimizer(0.05).minimize(cost)
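Because yhat is a single sigmoid unit, the cost above only penalises the y = 1 term; rows with y = 0 contribute nothing to the gradient. A common alternative is the full binary cross-entropy, with the prediction clipped away from 0 and 1 so tf.log stays finite; this is a sketch of that option, not the loss used in this notebook:

# Sketch of a full binary cross-entropy loss for a single sigmoid output
yhat_clipped = tf.clip_by_value(yhat, 1e-7, 1.0 - 1e-7)
bce = tf.reduce_mean(-(y * tf.log(yhat_clipped) + (1.0 - y) * tf.log(1.0 - yhat_clipped)))
# updates = tf.train.GradientDescentOptimizer(0.05).minimize(bce)  # would replace the update rule above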
In [10]:
# Run the graph
S = tf.Session()
init = tf.global_variables_initializer()
S.run(init)
for iteration in range(100):
    # Train the model: one gradient step per training example
    for i in range(len(Xtrain)):
        S.run(updates, feed_dict={X: Xtrain[i:i + 1], y: ytrain[i:i + 1]})
    # Track the cost on the full training set once per iteration
    costfunction = S.run(cost, feed_dict={X: Xtrain, y: ytrain})
    print(costfunction)
    plt.plot(iteration, costfunction, 'x')
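After training, the held-out split gives a rough accuracy estimate. The sketch below reuses the open session S, thresholds the network output at 0.5 and compares against ytest; it is an addition to the original notebook:

# Estimate accuracy on the test split by thresholding the sigmoid output at 0.5
test_probs = S.run(yhat, feed_dict={X: Xtest})
test_preds = (test_probs > 0.5).astype(np.float32)
print('Test accuracy: %.3f' % np.mean(test_preds == ytest))
S.close()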