In [1]:
import tensorflow as tf
import numpy as np

# Train a tiny 2-6-1 sigmoid network to learn the XNOR truth table,
# using TF1-style graph construction and gradient descent.

# Fix the graph-level seed so the random weight initialisation (and hence
# the printed cost trajectory) is reproducible across runs.
tf.set_random_seed(42)

# Placeholders: x holds batches of 2-bit inputs, y the 1-bit XNOR targets.
x = tf.placeholder(tf.float32, shape=[None, 2])
y = tf.placeholder(tf.float32, shape=[None, 1])

# Hidden layer: 2 inputs -> 6 sigmoid units.
weights = tf.Variable(tf.random_normal([2, 6]), dtype=tf.float32)
bias = tf.Variable(tf.random_normal([6]), dtype=tf.float32)

multiply1 = tf.add(tf.matmul(x, weights), bias)
z = tf.sigmoid(multiply1)

# Output layer: 6 hidden units -> 1 sigmoid output.
# NOTE(review): there is no bias term on the output layer — the network
# still converges for this toy problem, but adding one would be standard.
out1 = tf.Variable(tf.random_normal([6, 1]))
z2 = tf.sigmoid(tf.matmul(z, out1))

# Binary cross-entropy. Clip the predictions away from exactly 0 and 1 so
# tf.log never produces -inf/NaN once the sigmoid saturates — the original
# tf.log(z2) form is numerically unstable in that regime.
eps = 1e-7
z2_safe = tf.clip_by_value(z2, eps, 1.0 - eps)
cost = -tf.reduce_mean(y * tf.log(z2_safe) + (1 - y) * tf.log(1 - z2_safe))
optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(cost)

# XNOR truth table: output is 1 exactly when both inputs are equal.
inp = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
op = np.array([[1], [0], [0], [1]])

with tf.Session() as sess:

    tf.global_variables_initializer().run()
    for i in range(12001):
        # One gradient step; fetch the pre-update cost in the same run call.
        res, _ = sess.run([cost, optimizer], feed_dict={x: inp, y: op})
        if i % 1000 == 0:
            print("iteration= ", i, "cost= ", res)
    print("Validating output for XNOR GATE")
    # Forward pass on the full truth table: values near 1 for equal inputs,
    # near 0 for unequal inputs, indicate the gate was learned.
    result = sess.run(z2, feed_dict={x: inp})
    print(result)
    print(sess.run(weights))


iteration=  0 cost=  0.887876
iteration=  1000 cost=  0.67232
iteration=  2000 cost=  0.637792
iteration=  3000 cost=  0.611275
iteration=  4000 cost=  0.585045
iteration=  5000 cost=  0.556807
iteration=  6000 cost=  0.525693
iteration=  7000 cost=  0.491379
iteration=  8000 cost=  0.453967
iteration=  9000 cost=  0.414074
iteration=  10000 cost=  0.372873
iteration=  11000 cost=  0.331931
iteration=  12000 cost=  0.292862
Validating output for XNOR GATE
[[ 0.82871932]
 [ 0.23158985]
 [ 0.28528056]
 [ 0.68104416]]
[[-1.29240608 -1.93940651  3.30660963 -3.62568855 -0.31293344 -1.45087087]
 [-1.26238441 -2.40155053 -1.29829419 -3.47788453  2.08266759 -1.92081583]]

In [ ]: