In [2]:
import tensorflow as tf
import numpy as np

# Single-neuron logistic regression learning the AND gate (TF 1.x graph API).
x = tf.placeholder(tf.float32, shape=[None, 2])  # two binary inputs per example
y = tf.placeholder(tf.float32, shape=[None, 1])  # target label in {0, 1}

weights = tf.Variable(tf.random_normal([2, 1]), dtype=tf.float32)
bias = tf.Variable(tf.random_normal([1]), dtype=tf.float32)

# Forward pass: z = sigmoid(x @ W + b)
logits = tf.add(tf.matmul(x, weights), bias)
z = tf.nn.sigmoid(logits)

# Binary cross-entropy. Clip the probability away from exactly 0 and 1 so
# tf.log never receives 0 — otherwise the cost becomes NaN once the sigmoid
# saturates late in training.
eps = 1e-7
z_safe = tf.clip_by_value(z, eps, 1.0 - eps)
cost = -tf.reduce_mean(y * tf.log(z_safe) + (1 - y) * tf.log(1 - z_safe))
optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(cost)

inp = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
op = np.array([[0], [0], [0], [1]])  # AND truth table
with tf.Session() as sess:

    tf.global_variables_initializer().run()
    for i in range(12001):
        res, _ = sess.run([cost, optimizer], feed_dict={x: inp, y: op})
        if i % 1000 == 0:
            print("iteration= ", i, "cost= ", res)
    print("Validating output for AND GATE")
    result = sess.run(z, feed_dict={x: inp})
    print(result)


iteration=  0 cost=  0.649264
iteration=  1000 cost=  0.461209
iteration=  2000 cost=  0.361876
iteration=  3000 cost=  0.300671
iteration=  4000 cost=  0.258616
iteration=  5000 cost=  0.227533
iteration=  6000 cost=  0.203403
iteration=  7000 cost=  0.184011
iteration=  8000 cost=  0.168027
iteration=  9000 cost=  0.154595
iteration=  10000 cost=  0.143131
iteration=  11000 cost=  0.133224
iteration=  12000 cost=  0.124574
Validating output for AND GATE
[[ 0.00538206]
 [ 0.13219047]
 [ 0.13284305]
 [ 0.81176299]]

In [3]:
import tensorflow as tf
import numpy as np

# Single-neuron logistic regression learning the OR gate (TF 1.x graph API).
x = tf.placeholder(tf.float32, shape=[None, 2])  # two binary inputs per example
y = tf.placeholder(tf.float32, shape=[None, 1])  # target label in {0, 1}

weights = tf.Variable(tf.random_normal([2, 1]), dtype=tf.float32)
bias = tf.Variable(tf.random_normal([1]), dtype=tf.float32)

# Forward pass: z = sigmoid(x @ W + b)
logits = tf.add(tf.matmul(x, weights), bias)
z = tf.nn.sigmoid(logits)

# Binary cross-entropy. Clip the probability away from exactly 0 and 1 so
# tf.log never receives 0 — otherwise the cost becomes NaN once the sigmoid
# saturates late in training.
eps = 1e-7
z_safe = tf.clip_by_value(z, eps, 1.0 - eps)
cost = -tf.reduce_mean(y * tf.log(z_safe) + (1 - y) * tf.log(1 - z_safe))
optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(cost)

inp = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
op = np.array([[0], [1], [1], [1]])  # OR truth table
with tf.Session() as sess:

    tf.global_variables_initializer().run()
    for i in range(12001):
        res, _ = sess.run([cost, optimizer], feed_dict={x: inp, y: op})
        if i % 1000 == 0:
            print("iteration= ", i, "cost= ", res)
    print("Validating output for OR GATE")
    result = sess.run(z, feed_dict={x: inp})
    print(result)


iteration=  0 cost=  0.434314
iteration=  1000 cost=  0.325146
iteration=  2000 cost=  0.256231
iteration=  3000 cost=  0.210026
iteration=  4000 cost=  0.177188
iteration=  5000 cost=  0.152755
iteration=  6000 cost=  0.133924
iteration=  7000 cost=  0.119005
iteration=  8000 cost=  0.106921
iteration=  9000 cost=  0.0969515
iteration=  10000 cost=  0.0885997
iteration=  11000 cost=  0.0815107
iteration=  12000 cost=  0.075425
Validating output for OR GATE
[[ 0.15865861]
 [ 0.93761951]
 [ 0.93830144]
 [ 0.99917573]]

In [15]:
import tensorflow as tf
import numpy as np

# Single-neuron logistic regression learning the NOR gate (TF 1.x graph API).
x = tf.placeholder(tf.float32, shape=[None, 2])  # two binary inputs per example
y = tf.placeholder(tf.float32, shape=[None, 1])  # target label in {0, 1}

weights = tf.Variable(tf.random_normal([2, 1]), dtype=tf.float32)
bias = tf.Variable(tf.random_normal([1]), dtype=tf.float32)

# Forward pass: z = sigmoid(x @ W + b)
logits = tf.add(tf.matmul(x, weights), bias)
z = tf.nn.sigmoid(logits)

# Binary cross-entropy. Clip the probability away from exactly 0 and 1 so
# tf.log never receives 0 — otherwise the cost becomes NaN once the sigmoid
# saturates late in training.
eps = 1e-7
z_safe = tf.clip_by_value(z, eps, 1.0 - eps)
cost = -tf.reduce_mean(y * tf.log(z_safe) + (1 - y) * tf.log(1 - z_safe))
optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(cost)

inp = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
op = np.array([[1], [0], [0], [0]])  # NOR truth table
with tf.Session() as sess:

    tf.global_variables_initializer().run()
    for i in range(12001):
        res, _ = sess.run([cost, optimizer], feed_dict={x: inp, y: op})
        if i % 1000 == 0:
            print("iteration= ", i, "cost= ", res)
    print("Validating output for NOR GATE")
    result = sess.run(z, feed_dict={x: inp})
    print(result)


iteration=  0 cost=  0.492456
iteration=  1000 cost=  0.360434
iteration=  2000 cost=  0.278884
iteration=  3000 cost=  0.225514
iteration=  4000 cost=  0.188372
iteration=  5000 cost=  0.161178
iteration=  6000 cost=  0.140477
iteration=  7000 cost=  0.124236
iteration=  8000 cost=  0.111184
iteration=  9000 cost=  0.100487
iteration=  10000 cost=  0.0915746
iteration=  11000 cost=  0.0840453
iteration=  12000 cost=  0.0776082
Validating output for NOR GATE
[[ 0.83699691]
 [ 0.06316088]
 [ 0.06417812]
 [ 0.00089962]]

In [12]:
import tensorflow as tf
import numpy as np

# XOR is not linearly separable, so a single sigmoid neuron (used in the
# previous cells) cannot learn it. This cell adds one hidden layer of two
# sigmoid units: a 2-2-1 network (TF 1.x graph API).
x = tf.placeholder(tf.float32, shape=[None, 2])  # two binary inputs per example
y = tf.placeholder(tf.float32, shape=[None, 1])  # target label in {0, 1}

# Layer 1: 2 inputs -> 2 hidden units; Layer 2: 2 hidden -> 1 output.
# Uniform init in [-1, 1) breaks symmetry between the two hidden units.
Theta1 = tf.Variable(tf.random_uniform([2, 2], -1, 1), name="Theta1")
Theta2 = tf.Variable(tf.random_uniform([2, 1], -1, 1), name="Theta2")
Bias1 = tf.Variable(tf.zeros([2]), name="Bias1")
Bias2 = tf.Variable(tf.zeros([1]), name="Bias2")
A2 = tf.sigmoid(tf.matmul(x, Theta1) + Bias1)        # hidden activations
z = tf.sigmoid(tf.matmul(A2, Theta2) + Bias2)        # network output

# Binary cross-entropy. Clip the probability away from exactly 0 and 1 so
# tf.log never receives 0 — otherwise the cost becomes NaN once the sigmoid
# saturates late in training.
eps = 1e-7
z_safe = tf.clip_by_value(z, eps, 1.0 - eps)
cost = -tf.reduce_mean(y * tf.log(z_safe) + (1 - y) * tf.log(1 - z_safe))
optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(cost)

inp = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
op = np.array([[0], [1], [1], [0]])  # XOR truth table
with tf.Session() as sess:

    tf.global_variables_initializer().run()
    # Many more iterations than the linear gates: the loss plateau near
    # log(2) ≈ 0.693 takes a long time to escape at lr = 0.01.
    for i in range(100000):
        res, _ = sess.run([cost, optimizer], feed_dict={x: inp, y: op})
        if i % 1000 == 0:
            print("iteration = ", i, "cost= ", res)
    print("Validating output for XOR GATE")
    result = sess.run(z, feed_dict={x: inp})
    print(result)


iteration =  0 cost=  0.696949
iteration =  1000 cost=  0.695423
iteration =  2000 cost=  0.694884
iteration =  3000 cost=  0.694513
iteration =  4000 cost=  0.694247
iteration =  5000 cost=  0.69405
iteration =  6000 cost=  0.693901
iteration =  7000 cost=  0.693784
iteration =  8000 cost=  0.693692
iteration =  9000 cost=  0.693616
iteration =  10000 cost=  0.693554
iteration =  11000 cost=  0.693501
iteration =  12000 cost=  0.693457
iteration =  13000 cost=  0.693419
iteration =  14000 cost=  0.693386
iteration =  15000 cost=  0.693357
iteration =  16000 cost=  0.693332
iteration =  17000 cost=  0.69331
iteration =  18000 cost=  0.69329
iteration =  19000 cost=  0.693272
iteration =  20000 cost=  0.693256
iteration =  21000 cost=  0.693242
iteration =  22000 cost=  0.693229
iteration =  23000 cost=  0.693217
iteration =  24000 cost=  0.693206
iteration =  25000 cost=  0.693196
iteration =  26000 cost=  0.693187
iteration =  27000 cost=  0.693178
iteration =  28000 cost=  0.69317
iteration =  29000 cost=  0.693163
iteration =  30000 cost=  0.693155
iteration =  31000 cost=  0.693149
iteration =  32000 cost=  0.693142
iteration =  33000 cost=  0.693136
iteration =  34000 cost=  0.693129
iteration =  35000 cost=  0.693123
iteration =  36000 cost=  0.693117
iteration =  37000 cost=  0.693111
iteration =  38000 cost=  0.693104
iteration =  39000 cost=  0.693098
iteration =  40000 cost=  0.693091
iteration =  41000 cost=  0.693083
iteration =  42000 cost=  0.693075
iteration =  43000 cost=  0.693066
iteration =  44000 cost=  0.693057
iteration =  45000 cost=  0.693047
iteration =  46000 cost=  0.693035
iteration =  47000 cost=  0.693021
iteration =  48000 cost=  0.693006
iteration =  49000 cost=  0.692989
iteration =  50000 cost=  0.692969
iteration =  51000 cost=  0.692946
iteration =  52000 cost=  0.692918
iteration =  53000 cost=  0.692885
iteration =  54000 cost=  0.692845
iteration =  55000 cost=  0.692797
iteration =  56000 cost=  0.692738
iteration =  57000 cost=  0.692664
iteration =  58000 cost=  0.692571
iteration =  59000 cost=  0.692454
iteration =  60000 cost=  0.692302
iteration =  61000 cost=  0.692105
iteration =  62000 cost=  0.691844
iteration =  63000 cost=  0.691495
iteration =  64000 cost=  0.691024
iteration =  65000 cost=  0.690382
iteration =  66000 cost=  0.689504
iteration =  67000 cost=  0.688302
iteration =  68000 cost=  0.686667
iteration =  69000 cost=  0.684471
iteration =  70000 cost=  0.681572
iteration =  71000 cost=  0.677819
iteration =  72000 cost=  0.673061
iteration =  73000 cost=  0.667136
iteration =  74000 cost=  0.659872
iteration =  75000 cost=  0.651097
iteration =  76000 cost=  0.640672
iteration =  77000 cost=  0.628553
iteration =  78000 cost=  0.614839
iteration =  79000 cost=  0.599775
iteration =  80000 cost=  0.583691
iteration =  81000 cost=  0.566888
iteration =  82000 cost=  0.549543
iteration =  83000 cost=  0.531655
iteration =  84000 cost=  0.513036
iteration =  85000 cost=  0.493352
iteration =  86000 cost=  0.472187
iteration =  87000 cost=  0.449152
iteration =  88000 cost=  0.424001
iteration =  89000 cost=  0.396758
iteration =  90000 cost=  0.367796
iteration =  91000 cost=  0.337831
iteration =  92000 cost=  0.307799
iteration =  93000 cost=  0.278672
iteration =  94000 cost=  0.251263
iteration =  95000 cost=  0.226127
iteration =  96000 cost=  0.20353
iteration =  97000 cost=  0.183506
iteration =  98000 cost=  0.165926
iteration =  99000 cost=  0.150573
Validating output for XOR GATE
[[ 0.07469255]
 [ 0.88426852]
 [ 0.88427436]
 [ 0.20160414]]

In [11]:
import tensorflow as tf

# 2-2-1 XOR network again (TF 1.x), this time with fixed-size [4, 2] / [4, 1]
# placeholders since the full truth table is always fed at once.
x_ = tf.placeholder(tf.float32, shape=[4, 2], name="x-input")
y_ = tf.placeholder(tf.float32, shape=[4, 1], name="y-input")
Theta1 = tf.Variable(tf.random_uniform([2, 2], -1, 1), name="Theta1")
Theta2 = tf.Variable(tf.random_uniform([2, 1], -1, 1), name="Theta2")
Bias1 = tf.Variable(tf.zeros([2]), name="Bias1")
Bias2 = tf.Variable(tf.zeros([1]), name="Bias2")
A2 = tf.sigmoid(tf.matmul(x_, Theta1) + Bias1)            # hidden activations
Hypothesis = tf.sigmoid(tf.matmul(A2, Theta2) + Bias2)    # network output
# Binary cross-entropy loss.
cost = tf.reduce_mean(((y_ * tf.log(Hypothesis)) +
        ((1 - y_) * tf.log(1.0 - Hypothesis))) * -1)
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cost)
XOR_X = [[0, 0], [0, 1], [1, 0], [1, 1]]
XOR_Y = [[0], [1], [1], [0]]

init = tf.global_variables_initializer()

# BUG FIX: in the original, the `if i % 1000 == 0` print sat OUTSIDE the
# training loop (wrong indentation). It therefore ran only once, after
# training — and since the loop exits with i == 99999, the modulus guard was
# false and the cell printed nothing at all. The print now runs inside the
# loop. Also use a context manager so the session is always closed.
with tf.Session() as sess:
    sess.run(init)
    for i in range(100000):
        sess.run(train_step, feed_dict={x_: XOR_X, y_: XOR_Y})
        if i % 1000 == 0:
            print('Hypothesis ', sess.run(Hypothesis, feed_dict={x_: XOR_X, y_: XOR_Y}))

In [ ]: