In [1]:
import numpy as np

In [2]:
# sigmoid function and its derivative
def nonlin(x, deriv=False):
    # when deriv=True, x is assumed to already be a sigmoid output,
    # so the slope of the sigmoid there is simply x*(1-x)
    if deriv:
        return x*(1-x)
    return 1/(1+np.exp(-x))
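
A quick illustrative check (added here, not in the original): called normally, nonlin squashes its input into (0, 1); called with deriv=True on a value that is already a sigmoid output, it returns the slope of the sigmoid at that point.

In [ ]:
# illustrative: sigmoid of 0 is 0.5, and the sigmoid's slope there is 0.25
print(nonlin(0.0))                      # 0.5
print(nonlin(nonlin(0.0), deriv=True))  # 0.5 * (1 - 0.5) = 0.25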

In [3]:
# input dataset
X = np.array([[0,0,1],
              [0,1,1],
              [1,0,1],
              [1,1,1]])

In [4]:
# output dataset
y = np.array([[0,0,1,1]]).T
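
A small sanity check (added, not in the original): the target column is exactly the first column of X, so the network only has to learn to pass that feature through and ignore the other two.

In [ ]:
# shapes line up, and y equals the first input column
print(X.shape, y.shape)              # (4, 3) (4, 1)
print(np.array_equal(y, X[:, [0]]))  # True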

In [5]:
# seed random numbers to make calculation
# deterministic (just a good practice)
np.random.seed(1)

In [6]:
# initialize weights randomly with mean 0
syn0 = 2*np.random.random((3,1)) - 1
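
A brief shape note (added): syn0 is the only weight matrix of this one-layer network, sized (3, 1) so the three input features map to the single output unit; scaling np.random.random into [-1, 1) is what gives it mean 0.

In [ ]:
# illustrative: 3 input features -> 1 output unit, values drawn from [-1, 1)
print(syn0.shape)                            # (3, 1)
print(syn0.min() >= -1 and syn0.max() < 1)   # True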

In [7]:
l0 = X

In [14]:
for i in range(10000):

    # forward propagation: l0 is the input layer (set to X above),
    # l1 is the network's output
    l1 = nonlin(np.dot(l0, syn0))

    # how much did we miss?
    l1_error = y - l1

    # multiply how much we missed by the
    # slope of the sigmoid at the values in l1
    l1_delta = l1_error * nonlin(l1, True)

    # update weights (full-batch gradient step, learning rate 1)
    syn0 += np.dot(l0.T, l1_delta)

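The update above amounts to full-batch gradient descent with step size 1 on the squared-error loss 0.5*np.sum((y - l1)**2): l1_error * nonlin(l1, True) is the chain rule through the sigmoid, and the dot with l0.T sums over the four examples. A minimal numerical check of that reading, with a fresh weight vector w introduced only for this sketch:

In [ ]:
# illustrative gradient check: the loop's update l0.T @ (l1_error * nonlin(l1, True))
# should equal the negative gradient of the squared-error loss
w = 2*np.random.random((3,1)) - 1          # fresh weights, used only for this check

def loss(w):
    return 0.5*np.sum((y - nonlin(np.dot(X, w)))**2)

pred = nonlin(np.dot(X, w))
analytic = -np.dot(X.T, (y - pred) * nonlin(pred, True))    # d(loss)/dw via chain rule

eps = 1e-6
numeric = np.zeros_like(w)
for j in range(w.size):
    w_plus, w_minus = w.copy(), w.copy()
    w_plus[j] += eps
    w_minus[j] -= eps
    numeric[j] = (loss(w_plus) - loss(w_minus)) / (2*eps)   # central difference

print(np.allclose(analytic, numeric))   # expect True
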
In [15]:
print('Output after training')
print(l1)


Output after training
[[ 0.00170954]
 [ 0.00139493]
 [ 0.99886139]
 [ 0.99860451]]
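
As an optional follow-up (not in the original), the trained weights can be inspected directly; since the output above tracks the first input column, the first weight should come out strongly positive relative to the other two.

In [ ]:
# the learned weights; the first should be the largest (positive),
# since the output follows the first input column
print(syn0)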
