Artificial Intelligence for Humans
Introduction to the Math of Neural Networks
If you were to write the above summation, $\sum_{i=1}^{10} 2i$, as code (using NumPy/Python) you would have the following:
In [3]:
import numpy as np
i = np.arange(1,11) # 11, because arange is not inclusive
s = np.sum(2*i)
print(s)
More traditional looping (non-NumPy) would perform the summation as follows:
In [4]:
s = 0
for i in range(1, 11):
    s += 2*i
print(s)
Similarly, the product $\prod_{i=1}^{5} 2i$ can be computed with np.prod:
In [11]:
import numpy as np
i = np.arange(1,6) # 6, because arange is not inclusive
s = np.prod(2*i)
print(s)
In [12]:
s = 1
for i in range(1, 6): # 6, because range is not inclusive
    s *= 2*i
print(s)
A neural network passes each neuron's weighted sum through a transfer (activation) function. The simplest is the linear transfer function, which returns its input unchanged:
In [32]:
import numpy as np
import matplotlib.pyplot as plt
def linear_transfer(t):
    return t
x = np.arange(-5.0, 5.0, 0.02)
plt.xlabel("x")
plt.ylabel("y")
plt.plot(x, linear_transfer(x), 'r')
plt.show()
The ReLU (rectified linear unit) transfer function returns zero for negative inputs and the input itself otherwise:
In [31]:
import numpy as np
import matplotlib.pyplot as plt
def relu_transfer(t):
    return np.maximum(0, t)
x = np.arange(-5.0, 5.0, 0.02)
plt.ylim([-2,3])
plt.xlim([-5,4])
plt.xlabel("x")
plt.ylabel("y")
plt.plot(x, relu_transfer(x), 'r')
plt.show()
np.maximum compares its arguments element-wise, broadcasting a scalar against an array:
In [27]:
x = np.array([1, 2, 3, 4, 5])
print(np.maximum(x, 10)) # prints [10 10 10 10 10], since 10 exceeds every element
The sigmoid (logistic) transfer function squashes its input into the range (0, 1):
In [33]:
import numpy as np
import matplotlib.pyplot as plt
def sigmoid_transfer(t):
    return 1.0 / (1 + np.exp(-t))
x = np.arange(-5.0, 5.0, 0.02)
plt.xlabel("x")
plt.ylabel("y")
plt.plot(x, sigmoid_transfer(x), 'r')
plt.show()
The hyperbolic tangent transfer function squashes its input into the range (-1, 1):
In [34]:
import numpy as np
import matplotlib.pyplot as plt
def tanh_transfer(t):
    return np.tanh(t)
x = np.arange(-5.0, 5.0, 0.02)
plt.xlabel("x")
plt.ylabel("y")
plt.plot(x, tanh_transfer(x), 'r')
plt.show()
In [ ]:
import numpy as np
a = np.array( [1, 1, 0] ) # First 1 is the bias, 1 and 0 are the inputs
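The cell above only defines the augmented input vector. As a minimal sketch of how such a vector is typically used (the weight values and the choice of the sigmoid transfer here are illustrative assumptions, not values from the text), a single neuron's output is the dot product of a weight vector with the inputs, passed through a transfer function:
In [ ]:
import numpy as np

def sigmoid_transfer(t):
    return 1.0 / (1 + np.exp(-t))

a = np.array([1, 1, 0])         # first 1 is the bias input, 1 and 0 are the inputs
w = np.array([0.5, -0.2, 0.8])  # hypothetical weights (bias weight first)

net = np.dot(w, a)              # weighted sum: 0.5*1 + (-0.2)*1 + 0.8*0 = 0.3
out = sigmoid_transfer(net)     # neuron output after the transfer function
print(net, out)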