In [29]:
import numpy as np

In [30]:
# Define the sigmoid activation function
def sigmoid(x):
    return 1/(1+np.exp(-x))

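As a quick sanity check (an added sketch, not part of the original run), sigmoid(0) should be exactly 0.5, and large positive or negative inputs should saturate toward 1 or 0:

In [ ]:
# Illustration only: probe the sigmoid at a few points
print(sigmoid(0))                      # 0.5
print(sigmoid(np.array([-10., 10.])))  # approximately [0., 1.]
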
In [31]:
# Network size
N_input = 4
N_hidden = 3
N_output = 2

In [32]:
# Seed the random number generator so the fake data and weights are reproducible
np.random.seed(42)

In [33]:
# Make some fake input data: a single example with four features
X = np.random.randn(4)
print(X)


[ 0.49671415 -0.1382643   0.64768854  1.52302986]

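X here is a single example with N_input = 4 features, so it has shape (4,). Batching is a small extension (an added sketch, not in the original notebook): stacking examples row-wise gives shape (n_records, N_input), and the same np.dot forward pass below then returns one row of activations per example.

In [ ]:
# Illustration only: wrap the single example into a batch of one
X_batch = X[None, :]   # shape (1, 4)
print(X_batch.shape)
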
In [34]:
# Initialize the weights with small, normally distributed random values
weights_input_to_hidden = np.random.normal(0, scale=0.1, size=(N_input, N_hidden))
weights_hidden_to_output = np.random.normal(0, scale=0.1, size=(N_hidden, N_output))
print(weights_input_to_hidden)
print(weights_hidden_to_output)


[[-0.02341534 -0.0234137   0.15792128]
 [ 0.07674347 -0.04694744  0.054256  ]
 [-0.04634177 -0.04657298  0.02419623]
 [-0.19132802 -0.17249178 -0.05622875]]
[[-0.10128311  0.03142473]
 [-0.09080241 -0.14123037]
 [ 0.14656488 -0.02257763]]

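The weight matrices connect each layer to the next, so their shapes follow directly from the layer sizes (a quick check, added here as a sketch):

In [ ]:
# Shape check (illustration only): weights map N_input -> N_hidden -> N_output
print(weights_input_to_hidden.shape)   # (4, 3)
print(weights_hidden_to_output.shape)  # (3, 2)
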
In [35]:
# Forward pass: compute the input to the hidden layer
hidden_layer_in = np.dot(X, weights_input_to_hidden)
print('Hidden-layer Input:')
print(hidden_layer_in)


Hidden-layer Input:
[-0.34365494 -0.29801368  0.00097362]

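The np.dot above computes, for each hidden unit j, the weighted sum over the inputs: h_j = sum_i x_i * w_ij. An equivalent explicit-loop version (a sketch for illustration only) makes that clear:

In [ ]:
# Illustration only: the same computation as np.dot(X, weights_input_to_hidden), written out
hidden_layer_in_loop = np.zeros(N_hidden)
for j in range(N_hidden):
    for i in range(N_input):
        hidden_layer_in_loop[j] += X[i] * weights_input_to_hidden[i, j]
print(np.allclose(hidden_layer_in_loop, hidden_layer_in))  # True
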
In [36]:
# Apply the sigmoid activation to get the hidden-layer output
hidden_layer_out = sigmoid(hidden_layer_in)
print('Hidden-layer Output:')
print(hidden_layer_out)


Hidden-layer Output:
[ 0.41492192  0.42604313  0.5002434 ]

In [37]:
# Repeat for the output layer: weighted sum, then sigmoid activation
output_layer_in = np.dot(hidden_layer_out, weights_hidden_to_output)
output_layer_out = sigmoid(output_layer_in)

print('Output-layer Output:')
print(output_layer_out)


Output-layer Output:
[ 0.49815196  0.48539772]

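Putting the steps together, the whole forward pass can be wrapped in a single function (a sketch; forward_pass is a hypothetical helper name, not from the notebook):

In [ ]:
# Illustrative wrapper around the steps above
def forward_pass(X, W_in_hidden, W_hidden_out):
    hidden_in = np.dot(X, W_in_hidden)            # weighted sum into the hidden layer
    hidden_out = sigmoid(hidden_in)               # hidden-layer activations
    output_in = np.dot(hidden_out, W_hidden_out)  # weighted sum into the output layer
    return sigmoid(output_in)                     # output-layer activations

print(forward_pass(X, weights_input_to_hidden, weights_hidden_to_output))
# Should match the output_layer_out printed above
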
In [ ]: