My very own mini version of TensorFlow, built to learn neural nets better.
A neural net is just a layer of nodes that takes in inputs and sends out outputs. Those outputs can feed into another layer of nodes, and another, and another, finally ending up at an output layer.
Each node does math on its inputs, like multiplying each input by a weight and summing them up, and then applies an activation function, like a sigmoid or a plain identity `lambda x: x`, to decide what output to send.
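For instance, a single node with a sigmoid activation boils down to a couple of lines. This is just a rough sketch of the idea (the inputs, weights, and bias below are made-up numbers, not part of miniflow itself):
In [ ]:
import numpy as np

def sigmoid(x):
    # squash any real number into (0, 1)
    return 1.0 / (1.0 + np.exp(-x))

# a made-up node: three inputs, three weights, one bias
x = np.array([1.0, 2.0, 3.0])
w = np.array([0.4, -0.2, 0.1])
b = 0.5

# weighted sum of the inputs, then the activation
print(sigmoid(np.dot(x, w) + b))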
In [2]:
import numpy as np
In [30]:
def topological_sort(feed_dict):
    """
    Sort the nodes in topological order using Kahn's Algorithm.

    `feed_dict`: A dictionary where the key is an `Input` Node and the value
    is the respective value fed to that Node.

    Returns a list of sorted nodes.
    """
    input_nodes = [n for n in feed_dict.keys()]

    G = {}
    nodes = [n for n in input_nodes]
    while len(nodes) > 0:
        n = nodes.pop(0)
        if n not in G:
            G[n] = {'in': set(), 'out': set()}
        for m in n.outbound_nodes:
            if m not in G:
                G[m] = {'in': set(), 'out': set()}
            G[n]['out'].add(m)
            G[m]['in'].add(n)
            nodes.append(m)

    L = []
    S = set(input_nodes)
    while len(S) > 0:
        n = S.pop()

        if isinstance(n, Input):
            n.value = feed_dict[n]

        L.append(n)
        for m in n.outbound_nodes:
            G[n]['out'].remove(m)
            G[m]['in'].remove(n)
            # if no other incoming edges, add to S
            if len(G[m]['in']) == 0:
                S.add(m)
    return L


def forward_pass(output_node, sorted_nodes):
    """
    Performs a forward pass through a list of sorted nodes.

    Arguments:
        `output_node`: A node in the graph, should be the output node (have no outgoing edges).
        `sorted_nodes`: A topologically sorted list of nodes.

    Returns the output Node's value.
    """
    for n in sorted_nodes:
        n.forward()

    return output_node.value
In [20]:
class Node(object):
    def __init__(self, inbound_nodes=[]):
        # Node(s) from which this Node receives values
        self.inbound_nodes = inbound_nodes
        # Node(s) to which this Node passes values
        self.outbound_nodes = []
        # For each inbound Node here, add this Node as an outbound Node to _that_ Node.
        for n in self.inbound_nodes:
            n.outbound_nodes.append(self)
        # A calculated value
        self.value = None

    def forward(self):
        """
        Forward propagation.

        Compute the output value based on `inbound_nodes` and
        store the result in self.value.
        """
        raise NotImplementedError


class Input(Node):
    def __init__(self):
        # An Input node has no inbound nodes,
        # so no need to pass anything to the Node instantiator.
        Node.__init__(self)

    # NOTE: Input node is the only node where the value
    # may be passed as an argument to forward().
    #
    # All other node implementations should get the value
    # of the previous node from self.inbound_nodes
    #
    # Example:
    # val0 = self.inbound_nodes[0].value
    def forward(self, value=None):
        # Overwrite the value if one is passed in.
        if value is not None:
            self.value = value
In [66]:
class Add(Node):
    def __init__(self, *inputs):
        Node.__init__(self, [i for i in inputs])

    def forward(self):
        """
        Sum the values of all inbound nodes.
        """
        self.value = 0
        for i in range(len(self.inbound_nodes)):
            self.value += self.inbound_nodes[i].value


class Mul(Node):
    def __init__(self, *inputs):
        Node.__init__(self, [i for i in inputs])

    def forward(self):
        """
        Multiply the values of all inbound nodes.
        """
        self.value = 1
        for i in range(len(self.inbound_nodes)):
            self.value *= self.inbound_nodes[i].value


class Linear(Node):
    def __init__(self, inputs, weights, bias):
        Node.__init__(self, [inputs, weights, bias])

        # NOTE: The weights and bias properties here are not
        # numbers, but rather references to other nodes.
        # The weight and bias values are stored within the
        # respective nodes.

    def forward(self):
        """
        Set self.value to the value of the linear function output.
        """
        inputs = self.inbound_nodes[0].value
        weights = self.inbound_nodes[1].value
        bias = self.inbound_nodes[2].value

        # simple lists version
        # self.value = bias
        # for x, w in zip(inputs, weights):
        #     self.value += x * w

        # numpy version
        self.value = np.dot(inputs, weights) + bias
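The Add and Mul nodes never get exercised below, so here's a quick sanity check I added (the node names and feed values are made up) showing they run through the same topological_sort / forward_pass machinery:
In [ ]:
x, y, z = Input(), Input(), Input()
f = Add(x, y, z)
g = Mul(x, y, z)

feed_dict = {x: 4, y: 5, z: 10}

graph = topological_sort(feed_dict)
print(forward_pass(f, graph))  # 4 + 5 + 10 = 19
print(forward_pass(g, graph))  # 4 * 5 * 10 = 200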
In [61]:
"""
NOTE: Here we're using an Input node for more than a scalar.
In the case of weights and inputs the value of the Input node is
actually a python list!
In general, there's no restriction on the values that can be passed to an Input node.
"""
inputs, weights, bias = Input(), Input(), Input()
f = Linear(inputs, weights, bias)
feed_dict = {
    inputs: [6, 14, 3],
    weights: [0.5, 0.25, 1.4],
    bias: 2
}
graph = topological_sort(feed_dict)
output = forward_pass(f, graph)
print(output) # should be 12.7 with this example
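Quick check by hand: 6 * 0.5 + 14 * 0.25 + 3 * 1.4 + 2 = 3 + 3.5 + 4.2 + 2 = 12.7, which matches.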
In [ ]:
"""
The setup is similar to the prevous `Linear` node you wrote
except you're now using NumPy arrays instead of python lists.
Update the Linear class in miniflow.py to work with
numpy vectors (arrays) and matrices.
Test your code here!
"""
import numpy as np
X, W, b = Input(), Input(), Input()
f = Linear(X, W, b)
X_ = np.array([[-1., -2.], [-1, -2]])
W_ = np.array([[2., -3], [2., -3]])
b_ = np.array([-3., -5])
feed_dict = {X: X_, W: W_, b: b_}
graph = topological_sort(feed_dict)
output = forward_pass(f, graph)
"""
Output should be:
[[-9., 4.],
[-9., 4.]]
"""
print(output)
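Checking the first row by hand: (-1)(2) + (-2)(2) = -6 and (-1)(-3) + (-2)(-3) = 9; adding the bias [-3., -5.] gives [-9., 4.], matching the expected output.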
In [ ]: