# In[ ]:  (Jupyter notebook cell marker, commented out so the file parses as Python)
import numpy as np
import theano
import theano.tensor as T
class Layer(object):
    """One fully connected neural-network layer built on Theano shared variables.

    Computes ``outputs = activation(inputs . W + b)``, or the linear
    ``inputs . W + b`` when no activation function is given.

    Parameters
    ----------
    inputs : Theano symbolic variable
        Symbolic input to the layer; assumed shape (n_samples, in_size)
        — TODO confirm against callers.
    in_size : int
        Number of input features.
    out_size : int
        Number of output units.
    activation_function : callable or None
        Elementwise nonlinearity (e.g. ``T.nnet.relu``); ``None`` yields
        a purely linear layer.
    """

    def __init__(self, inputs, in_size, out_size, activation_function=None):
        # Weights: standard-normal init; biases start at 0.1 rather than 0
        # so that units (e.g. ReLU) are initially active.
        self.w = theano.shared(np.random.normal(0, 1, (in_size, out_size)))
        self.b = theano.shared(np.zeros((out_size,)) + 0.1)
        # BUG FIX: the original referenced bare `w` (NameError at first
        # construction); the shared weight matrix is `self.w`.
        self.wx_plus_b = T.dot(inputs, self.w) + self.b
        self.activation_function = activation_function
        # Idiom fix: identity comparison against None, not `== None`.
        if activation_function is None:
            self.outputs = self.wx_plus_b
        else:
            self.outputs = self.activation_function(self.wx_plus_b)
# BUG FIX: `inputs` was used without ever being defined (NameError).
# Declare a symbolic float64 matrix placeholder (n_samples x 10 features)
# before wiring it into the layer.
inputs = T.dmatrix('inputs')
l1 = Layer(inputs, 10, 1, T.nnet.relu)