In [75]:
import numpy as np
import theano
from theano import tensor as T
# Tiny fixed RBM-style parameters for hand-checking prop-up / prop-down math.
# dtype=theano.config.floatX keeps arrays compatible with the configured
# float precision (float32 on GPU, typically float64 on CPU).
W_values = np.array([[1,2],[1,1]], dtype=theano.config.floatX)  # 2x2 weight matrix
bvis_values = np.array([1,1], dtype=theano.config.floatX)  # visible-unit bias
bhid_values = np.array([2,3], dtype=theano.config.floatX)  # hidden-unit bias
W = theano.shared(W_values) # we assume that ``W_values`` contains the
# initial values of your weight matrix
bvis = theano.shared(bvis_values)
bhid = theano.shared(bhid_values)
def t_propup(vis, vis_sum):
    """Symbolic hidden-unit pre-activation and sigmoid activation.

    Unlike the standard RBM prop-up (``dot(vis, W) + bhid``), the hidden
    bias here is scaled per example by ``vis_sum``.

    :param vis: symbolic matrix of visible states, one example per row
    :param vis_sum: symbolic (1, n_examples) row vector; the caller passes
        the per-row sums of ``vis`` (see the cell below)
    :return: list [pre-sigmoid activation, sigmoid(pre-sigmoid activation)]
    """
    # bhid reshaped to a column (n_hidden, 1); dot with the (1, n_examples)
    # row gives an (n_hidden, n_examples) outer product, transposed back to
    # (n_examples, n_hidden) so it lines up with dot(vis, W).
    scaled_bias = T.dot(bhid.reshape([1, bhid.shape[0]]).T, vis_sum).T
    pre_activation = T.dot(vis, W) + scaled_bias
    return [pre_activation, T.nnet.sigmoid(pre_activation)]
# Symbolic input batch (one example per row) and its per-row sums,
# reshaped to the (1, n_examples) layout t_propup expects.
t_ipt = T.matrix()
t_ipt_sum = t_ipt.sum(axis=1).reshape([1,t_ipt.shape[0]])
# NOTE(review): scan with n_steps=1 runs t_propup exactly once and adds a
# leading length-1 iteration axis to each output; calling t_propup directly
# would give the same values without that axis — confirm the wrapper is
# intentional (e.g. as a scan usage experiment).
t_results, t_updates = theano.scan( fn = t_propup,
non_sequences = [t_ipt, t_ipt_sum],
n_steps=1
)
# Compile: concrete input matrix -> [pre-sigmoid, sigmoid] outputs.
tmp_f = theano.function( [t_ipt], t_results, updates = t_updates)
In [11]:
tmp = np.array([[10,10],[-10,-10],[-10,-10]], dtype = theano.config.floatX)
In [18]:
tmp_f(tmp)
Out[18]:
In [78]:
def propdown(hid):
    """Symbolic visible-unit pre-activation and softmax activation.

    :param hid: symbolic matrix of hidden states, one example per row
    :return: list [pre-softmax activation, row-wise softmax of it]
    """
    # Visible bias is broadcast-added across rows.  (The original cell
    # flagged this line as "[edited]" — presumably the bias term was
    # changed at some point; see the bhid-based hand checks below.)
    pre_activation = T.dot(hid, W.T) + bvis
    return [pre_activation, T.nnet.softmax(pre_activation)]
# Symbolic hidden-state batch for the prop-down direction.
ipt = T.matrix()
# NOTE(review): as with the prop-up cell, scan with n_steps=1 only wraps
# propdown's outputs with a leading length-1 axis — confirm intended.
results, updates = theano.scan( fn = propdown,
non_sequences = ipt,
n_steps=1
)
# Compile: concrete hidden matrix -> [pre-softmax, softmax] outputs.
tmp_f2 = theano.function( [ipt], results, updates = updates)
In [79]:
tmp_f2(np.array([[1,1],[0,0],[0,1],[1,0]], dtype = theano.config.floatX))
Out[79]:
In [76]:
(T.dot(np.array([[1,20],[0,0],[0,1],[1,0]], dtype = theano.config.floatX), W.T) + bhid).eval()
Out[76]:
In [77]:
T.nnet.softmax((T.dot(np.array([[1,20],[0,0],[0,1],[1,0]], dtype = theano.config.floatX), W.T) + bhid).eval()).eval()
Out[77]:
In [39]:
T.nnet.softmax(np.array([0,0,0,1], dtype = theano.config.floatX)).eval()
Out[39]: