In [36]:
from __future__ import division
import numpy as np
import theano
import theano.tensor as T
def accuracy(y_target, y_predict):
    """Return the fraction of predictions that match the targets.

    Parameters
    ----------
    y_target : array-like
        Ground-truth labels.
    y_predict : array-like
        Predicted labels, same length as y_target.

    Returns
    -------
    float
        Number of element-wise matches divided by the total count
        (true division is guaranteed by the __future__ import above).
    """
    correct = np.equal(y_target, y_predict)
    # fixed typo: was `accuracy_socere`
    accuracy_score = np.sum(correct) / len(correct)
    return accuracy_score
rng = np.random
# Seed the global NumPy RNG so the synthetic data -- and therefore the whole
# training run -- is reproducible across kernel restarts (the original had no
# seed, making every run's numbers different).
rng.seed(0)
# Data and feature dimensions: N samples, each with `feats` input features.
N, feats = 400, 784
In [40]:
D = (rng.randn(N, feats), rng.randint(size=N , low=0, high=2))
In [16]:
# Declare Theano symbolic variables for a batch of inputs and labels.
x = T.dmatrix('x')
y = T.dvector('y')
# Model parameters as shared variables so updates persist across calls.
w = theano.shared(rng.randn(feats), name='w')
b = theano.shared(0.1, name='b')

# Construct the expression graph for logistic regression.
p_1 = T.nnet.sigmoid(T.dot(x, w) + b)  # probability that the label is 1
prediction = p_1 > 0.5                 # hard 0/1 prediction
# Per-sample cross-entropy (equivalent to T.nnet.binary_crossentropy(p_1, y)).
xent = -y * T.log(p_1) - (1 - y) * T.log(1 - p_1)
# L2-regularized cost. BUGFIX: the penalty must be ADDED, not subtracted --
# subtracting it rewards large weights and makes the cost unbounded below.
cost = xent.mean() + 0.01 * (w ** 2).sum()
gw, gb = T.grad(cost, [w, b])

# Compile the training and prediction functions.
learning_rate = 0.1
train = theano.function(
    [x, y],
    [prediction, xent.mean()],
    # BUGFIX: w must step along its OWN gradient gw; the original used gb
    # (the scalar bias gradient) for both parameters, so the weights never
    # descended their gradient.
    updates=((w, w - learning_rate * gw), (b, b - learning_rate * gb)))
predict = theano.function([x], prediction)
In [41]:
#Training
for i in range(500):
pred, err = train(D[0], D[1])
if i % 50 == 0:
print "loss is ", err
print "accuracy is ", accuracy(D[1], predict(D[0]))