In [36]:
import numpy as np
import theano
import theano.tensor as T
from __future__ import division

def accuracy(y_target, y_predict):
    correct = np.equal(y_target, y_predict)
    accuracy_socere = np.sum(correct) / len(correct) 
    return accuracy_socere

rng = np.random

#Data and feature variable
N, feats = 400, 784

In [40]:
D = (rng.randn(N, feats), rng.randint(size=N , low=0, high=2))

In [16]:
#declare theano symbolic variable
x = T.dmatrix('x')
y = T.dvector('y')

w = theano.shared(rng.randn(feats), name = 'w')
b = theano.shared(0.1, name = 'b')

#construct expression graph
p_1 = T.nnet.sigmoid(T.dot(x, w) + b)
prediction = p_1 > 0.5

#xent = T.nnet.binary_crossentropy(p_1, y) 
xent = -y * T.log(p_1) - (1 - y) * T.log(1 - p_1)
#cost = xent.mean()
cost = xent.mean() - 0.01 * (w**2).sum()

gw, gb = T.grad(cost, [w, b])

#compile
learning_rate = 0.1
train = theano.function([x, y], [prediction, xent.mean()], updates=((w, w - learning_rate * gb),(b, b - learning_rate * gb)))
predict = theano.function([x], prediction)

In [41]:
#Training
for i in range(500):
    pred, err = train(D[0], D[1])
    if i % 50 == 0:
        print "loss is ", err
        print "accuracy is ", accuracy(D[1], predict(D[0]))


loss is  1.31290051926
accuracy is  0.505
loss is  0.179308733232
accuracy is  0.9875
loss is  0.101845526928
accuracy is  1.0
loss is  0.0771790089458
accuracy is  1.0
loss is  0.0657142916187
accuracy is  1.0
loss is  0.0594480094746
accuracy is  1.0
loss is  0.0557085164475
accuracy is  1.0
loss is  0.0533558543766
accuracy is  1.0
loss is  0.0518245747071
accuracy is  1.0
loss is  0.0508045480885
accuracy is  1.0