In [20]:
# 線形回帰の最尤法
# 勾配降下法
# 従属変数を転置する必要がある。

import numpy as np
import theano as th
import theano.tensor as T
from math import pi

a = np.ones((500, 1))
x = np.random.randn(500,1)
b = 2
c = 4
u = np.random.randn(500,1)
y = c * a + b * x + u

X = np.hstack((a,x))

n = len(y)

tX = th.shared(np.asarray(X, dtype = th.config.floatX), borrow = True)
ty = th.shared(np.asarray(y.T, dtype = th.config.floatX), borrow = True) 
sh_theta = th.shared(np.ones(2, dtype = th.config.floatX), borrow = True, name = 'theta')
sigma = th.shared(1.)

liklihood = - (n / 2.) * T.log(2 * pi * sigma ** 2) - (1/sigma**2) * T.sum((ty - T.dot(tX, sh_theta))**2)

g_theta = T.grad(liklihood, sh_theta)
g_sigma = T.grad(liklihood, sigma)


learning_rate = 0.0001
updates1 = [(sh_theta, sh_theta + learning_rate * g_theta)]
updates2 = [(sigma, sigma + learning_rate * g_sigma)]

train1 = th.function([], liklihood, updates = updates1)
train2 = th.function([], liklihood, updates = updates2)

iteration = 100000
for iter in range(iteration):
    train1()
    train2()

t = sh_theta.get_value() 
s = sigma.get_value()
print 'theta : ' , t
print 'True theta: ', [4,2]
print 'sigma:', s
print 'True sigma:', 1


theta :  [ 4.05360992  1.95336957]
True theta:  [4, 2]
sigma: 1.3160388223
True sigma: 1

In [ ]: