In [1]:
import numpy as np
import theano.tensor as T
from theano import shared, function
rng = np.random.RandomState(123)
In [2]:
# Create a sample logistic regression problem.
true_w = rng.randn(100)
true_b = rng.randn()
xdata = rng.randn(50, 100)
ydata = (np.dot(xdata, true_w) + true_b) > 0.0
In [7]:
print xdata.shape
print ydata.shape
In [9]:
# Step 1. Declare Theano variables
x = T.dmatrix()               # input features, one row per example
y = T.dvector()               # binary targets
w = shared(rng.randn(100))    # weight vector, randomly initialised
b = shared(np.zeros(()))      # bias, initialised to zero
print "Initial model"
print w.get_value()
print b.get_value()
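Shared variables such as w and b hold state that persists between calls to compiled functions and can be modified through updates. A minimal sketch of that behaviour, separate from the model above (the names counter and increment are illustrative, not part of the original):
counter = shared(0.0)
increment = function(inputs=[], outputs=counter,
                     updates={counter: counter + 1.0})
print increment()          # returns the value before the update: 0.0
print counter.get_value()  # the update has now been applied: 1.0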
In [10]:
# Step 2. Construct Theano expression graph
p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b))            # probability that target = 1
xent = -y * T.log(p_1) - (1 - y) * T.log(1 - p_1)  # per-example cross-entropy
prediction = p_1 > 0.5                             # hard 0/1 prediction
cost = xent.mean() + 0.01 * (w ** 2).sum()         # mean loss + L2 penalty
gw, gb = T.grad(cost, [w, b])                      # gradients w.r.t. w and b
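For reference, the expressions above encode the standard logistic regression objective with an L2 penalty on the weights:
p_1 = \sigma(Xw + b) = \frac{1}{1 + e^{-(Xw + b)}}, \qquad
\mathrm{xent}_i = -y_i \log p_{1,i} - (1 - y_i) \log(1 - p_{1,i}), \qquad
\mathrm{cost} = \frac{1}{n} \sum_{i=1}^{n} \mathrm{xent}_i + 0.01\,\lVert w \rVert_2^2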
In [11]:
# Step 3. Compile expressions to functions
train = function(inputs=[x, y],
                 outputs=[prediction, xent],
                 updates=[(w, w - 0.1 * gw),
                          (b, b - 0.1 * gb)])
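The same graph can also be compiled into a stand-alone prediction function that applies no updates; a minimal sketch (the name predict is an assumption, not from the original):
predict = function(inputs=[x], outputs=prediction)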
In [12]:
# Step 4. Perform computation
for loop in range(100):
    pval, xval = train(xdata, ydata)
    print xval.mean()
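Since train also returns the hard predictions made just before each update, the last pval gives a quick check of training accuracy (a hedged addition, not part of the original transcript):
print (pval == ydata).mean()   # fraction of training examples classified correctly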