In [1]:
require 'nn';      -- core neural network modules
require 'rnn';     -- recurrent modules (nn.Recurrent, nn.Sequencer, ...)
matio = require 'matio'   -- loads MATLAB .mat files as torch tensors
In [2]:
-- ex4data1.mat (the Coursera ML exercise 4 digit set): X holds 5000 flattened
-- 20x20 grayscale images (400 features each), y holds the labels 1..10
data = matio.load('ex4data1.mat')
trainset = {}
trainset.data = data.X                    -- 5000 x 400
trainset.label = data.y[{ {}, 1 }]        -- 5000-element label vector
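A quick shape check helps confirm the load before going further (a minimal sketch, assuming the cell above ran successfully):

-- expected: 5000 x 400 data matrix and 5000 labels in the range 1..10
print(trainset.data:size())
print(trainset.label:size())
print(trainset.label:min(), trainset.label:max())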
In [4]:
-- make trainset behave like a torch dataset: trainset[i] returns {input, label}
-- and trainset:size() returns the number of examples
setmetatable(trainset,
    {__index = function(t, i)
        return {t.data[i], t.label[i]}
    end}
);
function trainset:size()
    return self.data:size(1)
end
In [6]:
-- standardize every pixel column: subtract its mean and divide by its
-- standard deviation, skipping columns that are constant (stdv == 0)
mean = {}
stdv = {}
for i = 1, 400 do
    mean[i] = trainset.data[{ {}, {i} }]:mean()
    stdv[i] = trainset.data[{ {}, {i} }]:std()
    --print(i .. 'th mean: ' .. mean[i])
    --print(i .. 'th std dev: ' .. stdv[i])
    trainset.data[{ {}, {i} }]:add(-mean[i])
    if stdv[i] ~= 0 then
        trainset.data[{ {}, {i} }]:div(stdv[i])
    end
end
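A spot check on the standardization (a sketch; column 200 is an arbitrary interior pixel, not something from the original notebook): after the loop, any non-constant column should have mean close to 0 and standard deviation close to 1.

-- verify one standardized column
print(trainset.data[{ {}, {200} }]:mean())   -- ~0
print(trainset.data[{ {}, {200} }]:std())    -- ~1, unless the column was constant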
In [7]:
-- each 400-pixel image is presented to the RNN as a sequence of rho = 20 steps,
-- each step seeing the next nIndex = 20 pixels of the flattened image
batchSize = 5000     -- the whole training set is processed as a single batch
rho = 20             -- sequence length / BPTT horizon
hiddenSize = 10      -- size of the recurrent hidden state
nIndex = 20          -- input features per time step
nClass = 10          -- output classes (digits, with label 10 standing for 0)
In [8]:
-- h_t = tanh(W_hx * x_t + W_hh * h_{t-1}), followed by a linear classifier
-- and a log-softmax over the nClass digits
rnn = nn.Sequential()
r = nn.Recurrent(
    hiddenSize,                          -- size of the hidden state
    nn.Linear(nIndex, hiddenSize),       -- input transform  W_hx
    nn.Linear(hiddenSize, hiddenSize),   -- feedback transform W_hh
    nn.Tanh(),                           -- transfer function
    rho                                  -- backpropagate through at most rho steps
)
rnn:add(r)
rnn:add(nn.Linear(hiddenSize, nClass))
rnn:add(nn.LogSoftMax())
rnn = nn.Sequencer(rnn)   -- apply the step module to a whole table of time steps
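A small smoke test makes the sequencer's contract explicit (a sketch with dummy data, not part of the original run): it expects a table of rho tensors of size batch x nIndex and returns a table of rho tensors of size batch x nClass.

-- forward a dummy 2-example sequence and inspect the output shapes
local dummy = {}
for step = 1, rho do
    table.insert(dummy, torch.randn(2, nIndex))
end
local out = rnn:forward(dummy)
print(#out)             -- expect rho
print(out[1]:size())    -- expect 2 x nClass
rnn:forget()            -- clear the recurrent state again before real training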
In [9]:
-- ClassNLLCriterion applied at every time step; the per-step losses are summed
criterion = nn.SequencerCriterion(nn.ClassNLLCriterion())
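SequencerCriterion applies the wrapped ClassNLLCriterion at each time step and sums the results, which is why the training loop below divides the error by rho to report a per-step average. A tiny check of that behaviour with dummy data (a sketch, not from the original notebook):

-- rho steps of random log-probabilities for 2 examples, all labelled class 1
local dummyOut, dummyTgt = {}, {}
for step = 1, rho do
    table.insert(dummyOut, nn.LogSoftMax():forward(torch.randn(2, nClass)))
    table.insert(dummyTgt, torch.Tensor(2):fill(1))
end
print(criterion:forward(dummyOut, dummyTgt) / rho)   -- roughly log(nClass) for random outputs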
In [10]:
lr = 0.1   -- learning rate for the plain SGD updates below
i = 1
In [11]:
prev = 100
for epoch = 1, 1e3 do
    -- build the sequence: step t feeds columns (t-1)*nIndex+1 .. t*nIndex of every
    -- image, and every step is trained against the same class label
    local inputs, targets = {}, {}
    for step = 1, rho do
        table.insert(inputs, trainset.data[{ {}, {(step-1)*nIndex + 1, step*nIndex} }])
        table.insert(targets, trainset.label[{ {} }])
    end
    local outputs = rnn:forward(inputs)
    local err = criterion:forward(outputs, targets)
    if epoch % 10 == 1 then print(epoch, err/rho) end   -- report the average per-step loss
    local gradOutputs = criterion:backward(outputs, targets)
    rnn:backward(inputs, gradOutputs)
    rnn:updateParameters(lr)    -- vanilla SGD step
    rnn:zeroGradParameters()
end
Out[11]:
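At this point the trained network can be persisted with torch's serializer so the evaluation below can also be rerun later without retraining (an optional sketch; the filename is arbitrary):

-- save the trained sequencer; reload with rnn = torch.load('digit-rnn.t7')
torch.save('digit-rnn.t7', rnn)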
In [12]:
-- evaluation: feed each image as a rho-step sequence and count a hit when the
-- prediction after d steps (d = 10..20) matches the true label, i.e. measure
-- how accuracy improves as the network sees more of the image
correction = {}
for d = 10, 20 do
    correction[d] = 0
end
for i = 1, trainset:size() do
    local answer = trainset.label[i]
    local inputs = {}
    for step = 1, rho do
        table.insert(inputs, trainset.data[{ {i}, {(step-1)*nIndex + 1, step*nIndex} }])
    end
    local prediction = rnn:forward(inputs)
    for d = 10, 20 do
        local guess = prediction[d][{1, {}}]                   -- log-probabilities after d steps
        local confidences, indices = torch.sort(guess, true)   -- sort descending
        -- if i%100 == 1 then print(answer, guess, indices[1]) end
        if answer == indices[1] then
            correction[d] = correction[d] + 1
        end
    end
end
for d = 10, 20 do
    print(d, " = ", correction[d], 100*correction[d]/trainset:size() .. '%')
end
Out[12]:
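As a final usage example (a sketch, assuming the cells above have run), the same per-step slicing classifies a single image by taking the prediction at the last time step:

-- classify training example 1 and report the most likely class after all rho steps
local inputs = {}
for step = 1, rho do
    table.insert(inputs, trainset.data[{ {1}, {(step-1)*nIndex + 1, step*nIndex} }])
end
local logProbs = rnn:forward(inputs)[rho][{1, {}}]
local _, best = torch.max(logProbs, 1)
print('true label: ' .. trainset.label[1] .. ', predicted: ' .. best[1])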