Logistic regression model

In [54]:
require 'nn'
require 'csvigo'

In [74]:
-- load the CSV as a raw Lua table; each row holds two feature columns and a 0/1 label
loaded = csvigo.load{path='ex2data1.txt', mode='raw'}


Out[74]:
<csv>	parsing file: ex2data1.txt	
<csv>	parsing done	
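
With mode='raw', csvigo returns a plain Lua table of rows. Before building tensors it can help to sanity-check the parsed table; a small sketch (the exact printed form depends on how csvigo parsed the file):

-- inspect the parsed table: number of rows and the layout of the first row
print(#loaded)
print(loaded[1])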

In [75]:
trainset = {}
trainset.data = torch.Tensor(loaded)[{ {},{1,2} }]  -- columns 1-2: input features
trainset.label = torch.Tensor(loaded)[{ {},3 }]     -- column 3: class label (0 or 1)
trainset.label = trainset.label + 1                 -- shift labels to {1,2}: Torch class indices start at 1

In [76]:
trainset


Out[76]:
{
  data : DoubleTensor - size: 100x2
  label : DoubleTensor - size: 100
}

In [77]:
-- ignore setmetatable for now; it is a Lua feature beyond the scope of this tutorial. It sets the index operator.
setmetatable(trainset,
    {__index = function(t,i)
                return {t.data[i], t.label[i]}
        end}
);
-- nn.StochasticGradient also expects the dataset to provide a size() method
function trainset:size()
    return self.data:size(1)
end
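
Together, the __index metamethod and size() give trainset the interface nn.StochasticGradient expects: trainset[i] returns an {input, target} pair and trainset:size() returns the number of samples. A quick check of that interface (sample indices chosen arbitrarily):

-- iterate the first three samples through the {input, target} interface
for i = 1, 3 do
    local input, target = trainset[i][1], trainset[i][2]
    print(i, input[1], input[2], target)
end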

In [78]:
print(trainset[33])


Out[78]:
{
  1 : DoubleTensor - size: 2
  2 : 1
}

Define the model


In [85]:
net = nn.Sequential()
net:add(nn.Linear(2,2))   -- 2 input features -> 2 class scores
net:add(nn.Sigmoid())     -- squash each score into (0,1)

Define a loss function


In [86]:
criterion = nn.ClassNLLCriterion()  -- negative log-likelihood loss over class indices
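
Note that ClassNLLCriterion expects log-probabilities as input, so it is conventionally paired with an nn.LogSoftMax output layer rather than nn.Sigmoid; feeding it sigmoid outputs is likely why the training error below stays flat and negative. A sketch of the conventional pairing (net2 and criterion2 are illustrative names, not the cells executed here):

-- alternative model: log-probabilities out, as ClassNLLCriterion assumes
net2 = nn.Sequential()
net2:add(nn.Linear(2, 2))
net2:add(nn.LogSoftMax())
criterion2 = nn.ClassNLLCriterion()
-- equivalently, keep raw linear scores and use nn.CrossEntropyCriterion(),
-- which combines LogSoftMax and ClassNLLCriterion in one module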

Train the model (using SGD)


In [87]:
trainer = nn.StochasticGradient(net, criterion)
trainer.learningRate = 0.01
trainer.maxIteration = 100 -- run 100 epochs of training.
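
Internally, trainer:train(trainset) loops over the samples (shuffled by default), doing a forward pass, a backward pass, and a parameter update for each one. A rough manual equivalent, as a sketch under the same learning rate and iterating in order rather than shuffling:

-- hand-written SGD loop, roughly what nn.StochasticGradient automates
for epoch = 1, 100 do
    local err = 0
    for i = 1, trainset:size() do
        local input, target = trainset[i][1], trainset[i][2]
        local output = net:forward(input)
        err = err + criterion:forward(output, target)
        net:zeroGradParameters()
        net:backward(input, criterion:backward(output, target))
        net:updateParameters(0.01)
    end
    print('# epoch ' .. epoch .. ', mean error ' .. err / trainset:size())
end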

In [88]:
trainer:train(trainset)


Out[88]:
# StochasticGradient: training
# current error = -0.3999999976942
# current error = -0.3999999976942
# current error = -0.39999999769421
# current error = -0.39999999769421
# current error = -0.39999999769421
# StochasticGradient: you have reached the maximum number of iterations
# training error = -0.39999999769421

Test the trained model


In [90]:
predicted = net:forward(trainset.data[33])  -- per-class sigmoid outputs for sample 33
print(predicted, trainset.label[33])        -- compare against the true label


Out[90]:
 1.0000e+00
 6.4293e-24
[torch.DoubleTensor of size 2]

1	
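
To reduce the two outputs to a single predicted class, take the index of the largest one; torch.max along dimension 1 returns both the value and its index. A small sketch:

-- predicted class for sample 33: the index of the largest output
local maxvalue, maxindex = torch.max(predicted, 1)
print(maxindex[1])  -- 1 or 2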

In [91]:
correct = 0
for i=1,100 do
    local groundtruth = trainset.label[i]
    local prediction = net:forward(trainset.data[i])
    local confidences, indices = torch.sort(prediction, true)  -- sort descending: indices[1] is the predicted class
    if groundtruth == indices[1] then
        correct = correct + 1
    end
end
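
The same count can be obtained without the explicit per-sample loop, since nn modules also accept a batch of inputs (an N x 2 tensor). A batched version might look like this sketch:

-- batch version: one forward pass over all 100 samples, then compare argmax to the labels
local outputs = net:forward(trainset.data)      -- 100x2 output tensor
local _, predictions = torch.max(outputs, 2)    -- 100x1 predicted class indices
print(predictions:double():view(-1):eq(trainset.label):sum() .. '%')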

In [92]:
print(correct .. '%')  -- 100 examples in total, so the raw count doubles as a percentage


Out[92]:
40%	

In [93]:
class_performance = {0,0}  -- correct predictions counted separately for each class
for i=1,100 do
    local groundtruth = trainset.label[i]
    local prediction = net:forward(trainset.data[i])
    local confidences, indices = torch.sort(prediction, true)
    if groundtruth == indices[1] then
        class_performance[groundtruth] = class_performance[groundtruth]+1
    end
end

In [94]:
for i=1,2 do
    print(i, class_performance[i]*2 .. '%')  -- NOTE: the *2 assumes exactly 50 examples per class
end


Out[94]:
1	80%	
2	0%
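
If the classes are not split exactly 50/50, the *2 above misstates the per-class accuracy. A split-agnostic sketch counts how many examples each class actually has and divides by that:

-- per-class accuracy without assuming a 50/50 class split
class_total = {0, 0}
for i = 1, trainset:size() do
    local c = trainset.label[i]
    class_total[c] = class_total[c] + 1
end
for i = 1, 2 do
    print(i, string.format('%.0f%%', 100 * class_performance[i] / class_total[i]))
end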