In [13]:
require 'torch'
require 'nn'
require 'nnx'
require 'image'
require 'optim'
In [2]:
-- download dataset
if not paths.dirp('cifar-10-batches-t7') then
   local www = 'http://torch7.s3-website-us-east-1.amazonaws.com/data/cifar-10-torch.tar.gz'
   local tar = paths.basename(www)
   os.execute('wget ' .. www .. '; ' .. 'tar xvf ' .. tar)
end
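If the download or the extraction fails, the torch.load calls below stop with a fairly opaque error. A quick sanity check (a sketch added here, not part of the original notebook) can confirm that all six batch files are in place:

-- sanity check: all five training batches plus the test batch should exist
for i = 1,5 do
   assert(paths.filep('cifar-10-batches-t7/data_batch_' .. i .. '.t7'),
          'missing training batch ' .. i)
end
assert(paths.filep('cifar-10-batches-t7/test_batch.t7'), 'missing test batch')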
In [3]:
trsize = 50000
tesize = 10000
In [4]:
-- load dataset
-- the 50,000 training images are stored as 5 batches of 10,000
trainData = {
   data = torch.Tensor(50000, 3072),
   labels = torch.Tensor(50000),
   size = function() return trsize end
}
for i = 0,4 do
   subset = torch.load('cifar-10-batches-t7/data_batch_' .. (i+1) .. '.t7', 'ascii')
   trainData.data[{ {i*10000+1, (i+1)*10000} }] = subset.data:t():double()
   trainData.labels[{ {i*10000+1, (i+1)*10000} }] = subset.labels[1]:double()
end
trainData.labels = trainData.labels + 1
subset = torch.load('cifar-10-batches-t7/test_batch.t7', 'ascii')
testData = {
   data = subset.data:t():double(),
   labels = subset.labels[1]:double(),
   size = function() return tesize end
}
testData.labels = testData.labels + 1
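Before going further, it is worth confirming that the five batches were concatenated correctly and that the +1 shift moved the labels into the 1..10 range Torch's criteria expect. A minimal check (not in the original notebook) could be:

-- sanity check: flat data (N x 3072) and labels in 1..10
print(trainData.data:size())
print(trainData.labels:min(), trainData.labels:max())
print(testData.labels:min(), testData.labels:max())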
In [5]:
-- resize dataset (if using small version)
trainData.data = trainData.data[{ {1,trsize} }]
trainData.labels = trainData.labels[{ {1,trsize} }]
In [6]:
testData.data = testData.data[{ {1,tesize} }]
testData.labels = testData.labels[{ {1,tesize} }]
In [7]:
-- reshape data
trainData.data = trainData.data:reshape(trsize,3,32,32)
testData.data = testData.data:reshape(tesize,3,32,32)
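With the data back in 3x32x32 form, a few raw samples can be displayed directly. This assumes the notebook is running under iTorch, whose itorch.image helper renders a batch of images inline:

-- show the first 16 training images and their labels (assumes iTorch)
itorch.image(trainData.data[{ {1,16} }])
print(trainData.labels[{ {1,16} }])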
In [14]:
----------------------------------------------------------------------
-- preprocess/normalize train/test sets
--
print '<trainer> preprocessing data (color space + normalization)'
-- preprocess trainSet
-- local contrastive normalization of the Y channel, with a 7-pixel 1D Gaussian kernel
normalization = nn.SpatialContrastiveNormalization(1, image.gaussian1D(7))
for i = 1,trainData:size() do
   -- rgb -> yuv
   local rgb = trainData.data[i]
   local yuv = image.rgb2yuv(rgb)
   -- normalize y locally:
   yuv[1] = normalization(yuv[{{1}}])
   trainData.data[i] = yuv
end
-- normalize u globally:
mean_u = trainData.data[{ {},2,{},{} }]:mean()
std_u = trainData.data[{ {},2,{},{} }]:std()
trainData.data[{ {},2,{},{} }]:add(-mean_u)
trainData.data[{ {},2,{},{} }]:div(std_u)
-- normalize v globally:
mean_v = trainData.data[{ {},3,{},{} }]:mean()
std_v = trainData.data[{ {},3,{},{} }]:std()
trainData.data[{ {},3,{},{} }]:add(-mean_v)
trainData.data[{ {},3,{},{} }]:div(std_v)
-- preprocess testSet
for i = 1,testData:size() do
   -- rgb -> yuv
   local rgb = testData.data[i]
   local yuv = image.rgb2yuv(rgb)
   -- normalize y locally:
   yuv[1] = normalization(yuv[{{1}}])
   testData.data[i] = yuv
end
-- normalize u globally:
testData.data[{ {},2,{},{} }]:add(-mean_u)
testData.data[{ {},2,{},{} }]:div(std_u)
-- normalize v globally:
testData.data[{ {},3,{},{} }]:add(-mean_v)
testData.data[{ {},3,{},{} }]:div(std_v)
Out[14]:
<trainer> preprocessing data (color space + normalization)
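A quick way to verify the preprocessing (a sketch, not part of the original notebook) is to look at the per-channel statistics: after the global step, the U and V channels of the training set should be close to zero mean and unit standard deviation, while the locally normalized Y channel will only be roughly centred:

-- check global normalization of the chroma channels
for _,c in ipairs({2,3}) do
   print(trainData.data[{ {},c,{},{} }]:mean(), trainData.data[{ {},c,{},{} }]:std())
end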
In [15]:
print(testData.data:size())
In [16]:
print(trainData.data:size())
Out[16]: