In [2]:
require 'nn';
In [4]:
file = torch.DiskFile('dat/facies_vectors.t7', 'r')
facies = file:readObject()
file:close()
file = torch.DiskFile('dat/validation_data_nofacies.t7', 'r')
validate = file:readObject()
file:close()
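Since `readObject` deserializes whatever was written to the file, a quick type check (illustrative only) confirms the two objects are the tensors we expect:
-- illustrative sanity check: both objects should be 2-D DoubleTensors
print(torch.type(facies), torch.type(validate))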
In [7]:
-- build tables
print("facies size: ", facies:size()[1], "x", facies:size()[2])
print("validate size: ", validate:size()[1], "x", validate:size()[2])
-- initialize
training_data = {}
val_data = {}
depth = {}
-- build the training wells into the table
training_data["shrimplin"] = facies[{{1,471},{3,9}}]
training_data["alexander"] = facies[{{472,937},{3,9}}]
training_data["shankle"] = facies[{{938,1386},{3,9}}]
training_data["luke"] = facies[{{1387,1847},{3,9}}]
training_data["kimzey"] = facies[{{1848,2286},{3,9}}]
training_data["cross"] = facies[{{2287,2787},{3,9}}]
training_data["nolan"] = facies[{{2788,3202},{3,9}}]
training_data["recruit"] = facies[{{3203,3282},{3,9}}]
training_data["newby"] = facies[{{3283,3745},{3,9}}]
training_data["churchman"] = facies[{{3746,4149},{3,9}}]
-- build the validation data into a table
val_data["stuart"] = validate[{{1,474},{2,8}}]
val_data["crawford"] = validate[{{475,830},{2,8}}]
-- build a depth log for plotting
depth["shrimplin"] = facies[{{1,471},{2}}]
depth["alexander"] = facies[{{472,937},{2}}]
depth["shankle"] = facies[{{938,1386},{2}}]
depth["luke"] = facies[{{1387,1847},{2}}]
depth["kimzey"] = facies[{{1848,2286},{2}}]
depth["cross"] = facies[{{2287,2787},{2}}]
depth["nolan"] = facies[{{2788,3202},{2}}]
depth["recruit"] = facies[{{3203,3282},{2}}]
depth["newby"] = facies[{{3283,3745},{2}}]
depth["churchman"] = facies[{{3746,4149},{2}}]
Out[7]:
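Note that Torch's ranged indexing (`tensor[{{rows},{cols}}]`) returns a view that shares storage with the original tensor, so the in-place normalization in the next cell also rewrites `facies` itself. A minimal illustration (the `torch.pointer` identity check is just one way to see the sharing):
-- illustrative: a ranged index is a view, not a copy
sub = facies[{{1,3},{3,9}}]
print(sub:size(1), sub:size(2)) -- 3  7
print(torch.pointer(sub:storage()) == torch.pointer(facies:storage())) -- true: shared storage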
In [8]:
val_data
Out[8]:
In [9]:
-- normalize the data
-- training data
mean = {}
stdv = {}
for key,value in pairs(training_data) do -- over each well
    mean[key] = torch.Tensor(7)
    stdv[key] = torch.Tensor(7)
    for i = 1, 7 do -- over each log
        mean[key][i] = training_data[key][{{},{i}}]:mean()
        training_data[key][{{},{i}}]:add(-mean[key][i])
        stdv[key][i] = training_data[key][{{},{i}}]:std()
        training_data[key][{{},{i}}]:div(stdv[key][i])
    end
end
-- validation data (each well standardized with its own statistics)
mean = {}
stdv = {}
for key,value in pairs(val_data) do -- over each well
    mean[key] = torch.Tensor(7)
    stdv[key] = torch.Tensor(7)
    for i = 1, 7 do -- over each log
        mean[key][i] = val_data[key][{{},{i}}]:mean()
        val_data[key][{{},{i}}]:add(-mean[key][i])
        stdv[key][i] = val_data[key][{{},{i}}]:std()
        val_data[key][{{},{i}}]:div(stdv[key][i])
    end
end
-- facies labels for training
facies_labels = {}
facies_labels["shrimplin"] = facies[{{1,471},{1}}]
facies_labels["alexander"] = facies[{{472,937},{1}}]
facies_labels["shankle"] = facies[{{938,1386},{1}}]
facies_labels["luke"] = facies[{{1387,1847},{1}}]
facies_labels["kimzey"] = facies[{{1848,2286},{1}}]
facies_labels["cross"] = facies[{{2287,2787},{1}}]
facies_labels["nolan"] = facies[{{2788,3202},{1}}]
facies_labels["recruit"] = facies[{{3203,3282},{1}}]
facies_labels["newby"] = facies[{{3283,3745},{1}}]
facies_labels["churchman"] = facies[{{3746,4149},{1}}]
In [11]:
-- build the neural net
net = nn.Sequential()
net:add(nn.Linear(7,200))
net:add(nn.ReLU())
net:add(nn.Linear(200,50))
net:add(nn.ReLU())
net:add(nn.Linear(50,9))
net:add(nn.LogSoftMax())
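Printing the container shows the layer stack; summing over `net:parameters()` (illustrative) counts the weights being trained:
-- illustrative: inspect the model and count its trainable parameters
print(net)
n = 0
for _, p in ipairs(net:parameters()) do n = n + p:nElement() end
print("trainable parameters:", n)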
In [12]:
-- test the net with a single forward/backward pass
input = training_data["shrimplin"][1]:clone() -- copy one sample (7 log values)
output = net:forward(input)
-- zero accumulated gradients, then backpropagate a dummy gradient
net:zeroGradParameters()
gradInput = net:backward(input, torch.rand(9))
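Because the last layer is `LogSoftMax`, `output` holds log-probabilities; exponentiating it (illustrative) recovers a proper distribution over the nine facies classes:
-- illustrative: exponentiated outputs are class probabilities summing to ~1
probs = torch.exp(output)
print(probs:sum())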
In [13]:
-- define the loss function (negative log-likelihood over log-probabilities)
criterion = nn.ClassNLLCriterion()
target = facies_labels["shrimplin"][1][1] -- extract the scalar class label
criterion:forward(output, target)
gradients = criterion:backward(output, target)
gradInput = net:backward(input, gradients)
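For a single sample, `ClassNLLCriterion` on log-probabilities is just the negative log-probability of the true class, which is easy to verify by hand (illustrative):
-- illustrative: the NLL loss equals -log p(true class)
print(criterion:forward(output, target), -output[target])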
In [15]:
-- condition the data into a single training set
trainset = {}
trainset["data"] = torch.Tensor(facies:size(1), 7)
trainset["facies"] = torch.Tensor(facies:size(1))
-- fill data and labels together, iterating a fixed list of well names:
-- pairs() guarantees no order, so two separate pairs() loops could
-- misalign the samples and their labels
wells = {"shrimplin","alexander","shankle","luke","kimzey",
         "cross","nolan","recruit","newby","churchman"}
idx = 0
for _, key in ipairs(wells) do
    for i = 1, training_data[key]:size(1) do
        trainset.data[i + idx] = training_data[key][i]
        trainset.facies[i + idx] = facies_labels[key][i][1]
    end
    idx = idx + training_data[key]:size(1)
end
-- provide the dataset interface (indexing and size()) that nn.StochasticGradient expects
setmetatable(trainset,
    {__index = function(t, i)
        return {t.data[i], t.facies[i]}
    end}
);
function trainset:size()
    return self.data:size(1)
end
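-- (illustrative) nn.StochasticGradient needs only dataset:size() and
-- dataset[i] returning an {input, target} pair; check one sample:
sample = trainset[1]
print(sample[1]:size(1), sample[2])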
-- condition the validation data the same way, again in a fixed well order
valset = {}
valset["data"] = torch.Tensor(validate:size(1), 7)
idx = 0
for _, key in ipairs({"stuart", "crawford"}) do
    for i = 1, val_data[key]:size(1) do
        valset.data[i + idx] = val_data[key][i]
    end
    idx = idx + val_data[key]:size(1)
end
-- replace NaNs (missing log measurements) with zero
nan_mask = trainset.data:ne(trainset.data)
trainset.data[nan_mask] = 0
nan_mask = valset.data:ne(valset.data)
valset.data[nan_mask] = 0
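The masking works because NaN is the only value that is not equal to itself; a tiny illustrative demonstration:
-- illustrative: t:ne(t) is 1 exactly where t is NaN
t = torch.Tensor({1, 0/0, 3})
print(t:ne(t))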
In [16]:
-- train the net
trainer = nn.StochasticGradient(net, criterion)
trainer.learningRate = .001
trainer.maxIteration = 20
print("starting training")
timer = torch.Timer()
trainer:train(trainset)
print("training time =", timer:time().real)
Out[16]:
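Since the validation wells ship without labels, resubstitution accuracy on the training set (illustrative, and optimistic by construction) is the quickest check that the net learned anything:
-- illustrative: fraction of training samples classified correctly
correct = 0
for i = 1, trainset:size() do
    local _, idx = torch.max(net:forward(trainset.data[i]), 1)
    if idx[1] == trainset.facies[i] then correct = correct + 1 end
end
print("training accuracy:", correct / trainset:size())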
In [21]:
-- predict facies for the validation wells: take the class with the
-- highest log-probability for each sample
preds = torch.Tensor(valset.data:size(1))
for i = 1, valset.data:size(1) do
    local prediction = net:forward(valset.data[i])
    local confidences, indices = torch.sort(prediction, true) -- descending
    preds[i] = indices[1] -- most likely class
end
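The wells were concatenated in a fixed order (Stuart rows 1-474, then Crawford rows 475-830), so the predictions split back apart by the same ranges. The output path below is hypothetical, shown only to illustrate serializing the result with `torch.DiskFile`:
-- illustrative: split predictions per well and serialize them
stuart_preds   = preds[{{1,474}}]
crawford_preds = preds[{{475,830}}]
file = torch.DiskFile('dat/predicted_facies.t7', 'w') -- hypothetical output path
file:writeObject(preds)
file:close()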
In [23]:
#preds
Out[23]:
In [24]:
preds
Out[24]: