In [2]:
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
In [103]:
class Net(nn.Module):
    """Minimal demo network: a single bias-free 2 -> 2 linear projection."""

    def __init__(self):
        super(Net, self).__init__()
        # One fully-connected layer; bias disabled so the map is purely linear.
        self.linear1 = nn.Linear(2, 2, bias=False)

    def forward(self, x):
        """Project `x` through the linear layer and return the result."""
        return self.linear1(x)
In [104]:
# Instantiate the model once; downstream cells reuse this `net` object.
net = Net()
# Bare expression on the last line: Jupyter's rich repr displays the
# module structure (this produced Out[104]).
net
Out[104]:
In [105]:
# Plain tensors have carried autograd information since PyTorch 0.4;
# the Variable wrapper is deprecated (a no-op), so build tensors directly.
input_vec = torch.randn(4, 2)
out = net(input_vec)
# Dummy class-index labels for the 4 samples. CrossEntropyLoss expects
# integer class indices of dtype long; torch.tensor on Python ints
# yields int64, matching the old torch.LongTensor.
target = torch.tensor([1, 0, 0, 1])
loss_fn = nn.CrossEntropyLoss()
err = loss_fn(out, target)
print(err)
In [106]:
# Backpropagate the scalar loss; this populates .grad on the leaf
# parameters reachable from `err` (here, net.linear1.weight).
err.backward()
In [107]:
def printnorm(module, input, output):
    """Forward hook: announce that `module` just completed a forward pass.

    Signature matches what PyTorch passes to forward hooks:
    hook(module, input, output). Returns None, so the module's output
    is left unmodified.
    """
    # The first parameter was previously named `self`, which is misleading:
    # this is a free function, and PyTorch passes the module instance here.
    # Hooks are invoked positionally, so the rename is caller-compatible.
    print('inside ' + module.__class__.__name__ + " forward")
In [108]:
# Keep the RemovableHandle so the hook can later be detached with
# hook_handle.remove(); the original discarded it, making the hook
# permanent for the life of the module.
hook_handle = net.linear1.register_forward_hook(printnorm)
# Last expression: display the handle, as the original cell did (Out[108]).
hook_handle
Out[108]:
In [109]:
# Re-run the forward pass; the forward hook registered on net.linear1
# now prints its message during this call.
out = net(input_vec)
In [ ]: