In [2]:
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F

Notes on hooks (forward pass hook)

If I register the hook inside the forward method itself (e.g., to use an instance-specific feature size), the forward hook is not called.


In [103]:
class Net(nn.Module):
    """Minimal demo network: a single bias-free 2 -> 2 linear layer."""

    def __init__(self):
        super(Net, self).__init__()
        # bias=False keeps the layer a pure 2x2 matrix multiply
        self.linear1 = nn.Linear(2, 2, bias=False)

    def forward(self, x):
        # Apply the single linear layer and return its activation directly.
        return self.linear1(x)

In [104]:
# Instantiate the (untrained) network; the bare `net` expression on the last
# line makes Jupyter display the module's repr as the cell output below.
net = Net()
net


Out[104]:
Net (
  (linear1): Linear (2 -> 2)
)

In [105]:
# Build a random batch of 4 two-dimensional inputs and run the forward pass.
# NOTE(review): Variable is the pre-0.4 PyTorch autograd wrapper; on modern
# PyTorch plain tensors work directly with nn modules.
input_vec = Variable(torch.randn(4, 2))
out = net(input_vec)
# define a dummy target label (one class index per row of the batch)
target = Variable(torch.LongTensor([1, 0, 0, 1]))
loss_fn = nn.CrossEntropyLoss()
err = loss_fn(out, target)
print(err)


Variable containing:
 0.7019
[torch.FloatTensor of size 1]


In [106]:
# Backpropagate the loss to populate gradients on linear1's weight.
err.backward()

In [107]:
def printnorm(self, input, output):
    """Forward-pass hook that announces which module just ran.

    PyTorch invokes forward hooks positionally as hook(module, input, output),
    so ``self`` here is the module the hook is attached to, not a class
    instance in the usual method sense.
    """
    module_name = self.__class__.__name__
    print('inside ' + module_name + ' forward')

In [108]:
# Attach the hook to the linear layer. The call returns a RemovableHandle
# (displayed as the cell output below), which can later detach the hook.
net.linear1.register_forward_hook(printnorm)


Out[108]:
<torch.utils.hooks.RemovableHandle at 0x10c6d0780>

In [109]:
# Re-run the forward pass; this time the registered hook fires and prints.
out = net(input_vec)


inside Linear forward

In [ ]: