In [3]:
import torch
from torch.autograd import Variable
In [37]:
tensor = torch.FloatTensor([[1, 2], [3, 4]])  # build a tensor
x = Variable(tensor, requires_grad=True)      # wrap it in a Variable, usually to compute gradients
print(tensor)  # [torch.FloatTensor of size 2x2]
print(x)       # [torch.FloatTensor of size 2x2]
In [42]:
x.sum(), x.sqrt(), x**2  # operations on a Variable return new Variables
Out[42]:
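Each of these results is itself a Variable that records how it was produced, which is what lets autograd differentiate through it. A minimal sketch using the same pre-0.4 Variable API as above (z is an illustrative name):

z = x.sum()          # z is a Variable with its own grad_fn
z.backward()         # d(sum)/dx is 1 for every element
print(x.grad)        # expected: a 2x2 tensor of ones
x.grad.data.zero_()  # reset so the cells below start clean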
In [32]:
x.data  # the raw FloatTensor wrapped by the Variable
Out[32]:
In [34]:
x.requires_grad, x.grad, x.grad_fn, x.volatile  # the autograd-related attributes of a Variable
Out[34]:
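These four attributes are the core of the old Variable API: requires_grad marks the Variable for gradient tracking, grad holds the accumulated gradient (None before any backward pass), grad_fn points at the operation that produced the Variable (None for user-created leaves), and volatile was an inference flag that later versions removed in favor of torch.no_grad(). A quick sketch of the leaf vs. non-leaf distinction:

print(x.grad_fn)        # expected: None, since x is a user-created leaf
print((x * 2).grad_fn)  # a backward-function object for the multiplication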
In [123]:
# build a small computation graph
x = Variable(torch.FloatTensor([3]), requires_grad=True)
y = 2 * x + 3
x.requires_grad, y.requires_grad  # requires_grad propagates from x to y
Out[123]:
In [77]:
y.grad_fn.next_functions[0][0]  # the parent of y's grad_fn in the backward graph
Out[77]:
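next_functions exposes the edges of the backward graph, so you can walk from y back to the gradient accumulator for the leaf x. A minimal traversal sketch (fn is an illustrative name; the exact node names vary between PyTorch versions):

fn = y.grad_fn
while fn is not None:
    print(type(fn).__name__)  # e.g. an add node, then a mul node, then AccumulateGrad
    fn = fn.next_functions[0][0] if fn.next_functions else None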
In [124]:
y.backward()  # compute dy/dx; equivalent to y.backward(torch.Tensor([1.0]))
In [125]:
x.grad  # dy/dx = 2 is stored here, and it accumulates across backward calls
Out[125]:
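Because gradients accumulate rather than overwrite, a second backward pass adds to x.grad instead of replacing it, which is why the zeroing step below matters. A minimal sketch with a fresh Variable (w is an illustrative name; each forward pass builds its own graph):

w = Variable(torch.FloatTensor([3]), requires_grad=True)
(2 * w + 3).backward()  # contributes dy/dw = 2
(2 * w + 3).backward()  # contributes another 2
print(w.grad)           # expected: 4, i.e. 2 + 2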
In [126]:
x.grad.data.zero_()  # zero the accumulated gradient in place
Out[126]:
In [127]:
y.backward(torch.Tensor([3.0]))  # backward with an upstream gradient of 3
In [128]:
x.grad  # expected: 6, i.e. 3 * dy/dx
Out[128]:
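The tensor passed to backward() is the upstream gradient that gets multiplied into dy/dx (a vector-Jacobian product), so x.grad ends up as 3 * 2 = 6 rather than 2. A self-contained check (v and yv are illustrative names):

v = Variable(torch.FloatTensor([3]), requires_grad=True)
yv = 2 * v + 3
yv.backward(torch.Tensor([3.0]))  # upstream gradient of 3
print(v.grad)                     # expected: 6 == 3 * dy/dv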
In [167]:
x1 = Variable(torch.ones(2), requires_grad=True)
x2 = Variable(torch.ones(2))  # requires_grad defaults to False
In [168]:
y = x1 * 2 + x2
y
Out[168]:
In [169]:
y.backward(torch.Tensor([1,1]))  # y is a vector, so backward needs an explicit upstream gradient
In [170]:
x1.grad, x2.grad
Out[170]:
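x1.grad holds dy/dx1 = 2 for each element, while x2 was never marked with requires_grad=True, so autograd treats it as a constant and leaves its grad unset. A quick check:

print(x1.grad)  # expected: [2, 2]
print(x2.grad)  # expected: None, since x2 was never tracked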
In [171]:
x1.grad.data.numpy()  # convert the gradient's underlying tensor to a NumPy array
Out[171]:
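On the CPU, .numpy() returns a view over the tensor's storage rather than a copy, so in-place edits on either side are visible through the other. A minimal sketch (a is an illustrative name):

a = x1.grad.data.numpy()
a[0] = 100.0    # in-place edit on the NumPy side
print(x1.grad)  # the first element now reflects the edit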