In [1]:
import torch
from torch.autograd import Variable
In [2]:
x = Variable(torch.ones(2, 2), requires_grad=True)  # wrap a tensor so autograd tracks operations on it
print(x)
In [4]:
y = x + 2
print(y)
In [5]:
print(y.grad_fn)
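y was produced by an operation, so it carries a grad_fn; a leaf Variable created directly by the user does not. A quick check:
In [ ]:
print(x.grad_fn)  # None: x is a user-created leaf, not the result of an op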
In [7]:
z = y * y * 3
out = z.mean()
print(z, out)
In [8]:
out.backward()  # backprop from the scalar out; gradients accumulate in x.grad
In [9]:
print(x.grad)
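The 4.5s follow from the chain rule: out = (1/4) * sum(3 * (x_i + 2)**2), so d(out)/dx_i = 1.5 * (x_i + 2) = 4.5 at x_i = 1. A minimal hand check (not part of the original run; expected is a name introduced here):
In [ ]:
# d(out)/dx_i = 1.5 * (x_i + 2) = 4.5 everywhere, since x = ones(2, 2)
expected = 1.5 * (torch.ones(2, 2) + 2)
print(torch.equal(x.grad.data, expected))  # expect True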
In [11]:
x = torch.randn(3)
x = Variable(x, requires_grad=True)
y = x * 2
while y.data.norm() < 1000:
    y = y * 2
print(y)
In [35]:
gradients = torch.FloatTensor([0.1, 1.0, 0.0001])
y.backward(gradients)
print(x.grad)
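Here y is not a scalar, so backward needs a vector argument: y.backward(gradients) computes the vector-Jacobian product. Since the loop left y = x * 2**k, the Jacobian is diagonal and x.grad should be gradients scaled by that same power of two. A sketch (scale is a name introduced here):
In [ ]:
# y = x * 2**k, so dy_i/dx_i = 2**k and x.grad = gradients * 2**k
scale = (y.data / x.data)[0]    # read off the common factor 2**k
print(scale, x.grad.data / gradients)  # each ratio should equal scale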
In [17]:
torch.norm  # norm
Out[17]:
In [16]:
y.data.norm(p=10)
Out[16]:
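norm(p=10) computes the p-norm (sum_i |y_i|**p)**(1/p). A hand check against that formula (manual is a name introduced here):
In [ ]:
# p-norm by hand: (sum(|y_i|**10))**(1/10)
manual = y.data.abs().pow(10).sum() ** (1.0 / 10)
print(manual, y.data.norm(p=10))  # should agree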
In [23]:
a = torch.randn(4, 2)
print(a)
In [25]:
torch.norm(a, 0, 1)
Out[25]:
In [32]:
torch.norm(a, 1, -1)
Out[32]:
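In these calls the second argument of torch.norm is p and the third is the dimension to reduce over: p=0 counts the non-zero entries, p=1 sums absolute values. Hand checks for the two reductions above:
In [ ]:
print((a != 0).float().sum(1))  # L0 "norm": non-zero count per row
print(a.abs().sum(-1))          # L1 norm: sum of |a_ij| per row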
In [51]:
x1 = torch.ones(3)
x1 = Variable(x1, requires_grad=True)
y1 = x1 * 2
while y1.data.norm() < 1000:
    y1 = y1 * 2
z = y1 * 10
In [48]:
gradients = torch.FloatTensor([0.1, 1.0, 0.0001])
z.backward(gradients)
print(x1.grad)
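Same vector-Jacobian pattern with one more constant in the chain: z = 10 * y1 = 10 * (2**k) * x1. Starting from x1 = ones(3), the loop doubles y1 until its norm reaches 1000, which gives 2**k = 1024 here, so every entry of x1.grad should be the matching entry of gradients times 10240. A quick check:
In [ ]:
# z = 10 * (2**k) * x1 with 2**k = 1024, so x1.grad = gradients * 10240
print(x1.grad.data / gradients)  # each entry ~= 10240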