pytorch中的前向计算和反向传播

前向计算1

import torch

# Forward computation demo: out = mean(3 * (x + 2)^2).
# Each intermediate tensor carries a grad_fn recording how it was produced.

x = torch.ones([2, 2], requires_grad=True)
print(x)
y = x + 2
print(y)
z = 3 * y.pow(2)
print(z)
out = z.mean()
print(out)

# A tensor that requires grad cannot be converted to numpy directly;
# it must be detached from the graph first.
print(x.detach().numpy())
try:
    # Deliberately failing call, kept to demonstrate the RuntimeError
    # (originally it crashed the script; now the message is just printed).
    print(x.numpy())
except RuntimeError as err:
    print(err)

Traceback (most recent call last):
File "C:/Users/liuxinyu/Desktop/pytorch_test/day2/前向计算.py", line 17, in <module>
print(x.numpy())
RuntimeError: Can't call numpy() on Variable that requires grad. Use var.detach().numpy() instead.
tensor([[1., 1.],
[1., 1.]], requires_grad=True)
tensor([[3., 3.],
[3., 3.]], grad_fn=<AddBackward0>)
tensor([[27., 27.],
[27., 27.]], grad_fn=<MulBackward0>)
tensor(27., grad_fn=<MeanBackward0>)
[[1. 1.]
[1. 1.]]

 前向计算2

import torch

# A freshly created tensor does not track gradients by default.
a = torch.randn(2, 2)
a = a.mul(3).div(a - 1)
print(a.requires_grad)

# The trailing underscore marks an in-place modification:
# from here on, operations on `a` are recorded in the graph.
a.requires_grad_(True)
print(a.requires_grad)

b = a.pow(2).sum()
print(b.grad_fn)

# Inside torch.no_grad() nothing is recorded, so the result
# does not require gradients even though `a` does.
with torch.no_grad():
    c = a.pow(2).sum()
    print(c.requires_grad)

False
True
<SumBackward0 object at 0x000000000249D550>
False

  反向传播

import torch

# Backward-propagation demo for out = mean(3 * (x + 2)^2).
# grad_fn on every intermediate records the graph that backward() walks.

x = torch.ones((2, 2), requires_grad=True)
print(x)

y = x + 2
print(y)

z = y.pow(2) * 3
print(z)

out = z.mean()
print(out)

# d(out)/dx = (3/4) * 2 * (x + 2) = 1.5 * (x + 2) = 4.5 at x = 1.
out.backward()
print(x.grad)

tensor([[1., 1.],
        [1., 1.]], requires_grad=True)
tensor([[3., 3.],
        [3., 3.]], grad_fn=<AddBackward0>)
tensor([[27., 27.],
        [27., 27.]], grad_fn=<MulBackward0>)
tensor(27., grad_fn=<MeanBackward0>)
tensor([[4.5000, 4.5000],
        [4.5000, 4.5000]])

  

猜你喜欢

转载自www.cnblogs.com/LiuXinyu12378/p/12299674.html