import torch
from torch import autograd
# Framework / environment info banner.
_info_lines = (
    ("***************************************************",),
    ("torch.__version__=", torch.__version__),
    ("torch.cuda.is_available()=", torch.cuda.is_available()),
    (),
)
for _parts in _info_lines:
    print(*_parts)
# Autograd demo on a tiny scalar graph:
#   y  = b * x2   (= 2 * 2 = 4),  dy/db  = x2 = 2
#   y2 = y * x    (= 4 * 1 = 4),  dy2/db = x * x2 = 2
# NOTE: the original comment claimed y = a^2*x + b*x + c, but `a` and `c`
# never enter the graph — they are kept only to show that unused leaf
# tensors end up with .grad == None.
x = torch.tensor(1.)
x2 = torch.tensor(2.)
a = torch.tensor(1., requires_grad=True)
b = torch.tensor(2., requires_grad=True)
c = torch.tensor(3., requires_grad=True)

y = b * x2
y2 = y * x

# backward() accumulates gradients into the leaves' .grad attributes;
# retain_graph=True keeps the graph alive for the autograd.grad calls below.
y2.backward(retain_graph=True)
print("before:", a.grad, b.grad, c.grad)  # None tensor(2.) None

# autograd.grad returns the gradient as a tuple instead of accumulating it.
grads = autograd.grad(y, b, retain_graph=True)
print("after:", grads)

# Gradient of y2 w.r.t. b through the intermediate y ("cross-variable").
# Fix: grad2 was computed but never shown in the original.
grad2 = autograd.grad(y2, b)
print("grad2:", grad2)
# 跨变量求梯度 (computing gradients across variables)
# Reposted from: blog.csdn.net/swx595182208/article/details/130061088