Master Liu Er's PyTorch Deep Learning Tutorial on Bilibili (Station B), Lecture 4: Back Propagation Homework


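The code below fits the quadratic model y_pred = w_1 * x^2 + w_2 * x + b to the points (1, 2), (2, 4), (3, 6), using the per-sample squared-error loss L = (y_pred - y)^2 and updating the three parameters by stochastic gradient descent. For reference, the gradients that l.backward() computes follow directly from the chain rule:

dL/dw_1 = 2 * (y_pred - y) * x^2
dL/dw_2 = 2 * (y_pred - y) * x
dL/db   = 2 * (y_pred - y)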
import torch

x_data = [1.0,2.0,3.0]
y_data = [2.0,4.0,6.0]

w_1 = torch.Tensor([1.0])
w_2 = torch.Tensor([2.0])
b = torch.Tensor([1.0])
w_1.requires_grad = True  # track gradients for the quadratic coefficient
w_2.requires_grad = True  # track gradients for the linear coefficient
b.requires_grad = True    # track gradients for the bias


def forward(x):
    # Quadratic model: y_pred = w_1 * x^2 + w_2 * x + b
    return w_1 * x * x + w_2 * x + b

def loss(x, y):
    # Squared-error loss for a single sample
    y_pred = forward(x)
    return (y_pred - y) ** 2


print("predict (before training)",4,forward(4).item())

lr = 0.01  # learning rate

for epoch in range(100):
    for x, y in zip(x_data, y_data):
        l = loss(x, y)   # compute the loss for this sample
        l.backward()     # backpropagate: fills w_1.grad, w_2.grad, b.grad
        print('\tgrad:', x, y, w_1.grad.item(), w_2.grad.item(), b.grad.item())

        # gradient-descent step; using .data keeps the update out of the autograd graph
        w_1.data = w_1.data - lr * w_1.grad.data
        w_2.data = w_2.data - lr * w_2.grad.data
        b.data = b.data - lr * b.grad.data

        # zero the gradients, otherwise backward() accumulates into .grad
        w_1.grad.data.zero_()
        w_2.grad.data.zero_()
        b.grad.data.zero_()

    print("progress:", epoch, l.item())  # loss of the last sample in this epoch

print("predict  (after training)" , 4 ,forward(4).item())


Source: blog.csdn.net/gubeiqing/article/details/115857689