PyTorch hand-written linear regression

The script below fits the linear model y = w * x + b to synthetic data by minimizing the mean squared error with plain gradient descent, computing the gradients with autograd rather than using torch.nn or torch.optim.

import torch
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

LEARN_RATE = 0.1

# 1. prepare the data
x = torch.randn([500, 1])
y_true = x * 0.8 + 3

# 2. compute the predicted values: y_predict = x * w + b
w = torch.rand([], requires_grad=True)
b = torch.tensor(0., requires_grad=True)

plt.figure()
plt.grid(True)

# turn on interactive mode so the plot updates during training
plt.ion()
for i in range(50):
    plt.cla()

    # zero any gradients left over from the previous iteration
    for j in [w, b]:
        if j.grad is not None:
            j.grad.zero_()
    y_predict = x * w + b

    # 3. compute the mean squared error loss and backpropagate
    loss = (y_predict - y_true).pow(2).mean()
    loss.backward()

    # 4. update the parameters; .grad holds the derivative of the loss
    w.data = w.data - LEARN_RATE * w.grad
    b.data = b.data - LEARN_RATE * b.grad


    plt.scatter(x.numpy(), y_true.numpy())
    plt.plot(x.numpy(), y_predict.detach().numpy(), color="g")
    plt.pause(0.1)


    if i % 50 == 0:
        print("iteration {}: loss = {}, weight w = {}, bias b = {}".format(i, loss.data, w.data, b.data))

# turn off interactive mode and keep the final figure open
plt.ioff()
plt.show()

  

Origin www.cnblogs.com/LiuXinyu12378/p/11374748.html