PyTorch: One-Dimensional Linear Regression

Data

"""一维线性回归"""
import numpy as np
import torch
from torch import nn, optim
from torch.autograd import Variable


# Data
x_train = np.array([[3.3], [4.4], [5.5]], dtype=np.float32)
y_train = np.array([[1.7], [2.76], [2.09]], dtype=np.float32)

# PyTorch operates on Tensors, so convert the numpy arrays to Tensors first
x_train = torch.from_numpy(x_train)
y_train = torch.from_numpy(y_train)
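
As a quick sanity check (an illustrative addition, not part of the original post), the converted tensors should have shape (3, 1) and dtype float32:

# Both tensors should report torch.Size([3, 1]) and torch.float32
print(x_train.shape, x_train.dtype)
print(y_train.shape, y_train.dtype)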

Build a simple model y = w*x + b

# Build a simple model
class LinearRegression(nn.Module):
    def __init__(self):
        super(LinearRegression, self).__init__()
        self.linear = nn.Linear(1, 1) # both input and output are one-dimensional
    
    def forward(self, x):
        out = self.linear(x) # computes a simple y = w*x + b
        return out
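
nn.Linear(1, 1) owns the learnable w and b internally. As an illustrative sketch (the instance m below is hypothetical, not part of the original code), you can list the parameters of a fresh model:

# Each LinearRegression instance holds one weight and one bias parameter
m = LinearRegression()
for name, p in m.named_parameters():
    print(name, p.shape)  # linear.weight: torch.Size([1, 1]); linear.bias: torch.Size([1])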

Create the model

# Instantiate the model, on the GPU if one is available
if torch.cuda.is_available():
    model = LinearRegression().cuda()
else:
    model = LinearRegression()

Define the loss function and the optimizer, using mean squared error

# Define the loss function and the optimizer; use mean squared error as the loss
criterion = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=1e-3) # stochastic gradient descent
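
For reference, nn.MSELoss() with its default reduction returns the mean of the squared differences between prediction and target. A minimal sketch with made-up tensors (not the training data) shows the equivalence:

# MSELoss (default reduction='mean') == mean((prediction - target)^2)
pred = torch.tensor([[1.0], [2.0]])  # hypothetical predictions
true = torch.tensor([[1.5], [2.5]])  # hypothetical targets
print(nn.MSELoss()(pred, true))    # tensor(0.2500)
print(((pred - true) ** 2).mean()) # tensor(0.2500)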

Training

# Train the model
num_epochs = 1000
for epoch in range(num_epochs):
    if torch.cuda.is_available():
        inputs = Variable(x_train).cuda()
        target = Variable(y_train).cuda()
    else:
        inputs = Variable(x_train)
        target = Variable(y_train)
    
    # forward
    out = model(inputs) # forward pass
    loss = criterion(out, target) # compute the loss
    
    # backward
    optimizer.zero_grad() # zero the gradients; always do this before the backward pass
    loss.backward()
    optimizer.step()
    
    if (epoch+1) % 20 == 0:
        print('Epoch[{}/{}], loss: {:.6f}'.format(epoch+1, num_epochs, loss.item()))

Epoch[20/1000], loss: 0.240405
Epoch[40/1000], loss: 0.240179
Epoch[60/1000], loss: 0.239954
Epoch[80/1000], loss: 0.239729
Epoch[100/1000], loss: 0.239505
Epoch[120/1000], loss: 0.239282
Epoch[140/1000], loss: 0.239059
Epoch[160/1000], loss: 0.238837
Epoch[180/1000], loss: 0.238616
Epoch[200/1000], loss: 0.238395
Epoch[220/1000], loss: 0.238175
Epoch[240/1000], loss: 0.237956
Epoch[260/1000], loss: 0.237737
Epoch[280/1000], loss: 0.237519
Epoch[300/1000], loss: 0.237302
Epoch[320/1000], loss: 0.237085
Epoch[340/1000], loss: 0.236869
Epoch[360/1000], loss: 0.236654
Epoch[380/1000], loss: 0.236439
Epoch[400/1000], loss: 0.236225
Epoch[420/1000], loss: 0.236012
Epoch[440/1000], loss: 0.235799
Epoch[460/1000], loss: 0.235587
Epoch[480/1000], loss: 0.235376
Epoch[500/1000], loss: 0.235165
Epoch[520/1000], loss: 0.234955
Epoch[540/1000], loss: 0.234746
Epoch[560/1000], loss: 0.234537
Epoch[580/1000], loss: 0.234329
Epoch[600/1000], loss: 0.234121
Epoch[620/1000], loss: 0.233914
Epoch[640/1000], loss: 0.233708
Epoch[660/1000], loss: 0.233502
Epoch[680/1000], loss: 0.233297
Epoch[700/1000], loss: 0.233093
Epoch[720/1000], loss: 0.232889
Epoch[740/1000], loss: 0.232686
Epoch[760/1000], loss: 0.232483
Epoch[780/1000], loss: 0.232281
Epoch[800/1000], loss: 0.232080
Epoch[820/1000], loss: 0.231879
Epoch[840/1000], loss: 0.231679
Epoch[860/1000], loss: 0.231480
Epoch[880/1000], loss: 0.231281
Epoch[900/1000], loss: 0.231083
Epoch[920/1000], loss: 0.230885
Epoch[940/1000], loss: 0.230688
Epoch[960/1000], loss: 0.230491
Epoch[980/1000], loss: 0.230296
Epoch[1000/1000], loss: 0.230100
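
After training, the fitted w and b can be read straight out of the linear layer. A small sketch (the .cpu() call covers the case where the model lives on the GPU):

# Extract the learned parameters of y = w*x + b
w = model.linear.weight.data.cpu().item()
b = model.linear.bias.data.cpu().item()
print('w = {:.4f}, b = {:.4f}'.format(w, b))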

Take a look at the predictions

import matplotlib.pyplot as plt


model.eval()
# keep the input on the same device as the model, then bring the result back to the CPU for plotting
if torch.cuda.is_available():
    predict = model(Variable(x_train).cuda())
    predict = predict.data.cpu().numpy()
else:
    predict = model(Variable(x_train))
    predict = predict.data.numpy()
plt.plot(x_train.numpy(), y_train.numpy(), 'ro', label='Original data')
plt.plot(x_train.numpy(), predict, label='Fitting Line')
plt.legend() # render the labels
plt.show()
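
The trained model can also score inputs it has never seen. A minimal sketch, with x_new as a made-up value:

# Predict y for a new x; wrap in no_grad since no gradients are needed
x_new = torch.tensor([[6.0]])  # hypothetical new input, not from the original post
if torch.cuda.is_available():
    x_new = x_new.cuda()
with torch.no_grad():
    y_new = model(x_new)
print(y_new.item())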

[Figure: the original data points (red dots) and the fitted line]



Reposted from blog.csdn.net/weixin_44478378/article/details/104313983