# Logistic regression model: binary-classification probability example

import torch
import torch.nn.functional as F
import matplotlib.pylab as plt
import torch.nn as nn
import numpy as np


#1.构建数据集 y=x+1
x=torch.tensor([[1.0],
                [2.0],
                [3.0]])
y=torch.tensor([[0.0],
                [0.0],
                [1.0]])

plt.plot(x,y)
plt.show()

#2.搭建神经网络
class LogisticRegressionModel(nn.Module):
    def __init__(self,num_input,num_output):
        super(LogisticRegressionModel,self).__init__() #这里遗忘
        self.predict=nn.Linear(num_input,num_output,bias=True)   #与线性回归一样

    def forward(self,x):
        y=torch.sigmoid(self.predict(x)) #用sigmoid 将线性变换后的值映射到0~1之间
        return y

net=LogisticRegressionModel(1,1)

#3.定义优化器和损失函数
optimizer=torch.optim.SGD(net.parameters(),lr=0.05)
loss_func=nn.BCELoss() #二分类交叉熵损失函数 算概率损失 MSE 线性损失 不取均值后学习率上升没×1/N

#4.训练模型
for epoch in range(1000):
    y_prediction=net(x)
    loss=loss_func(y_prediction,y)

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()


x=np.linspace(0,10,200)# 0~10 分成200份
x_test=torch.FloatTensor(x).view((200,1))
y_test=net(x_test)
y=y_test.data.numpy()
plt.plot(x,y)
plt.xlabel('hours')
plt.ylabel('Probability of Pass')
plt.show()

# Source: blog.csdn.net/qq_21686871/article/details/114242507