Winter Break PyTorch Tools, Day 2

Course Notes

From dynamic graphs to logistic regression classification


Course Code

import torch
import torch.nn as nn


# data
def get_data():
    """Generate two Gaussian clusters for binary classification."""
    mean_val = 10.
    num_data = 20

    # class 0: points centered at (+mean_val, +mean_val), std 1
    x0 = torch.normal(mean_val * torch.ones(num_data, 2), 1)
    y0 = torch.zeros(num_data)

    # class 1: points centered at (-mean_val, -mean_val), std 1
    x1 = torch.normal(-mean_val * torch.ones(num_data, 2), 1)
    y1 = torch.ones(num_data)

    # mix the two classes into one training set
    x = torch.cat([x0, x1], dim=0)
    y = torch.cat([y0, y1], dim=0)
    return x, y

train_x, train_y = get_data()
print('data:', train_x, train_y)

# model
class Logistic(nn.Module):
    def __init__(self):
        super(Logistic, self).__init__()
        self.feature = nn.Linear(2, 1)   # 2 input features -> 1 logit
        self.activate = nn.Sigmoid()     # squash the logit to a probability in (0, 1)

    def forward(self, x):
        x = self.feature(x)
        x = self.activate(x)
        x = x.squeeze()   # (N, 1) -> (N,), matching the target shape expected by BCELoss
        return x

model = Logistic()

# loss function
loss_f = nn.BCELoss()
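# BCELoss implements the binary cross-entropy
# -[ y*log(p) + (1-y)*log(1-p) ], averaged over the batch by default;
# it expects probabilities in (0, 1), hence the Sigmoid at the end of the model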

    
# optimizer
lr = 0.01
optimizer = torch.optim.SGD(model.parameters(), lr = lr, momentum=0.9)
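# SGD with momentum keeps a running velocity of past gradients
# (roughly v = momentum * v + grad, then param -= lr * v),
# which smooths and speeds up convergence on this toy problem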


# iteration
def main():
    for i in range(1000):
        # forward pass
        y_pre = model(train_x)
        loss = loss_f(y_pre, train_y)
        if i % 20 == 0:
            print('loss:', loss.item())

        # backward pass: clear stale gradients before computing new ones,
        # otherwise .backward() accumulates gradients across iterations
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

if __name__ == '__main__':
    main()
    print('finished...')
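
The lesson code only logs the loss. As a small extra (not part of the original course code), here is a minimal sketch of checking training accuracy after main() has run; since Sigmoid outputs a probability, thresholding it at 0.5 turns it into a hard class prediction:

# evaluation sketch (an addition, not from the original lesson)
with torch.no_grad():
    prob = model(train_x)                    # probabilities in (0, 1)
    pred = (prob >= 0.5).float()             # p >= 0.5 -> class 1, else class 0
    acc = (pred == train_y).float().mean()
    print('train accuracy:', acc.item())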

Homework

1. Why can a logistic regression model perform binary classification?

2. Implement and train the logistic regression model in code, and experiment with mean_value in the data generation: what happens when mean_value is set to a smaller value, such as 1, or a larger value, such as 5?

Then try adjusting only the bias: what does the training process look like when the bias is made larger, or negative? (A sketch for running these experiments follows this list.)
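
Not an official answer, but a minimal sketch for experimenting with question 2. It assumes a hypothetical refactor of get_data (the lesson hard-codes mean_val = 10) so the cluster mean can be passed in; the bias part can be probed by re-initializing the linear layer's bias:

# hypothetical refactor: expose the cluster mean as a parameter
def get_data_with_mean(mean_val, num_data=20):
    x0 = torch.normal(mean_val * torch.ones(num_data, 2), 1)    # class 0
    x1 = torch.normal(-mean_val * torch.ones(num_data, 2), 1)   # class 1
    x = torch.cat([x0, x1], dim=0)
    y = torch.cat([torch.zeros(num_data), torch.ones(num_data)], dim=0)
    return x, y

# smaller mean -> the two clusters overlap more and the loss falls more slowly
for m in [1., 5., 10.]:
    x, y = get_data_with_mean(m)
    # re-create Logistic(), the optimizer, and rerun the training loop on (x, y)

# for the bias part of the question, overwrite the bias before training, e.g.:
# torch.nn.init.constant_(model.feature.bias, 10.)   # or a negative value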

Answer

Omitted; but the experiments do show how much normalization matters with a sigmoid activation: adding it is definitely better than leaving it out.
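
A minimal sketch of what adding normalization could look like here, assuming it means standardizing the inputs before they reach the sigmoid-activated model (the post itself omits this step):

# standardize each feature to zero mean and unit variance, which keeps the
# linear layer's pre-activations inside sigmoid's non-saturated range
mean = train_x.mean(dim=0)
std = train_x.std(dim=0)
train_x_norm = (train_x - mean) / (std + 1e-8)   # epsilon avoids division by zero

y_pre = model(train_x_norm)   # then train exactly as before, using train_x_norm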

Reposted from blog.csdn.net/u013625492/article/details/114213573