Optimizer (1)
PyTorch
Official documentation link:
https://pytorch.org/docs/stable/optim.html
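Every optimizer in torch.optim is driven by the same three calls inside the training loop: zero_grad() to clear old gradients, backward() on the loss to compute new ones, and step() to update the parameters. A minimal runnable sketch of that pattern (the toy model and fake batch here are illustrative, not part of the example below):

import torch

model = torch.nn.Linear(4, 1)                              # toy model
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
x, y = torch.randn(8, 4), torch.randn(8, 1)                # fake batch
loss = torch.nn.functional.mse_loss(model(x), y)
optimizer.zero_grad()   # clear gradients left over from the previous step
loss.backward()         # backpropagate to compute fresh gradients
optimizer.step()        # update the parameters with those gradients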
Example:
import torch
from torch import nn
import torchvision
from torch.utils.data import DataLoader
train_data = torchvision.datasets.CIFAR10(root="./dataset", train=True, transform=torchvision.transforms.ToTensor(), download=True)
# Load the dataset with DataLoader
train_dataloader = DataLoader(train_data, batch_size=64)
# Choose the training device
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")  # train on GPU when available
# Build the neural network
class Module(nn.Module):
    def __init__(self):
        super().__init__()
        self.model = nn.Sequential(
            nn.Conv2d(3, 32, 5, 1, 2),   # convolution
            nn.MaxPool2d(2),             # max pooling
            nn.Conv2d(32, 32, 5, 1, 2),
            nn.MaxPool2d(2),
            nn.Conv2d(32, 64, 5, 1, 2),
            nn.MaxPool2d(2),
            nn.Flatten(),
            nn.Linear(64 * 4 * 4, 64),
            nn.Linear(64, 10)
        )

    def forward(self, input):
        input = self.model(input)
        return input
# Create an instance of the network
mymodule = Module()
mymodule = mymodule.to(device)
# Loss function (cross-entropy)
loss = nn.CrossEntropyLoss()
loss = loss.to(device)
# Track the number of training steps
# total_train_step = 0
# Optimizer
optim = torch.optim.SGD(mymodule.parameters(), lr=0.01)  # mymodule.parameters() are the parameters to optimize; lr=0.01 is the learning rate
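# As an illustrative alternative (not part of the original example), any other
# torch.optim optimizer is a drop-in replacement here, e.g. Adam with a typical lr:
# optim = torch.optim.Adam(mymodule.parameters(), lr=1e-3)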
if __name__ == '__main__':
    for epoch in range(20):
        mymodule.train()
        print("------Epoch {} training-----".format(epoch + 1))
        # Accumulate the loss over this epoch
        running_loss = 0.0
        for data in train_dataloader:
            imgs, targets = data
            imgs = imgs.to(device)
            targets = targets.to(device)
            # Feed the images through the network to get its outputs
            outputs = mymodule(imgs)
            # Apply the loss function
            result_loss = loss(outputs, targets)  # compute the loss value
            # Optimize the parameters
            optim.zero_grad()        # clear the gradients from the previous step
            result_loss.backward()   # backpropagate to compute the gradient at every node
            optim.step()             # update every parameter
            # Add this batch's loss to the running total (.item() detaches it from the graph)
            running_loss = running_loss + result_loss.item()
            # total_train_step += 1
            # if total_train_step % 100 == 0:
            #     print("Training steps: {}, loss: {}".format(total_train_step, result_loss.item()))
        print("Epoch {} training loss: {}".format(epoch + 1, running_loss))
Part of the results:
Files already downloaded and verified
------Epoch 1 training-----
Epoch 1 training loss: 1713.95751953125
------Epoch 2 training-----
Epoch 2 training loss: 1468.3868408203125
------Epoch 3 training-----
Epoch 3 training loss: 1311.38818359375
------Epoch 4 training-----
Epoch 4 training loss: 1215.1497802734375
### As training continues over more epochs, the loss should keep decreasing
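The optimizer is also where momentum and learning-rate schedules plug in. A minimal sketch reusing mymodule, loss, and train_dataloader from above; the momentum and StepLR values are illustrative defaults, not taken from the original:

optim = torch.optim.SGD(mymodule.parameters(), lr=0.01, momentum=0.9)
scheduler = torch.optim.lr_scheduler.StepLR(optim, step_size=5, gamma=0.5)
for epoch in range(20):
    for imgs, targets in train_dataloader:
        imgs, targets = imgs.to(device), targets.to(device)
        result_loss = loss(mymodule(imgs), targets)
        optim.zero_grad()
        result_loss.backward()
        optim.step()
    scheduler.step()  # halve the learning rate every 5 epochs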