# PyTorch re-implementations of the classic CNNs AlexNet and ResNet

import torch
from torch.autograd import Variable
import torch.nn as nn

print(torch.__version__)

# Quick reference for the torch / torch.nn building blocks used below.
# (Corrected names and keywords: nn.ReLU, nn.AvgPool2d, stride=, and
# torch.flatten's start_dim/end_dim.)
"""
nn.Conv2d(in_channels, out_channels, kernel_size=, stride=, padding=)
nn.MaxPool2d(kernel_size=, stride=)
nn.AvgPool2d(kernel_size=, stride=)
nn.AdaptiveAvgPool2d(output_size)
nn.Linear(in_features, out_features)
nn.ReLU()
nn.Dropout(p=0.5)
nn.BatchNorm2d(num_features)
torch.flatten(x, start_dim=, end_dim=)
"""

# AlexNet, following the original paper (Krizhevsky et al., 2012)

class AlexNet(nn.Module):
    """AlexNet (half-width, single-branch variant).

    Expects input of shape (N, 3, 224, 224) and returns raw class logits
    of shape (N, num_classes). No final softmax: pair with
    nn.CrossEntropyLoss, which applies log-softmax internally.
    """

    def __init__(self, num_classes=1000):
        super(AlexNet, self).__init__()
        # Convolutional feature extractor. Spatial sizes for 224x224 input:
        # 224 -> 54 -> 54 -> 27 -> 27 -> 13 -> 13 -> 13 -> 6
        self.features = nn.Sequential(
            nn.Conv2d(3, 48, kernel_size=11, stride=4, padding=1),
            nn.ReLU(),
            nn.Conv2d(48, 128, kernel_size=5, stride=1, padding=2),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.Conv2d(128, 192, kernel_size=3, stride=1, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.Conv2d(192, 192, kernel_size=3, stride=1, padding=1),
            nn.ReLU(),
            nn.Conv2d(192, 128, kernel_size=3, stride=1, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2)
        )
        self.classifier = nn.Sequential(
            nn.Linear(6 * 6 * 128, 2048),
            nn.ReLU(),
            # BUG FIX: was nn.Dropout(0, 5) -- two positional args mean
            # p=0 (dropout disabled) and a truthy inplace=5. The paper
            # uses drop probability 0.5.
            nn.Dropout(0.5),
            nn.Linear(2048, 2048),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.Linear(2048, num_classes)
            # Final activation and loss are applied together by the caller.
        )

    def forward(self, img):
        img = self.features(img)
        # BUG FIX: torch.flatten's keywords are start_dim/end_dim, not
        # start/end (the old call raised TypeError). Keep the batch dim.
        img = torch.flatten(img, start_dim=1, end_dim=-1)
        img = self.classifier(img)
        return img


# Example instantiation:
# module = AlexNet(10)

# Residual block (the basic ResNet building unit)
class ResidualBlock(nn.Module):
    """Basic two-convolution residual block: y = F(x) + shortcut(x).

    F is conv3x3 -> ReLU -> conv3x3 (stride 1, spatial size preserved).
    When the channel count changes, a 1x1 projection is applied to the
    shortcut so the addition is well-defined.
    """

    def __init__(self, num_filters_in=64, num_filters_out=64):
        super(ResidualBlock, self).__init__()
        self.features = nn.Sequential(
            nn.Conv2d(num_filters_in, num_filters_out, kernel_size=3, stride=1, padding=1),
            nn.ReLU(),
            # BUG FIX: the second conv's input channels must be the output
            # channels of the first conv (was num_filters_in, which crashed
            # whenever num_filters_in != num_filters_out).
            nn.Conv2d(num_filters_out, num_filters_out, kernel_size=3, stride=1, padding=1),
        )
        # BUG FIX: projection shortcut. Without it, features(x) + x below
        # fails on any channel change (e.g. ResidualBlock(64, 128)).
        if num_filters_in != num_filters_out:
            self.shortcut = nn.Conv2d(num_filters_in, num_filters_out, kernel_size=1)
        else:
            self.shortcut = nn.Identity()

    def forward(self, x):
        return self.features(x) + self.shortcut(x)


# ResNet, following the original paper (He et al., 2015)
class ResNet(nn.Module):
    """ResNet-34-style network built from ResidualBlocks.

    Stages follow the 3/4/6/3 block pattern of ResNet-34 (without
    strided downsampling inside the stages). Expects (N, 3, H, W)
    input; returns logits of shape (N, num_classes).
    """

    def __init__(self, num_classes=1000):
        super(ResNet, self).__init__()
        self.res1 = ResidualBlock(64, 64)
        self.res2 = ResidualBlock(64, 64)
        self.res3 = ResidualBlock(64, 64)
        self.res4 = ResidualBlock(64, 128)
        self.res5 = ResidualBlock(128, 128)
        self.res6 = ResidualBlock(128, 128)
        self.res7 = ResidualBlock(128, 128)
        self.res8 = ResidualBlock(128, 256)
        self.res9 = ResidualBlock(256, 256)
        self.res10 = ResidualBlock(256, 256)
        self.res11 = ResidualBlock(256, 256)
        self.res12 = ResidualBlock(256, 256)
        self.res13 = ResidualBlock(256, 256)
        self.res14 = ResidualBlock(256, 512)
        self.res15 = ResidualBlock(512, 512)
        self.res16 = ResidualBlock(512, 512)

        self.features = nn.Sequential(
            # BUG FIX: the standard 7x7 stride-2 stem uses padding=3
            # (was padding=1, which shrinks the map off-spec).
            nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2)
        )
        # BUG FIX: a fixed AvgPool2d(2) left a 27x27 map for 224x224
        # input, which does not match the 7*7*512 classifier below.
        # Adaptive pooling guarantees a 7x7 map for any input size.
        self.pool = nn.AdaptiveAvgPool2d((7, 7))
        # BUG FIX: honour num_classes (the output size was hard-coded 1000).
        self.classifier = nn.Linear(7 * 7 * 512, num_classes)

    def forward(self, x):
        x = self.features(x)
        # Run the residual stages res1..res16 in order.
        for i in range(1, 17):
            x = getattr(self, 'res%d' % i)(x)
        x = self.pool(x)
        # BUG FIX: flatten from dim 1 so the batch dimension survives
        # (torch.flatten(x) collapsed it, breaking batched inference
        # and the Linear layer's expected input size).
        x = torch.flatten(x, start_dim=1)
        x = self.classifier(x)
        return x

# Adapted from: blog.csdn.net/qq_41251963/article/details/107870600