吴恩达第四课第二周编程作业

本次作业目的

Keras与残差网络的搭建

下载地址

链接:https://pan.baidu.com/s/1Gu5RYjnq4b5YSSgOAPYwTA 
提取码:1hyn 
 

代码

Keras

import numpy as np
from keras import layers
from keras.layers import Input, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D
from keras.layers import AveragePooling2D, MaxPooling2D, Dropout, GlobalMaxPooling2D, GlobalAveragePooling2D
from keras.models import Model
from keras.preprocessing import image
from keras.utils import layer_utils
from keras.utils.data_utils import get_file
from keras.applications.imagenet_utils import preprocess_input
import pydot
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
import kt_utils
import keras.backend as K
K.set_image_data_format('channels_last')
import matplotlib.pyplot as plt
from matplotlib.pyplot import imshow
import os
os.environ['KERAS_BACKEND']='tensorflow'

# Load the "happy face" dataset via the course helper module (kt_utils).
X_train_orig,Y_train_orig,X_test_orig,Y_test_orig,classes=kt_utils.load_dataset()
# Normalize pixel intensities from [0, 255] to [0, 1].
X_train=X_train_orig/255
X_test=X_test_orig/255
# Transpose the label arrays — presumably from shape (1, m) to (m, 1) so each
# row corresponds to one example; verify against kt_utils.load_dataset.
Y_train=Y_train_orig.T
Y_test=Y_test_orig.T

def model(input_shape):
    """
    Build a small binary classifier: ZeroPad -> CONV -> BN -> RELU -> MAXPOOL
    -> FLATTEN -> DENSE(sigmoid).

    :param input_shape: shape of one input image, e.g. (height, width, channels)
    :return: an uncompiled Keras Model named 'HappyModel'
    """
    # Placeholder for the input batch
    inputs = Input(input_shape)
    # Zero-pad the borders before the 7x7 convolution
    net = ZeroPadding2D((3, 3))(inputs)
    # CONV -> BN -> RELU block
    net = Conv2D(32, (7, 7), strides=(1, 1), name='conv0')(net)
    net = BatchNormalization(axis=3, name='bn0')(net)
    net = Activation('relu')(net)
    # Spatial downsampling
    net = MaxPooling2D((2, 2), name='max_pool')(net)
    # Flatten to a vector, then a single sigmoid output unit
    net = Flatten()(net)
    net = Dense(1, activation='sigmoid', name='fc')(net)

    return Model(inputs=inputs, outputs=net, name='HappyModel')
def HappyModel(input_shape):
    """
    Assemble the HappyModel classifier:
    ZeroPad -> CONV(32, 7x7) -> BN -> RELU -> MAXPOOL -> FLATTEN -> DENSE(sigmoid).

    :param input_shape: shape of one input image (height, width, channels)
    :return: an uncompiled Keras Model named 'HappyModel'
    """
    # Input placeholder for the image batch
    X_input = Input(input_shape)

    # Pad, convolve, normalize, activate — the CONV -> BN -> RELU stem
    X = Activation('relu')(
        BatchNormalization(axis=3, name='bn0')(
            Conv2D(32, (7, 7), strides=(1, 1), name='conv0')(
                ZeroPadding2D((3, 3))(X_input))))

    # Max-pool, flatten, and classify with a single sigmoid unit
    X = MaxPooling2D((2, 2), name='max_pool')(X)
    X = Dense(1, activation='sigmoid', name='fc')(Flatten()(X))

    return Model(inputs=X_input, outputs=X, name='HappyModel')


残差

import numpy as np
import tensorflow as tf

from keras import layers
from keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D
from keras.models import Model, load_model
from keras.preprocessing import image
from keras.utils import layer_utils
from keras.utils.data_utils import get_file
from keras.applications.imagenet_utils import preprocess_input
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
from keras.initializers import glorot_uniform

import pydot
from IPython.display import SVG
import scipy.misc
from matplotlib.pyplot import imshow
import keras.backend as K
K.set_image_data_format('channels_last')
K.set_learning_phase(1)

import resnets_utils
import os
os.environ['KERAS_BACKEND']='tensorflow'

# Identity block: residual block whose shortcut is the unmodified input
def identity_block(X, f, filters, stage, block):
    """
    ResNet identity block — used when the input and output tensors have the
    same shape, so the shortcut path needs no convolution.

    :param X: input tensor
    :param f: kernel size of the middle convolution in the main path
    :param filters: list of three ints, the filter counts of the three convs
    :param stage: integer; names each layer by its position in the network
    :param block: string; names each layer together with `stage`
    :return: output tensor of the block
    """
    conv_prefix = 'res' + str(stage) + block + '_branch'
    bn_prefix = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters

    # Remember the input for the shortcut connection
    shortcut = X

    # Main path, part 1: 1x1 conv -> BN -> ReLU
    out = Conv2D(filters=F1, kernel_size=(1, 1), strides=(1, 1), padding='valid',
                 name=conv_prefix + '2a', kernel_initializer=glorot_uniform(seed=0))(X)
    out = BatchNormalization(axis=3, name=bn_prefix + '2a')(out)
    out = Activation('relu')(out)

    # Main path, part 2: fxf conv (same padding) -> BN -> ReLU
    out = Conv2D(filters=F2, kernel_size=(f, f), strides=(1, 1), padding='same',
                 name=conv_prefix + '2b', kernel_initializer=glorot_uniform(seed=0))(out)
    out = BatchNormalization(axis=3, name=bn_prefix + '2b')(out)
    out = Activation('relu')(out)

    # Main path, part 3: 1x1 conv -> BN — deliberately NO activation here;
    # the ReLU is applied only after the shortcut addition
    out = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid',
                 name=conv_prefix + '2c', kernel_initializer=glorot_uniform(seed=0))(out)
    out = BatchNormalization(axis=3, name=bn_prefix + '2c')(out)

    # Add the shortcut, then activate
    out = Add()([out, shortcut])
    return Activation('relu')(out)
# Convolutional block: residual block with a 1x1 projection on the shortcut
def convolutional_block(X, f, filters, stage, block, s=2):
    """
    ResNet convolutional block — used when the input and output shapes differ,
    so the shortcut path needs a 1x1 convolution (stride s) to match them.

    :param X: input tensor
    :param f: kernel size of the middle convolution in the main path
    :param filters: list of three ints, the filter counts of the three convs
    :param stage: integer; names each layer by its position in the network
    :param block: string; names each layer together with `stage`
    :param s: integer stride of the first conv and of the shortcut conv
    :return: output tensor of the block
    """
    conv_name_base='res'+str(stage)+block+'_branch'
    bn_name_base='bn'+str(stage)+block+'_branch'
    F1,F2,F3=filters
    X_shortcut=X

    # Main path, part 1 (downsamples spatially by stride s)
    X=Conv2D(filters=F1,kernel_size=(1,1),strides=(s,s),padding='valid',
             name=conv_name_base+'2a',kernel_initializer=glorot_uniform(seed=0))(X)
    # BUG FIX: the first two BN layers both used the bare `bn_name_base`,
    # producing duplicate layer names (Keras rejects those at build time).
    # Suffixes '2a'/'2b'/'2c' follow the reference naming scheme.
    X=BatchNormalization(axis=3,name=bn_name_base+'2a')(X)
    X=Activation('relu')(X)

    # Main path, part 2
    X=Conv2D(filters=F2,kernel_size=(f,f),strides=(1,1),padding='same',
             name=conv_name_base+'2b',kernel_initializer=glorot_uniform(seed=0))(X)
    X=BatchNormalization(axis=3,name=bn_name_base+'2b')(X)
    X=Activation('relu')(X)

    # Main path, part 3 — note: no activation here; the ReLU comes after the add
    # BUG FIX: the name suffix was '3c'; the reference scheme uses '2c'.
    X=Conv2D(filters=F3,kernel_size=(1,1),strides=(1,1),padding='valid',
             name=conv_name_base+'2c',kernel_initializer=glorot_uniform(seed=0))(X)
    X=BatchNormalization(axis=3,name=bn_name_base+'2c')(X)

    # Shortcut path: 1x1 conv so shapes match the main path before addition
    X_shortcut=Conv2D(filters=F3,kernel_size=(1,1),strides=(s,s),padding='valid',
             name=conv_name_base+'1',kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
    X_shortcut=BatchNormalization(axis=3,name=bn_name_base+'1')(X_shortcut)

    # Add shortcut to the main path, then apply the final ReLU
    X=Add()([X,X_shortcut])
    X=Activation('relu')(X)
    return X
# 50-layer residual network
def ResNet50(input_shape=(64,64,3),classes=6):
    """
    Build ResNet-50:
    CONV -> BN -> RELU -> MAXPOOL
    -> [CONVBLOCK + 2x IDBLOCK]   (stage 2)
    -> [CONVBLOCK + 3x IDBLOCK]   (stage 3)
    -> [CONVBLOCK + 5x IDBLOCK]   (stage 4)
    -> [CONVBLOCK + 2x IDBLOCK]   (stage 5)
    -> AVGPOOL -> FLATTEN -> DENSE(softmax)

    :param input_shape: shape of the input images
    :param classes: number of output classes
    :return: a Keras Model instance named 'ResNet50'
    """
    X_input = Input(input_shape)
    X = ZeroPadding2D((3, 3))(X_input)

    # Stage 1: 7x7 conv with stride 2, then 3x3 max pool with stride 2
    X = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2),
               name='conv1', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name='bn_conv1')(X)
    X = Activation('relu')(X)
    X = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(X)

    # Stage 2 (s=1: the max pool above already downsampled)
    X = convolutional_block(X, f=3, filters=[64, 64, 256], stage=2, block='a', s=1)
    X = identity_block(X, f=3, filters=[64, 64, 256], stage=2, block='b')
    X = identity_block(X, f=3, filters=[64, 64, 256], stage=2, block='c')

    # Stage 3
    # BUG FIX: stride was s=1; ResNet-50 halves the spatial size here (s=2).
    X = convolutional_block(X, f=3, filters=[128, 128, 512], stage=3, block='a', s=2)
    X = identity_block(X, f=3, filters=[128, 128, 512], stage=3, block='b')
    X = identity_block(X, f=3, filters=[128, 128, 512], stage=3, block='c')
    X = identity_block(X, f=3, filters=[128, 128, 512], stage=3, block='d')

    # Stage 4
    # BUG FIX: `stage` was mistyped as 3, which duplicates stage-3 layer names.
    X = convolutional_block(X, f=3, filters=[256, 256, 1024], stage=4, block='a', s=2)
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='b')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='c')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='d')
    # BUG FIX: identity block 'e' was missing (the original jumped from 'd' to 'f');
    # ResNet-50's stage 4 has five identity blocks.
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='e')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='f')

    # Stage 5
    X = convolutional_block(X, f=3, filters=[512, 512, 2048], stage=5, block='a', s=2)
    # BUG FIX: the identity blocks used bottleneck widths [256, 256, 2048];
    # the reference ResNet-50 stage 5 uses [512, 512, 2048].
    X = identity_block(X, f=3, filters=[512, 512, 2048], stage=5, block='b')
    X = identity_block(X, f=3, filters=[512, 512, 2048], stage=5, block='c')

    # Global average pooling over the remaining spatial dimensions
    X = AveragePooling2D(pool_size=(2, 2), padding='same')(X)

    # Output layer: flatten, then a fully-connected softmax classifier
    X = Flatten()(X)
    X = Dense(classes, activation='softmax', name='fc' + str(classes),
              kernel_initializer=glorot_uniform(seed=0))(X)

    model = Model(inputs=X_input, outputs=X, name='ResNet50')
    return model

参考网址:https://blog.csdn.net/u013733326/article/details/80250818

发布了19 篇原创文章 · 获赞 3 · 访问量 1407

猜你喜欢

转载自blog.csdn.net/qq_41705596/article/details/93983731