cv-AlexNet study notes

#AlexNet

data set

ILSVRC2010

The use of technology

  1. ReLU activation
  2. Train on multiple GPUs
  3. LRN
  4. overlap pooling
  5. Overall architecture

Means of preventing over-fitting

  1. Data augmentation
  2. Dropout

Learning details

  • batch-size:128
  • momentum 0.9, weight decay 0.0005 (value used in the AlexNet paper)

result

keras code implementation

#%%
from tensorflow import keras
from tensorflow.keras import layers,models
from tensorflow.keras.preprocessing import image 
import matplotlib.pyplot as plt
import os
import numpy as np

#%%

#%%
# Root of the cats-vs-dogs dataset; expects training_set/ and test_set/
# subdirectories, each with one folder per class.
base_dir = r"./datasets/cat-and-dog"
train_dir, test_dir = (
    os.path.join(base_dir, sub) for sub in ("training_set", "test_set")
)

#%%
# 查看数据
#%%
# Input pipeline: scale raw pixel values from [0, 255] down to [0, 1].
train_gen = image.ImageDataGenerator(
    rescale=1./255
)
# BUG FIX: was rescale=1.255, which MULTIPLIED validation pixels by 1.255
# instead of normalizing them; validation must use the same 1/255 scaling
# as training or val metrics are meaningless.
val_gen = image.ImageDataGenerator(
    rescale=1./255
)
# 227x227 is AlexNet's expected input size. class_mode defaults to
# 'categorical' (one-hot labels), matching the softmax/categorical_crossentropy
# setup below.
train_generator = train_gen.flow_from_directory(
    train_dir,
    target_size=(227, 227)
)
val_generator = val_gen.flow_from_directory(
    test_dir,
    target_size=(227, 227)
)

#%%
# Preview one training batch as a 4x8 image grid.
# NOTE(review): assumes the default batch size of 32 (= 4*8) and that
# flow_from_directory assigned class 0 to 'cats' and 1 to 'dogs'
# (alphabetical folder order) — verify against the dataset layout.
names = ['cats', 'dogs']

# Fetch exactly one batch. The original for-loop with a counter pulled a
# second, unused batch from the generator before breaking.
batch_image, batch_label = next(train_generator)
print("batch_image_shape:", batch_image.shape)
print("batch_label_shape:", batch_label.shape)

plt.figure()
for r in range(4):
    for c in range(8):
        im_idx = r * 8 + c
        plt.subplot(4, 8, im_idx + 1)
        plt.imshow(batch_image[im_idx])
        # Labels are one-hot (class_mode='categorical'); argmax recovers the
        # integer class index — this is why the original title line had to be
        # commented out (an array cannot index the names list).
        plt.title(names[int(np.argmax(batch_label[im_idx]))])
        plt.xticks([])
        plt.yticks([])
plt.show()

#%%
# AlexNet (Krizhevsky et al., 2012) adapted to a 2-class problem.
# 227x227x3 input -> five conv stages -> two 4096-unit dense layers -> softmax.
alexNet_input = keras.Input(shape=(227, 227, 3))

# Stage 1: 96 filters, 11x11, stride 4, no padding -> 55x55x96.
# (The paper applies local response normalization after this stage; omitted here.)
net = layers.Conv2D(96, 11, strides=4, activation='relu',
                    kernel_initializer='uniform')(alexNet_input)
net = layers.MaxPooling2D(pool_size=3, strides=2)(net)

# Stage 2: 256 filters, 5x5, 'same' padding. (LRN omitted as above.)
net = layers.Conv2D(256, 5, strides=1, padding='same', activation='relu',
                    kernel_initializer='uniform')(net)
net = layers.MaxPooling2D(pool_size=3, strides=2)(net)

# Stages 3-5: three consecutive 3x3 convolutions, then one final pool.
for filters in (384, 384, 256):
    net = layers.Conv2D(filters, 3, strides=1, padding='same',
                        activation='relu', kernel_initializer='uniform')(net)
net = layers.MaxPooling2D(pool_size=3, strides=2)(net)

# Classifier head: Flatten -> (Dense 4096 + Dropout 0.5) x 2 -> softmax(2).
net = layers.Flatten()(net)
for _ in range(2):
    net = layers.Dense(4096, activation='relu')(net)
    net = layers.Dropout(0.5)(net)
alexNet_output = layers.Dense(2, activation='softmax')(net)

alexNet = keras.Model(alexNet_input, alexNet_output, name='alexNet')
alexNet.summary()
#%%
# SGD optimizer per the study notes; categorical loss matches the one-hot
# labels produced by flow_from_directory's default class_mode.
alexNet.compile(
    optimizer='SGD',
    loss='categorical_crossentropy',
    metrics=['accuracy']
)

# FIX: Model.fit accepts generators directly in TF2; fit_generator is
# deprecated and was removed in TF 2.6+, so the original call breaks on
# current TensorFlow.
history = alexNet.fit(
    train_generator,
    validation_data=val_generator,
    epochs=20,
    verbose=1
)

#%%
# FIX: with metrics=['accuracy'] TF2 Keras records the history under
# 'accuracy'/'val_accuracy'; the old 'acc'/'val_acc' keys raise KeyError.
plt.plot(history.history['accuracy'], label='acc')
plt.plot(history.history['val_accuracy'], label='val_acc')
plt.xlabel('Epoch')
plt.ylabel('Acc')
plt.ylim([0.1, 1])
plt.legend(loc='lower right')
plt.show()
#%%

Guess you like

Origin www.cnblogs.com/zhouyu0-0/p/11741227.html