Fine-tuning means 'unfreezing' a few of the top layers of a frozen pre-trained base and jointly training them together with the newly added classifier layers.
1. Data preparation
from keras.applications import VGG16
from keras import models
from keras import layers
from keras import optimizers
import os,shutil
from keras.preprocessing.image import ImageDataGenerator
# Root of the small cats-vs-dogs dataset (Kaggle layout: train/ validation/
# test/ subdirectories, one folder per class — assumed; confirm on disk).
base_dir = "/kaggle/input/cats-and-dogs-small"
train_dir = os.path.join(base_dir,'train')
validation_dir = os.path.join(base_dir,'validation')
2. Model loading and freezing
# Load the VGG16 convolutional base pre-trained on ImageNet, without its
# fully connected classifier head, for 150x150 RGB inputs.
conv_base = VGG16(weights='imagenet', include_top=False, input_shape=(150, 150, 3))

# Fine-tuning: unfreeze only the layers from 'block5_conv1' onward;
# every layer before it stays frozen so its pre-trained weights are kept.
conv_base.trainable = True
unfreeze = False
for layer in conv_base.layers:
    if layer.name == 'block5_conv1':
        unfreeze = True
    layer.trainable = unfreeze
3. Add a classifier
# Stack the convolutional base with a small densely connected classifier:
# a 256-unit ReLU hidden layer and a single sigmoid output for the
# binary cats-vs-dogs decision.
model = models.Sequential([
    conv_base,
    layers.Flatten(),
    layers.Dense(256, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
Check which parameters are trainable after freezing (e.g. inspect model.summary() and confirm only the block5 and classifier weights are updated).
4. Generate iterator
# Augment the training images only; validation images are just rescaled
# to [0, 1] so evaluation reflects the real data distribution.
train_datagen = ImageDataGenerator(
    rescale=1.0 / 255,
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
)
test_datagen = ImageDataGenerator(rescale=1.0 / 255)

# Both generators yield batches of 20 (image, binary-label) pairs,
# resized to the 150x150 input the model expects.
train_generator = train_datagen.flow_from_directory(
    train_dir, target_size=(150, 150), batch_size=20, class_mode='binary')
validation_generator = test_datagen.flow_from_directory(
    validation_dir, target_size=(150, 150), batch_size=20, class_mode='binary')
5. Generate the optimizer and start training
# Compile with a very low learning rate so fine-tuning makes only small
# updates to the unfrozen block5 weights (large steps would destroy the
# pre-trained representations).
# Fix: `lr` and `fit_generator` are deprecated and removed in TF 2.x
# Keras — use `learning_rate` and `Model.fit`, which accepts generators.
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=2e-5),
              metrics=['acc'])
history = model.fit(train_generator,
                    steps_per_epoch=100,
                    epochs=50,
                    validation_data=validation_generator,
                    validation_steps=50)
6. Draw a curve graph
import matplotlib.pyplot as plt

# Pull the per-epoch metrics recorded by the training run.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)

# Accuracy curves: dots = training, solid line = validation.
plt.subplot(2, 2, 1)
plt.plot(epochs, acc, 'bo', label='training acc')
plt.plot(epochs, val_acc, 'b', label='validation acc')
plt.title('training and validation acc')
plt.legend()

# Loss curves, same convention.
plt.subplot(2, 2, 2)
plt.plot(epochs, loss, 'bo', label='training loss')
plt.plot(epochs, val_loss, 'b', label='validation loss')
plt.title('training and validation loss')
plt.legend()
plt.show()
7. Smooth curve
# Re-import pyplot and re-read the history metrics so this section can
# be run independently of section 6 (the values are the same).
import matplotlib.pyplot as plt
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
def smooth_curve(points, factor=0.8):
    """Exponentially smooth a sequence of values.

    Each output point is a weighted blend of the previous smoothed point
    (weight ``factor``) and the current raw point (weight ``1 - factor``);
    the first point is passed through unchanged. Returns a new list of
    the same length as ``points``.
    """
    smoothed = []
    for value in points:
        if not smoothed:
            # No history yet: seed the running average with the raw value.
            smoothed.append(value)
        else:
            smoothed.append(smoothed[-1] * factor + value * (1 - factor))
    return smoothed
# Plot the exponentially smoothed metric curves; smoothing makes the
# overall trend easier to read through the epoch-to-epoch noise.
plt.subplot(2, 2, 1)
plt.plot(epochs, smooth_curve(acc), 'bo', label='smoothed training acc')
# Fix: this label was previously misspelled as 'vaildation'.
plt.plot(epochs, smooth_curve(val_acc), 'b', label='smoothed validation acc')
plt.title('training and validation acc')
plt.legend()
plt.subplot(2, 2, 2)
plt.plot(epochs, smooth_curve(loss), 'bo', label='smoothed training loss')
plt.plot(epochs, smooth_curve(val_loss), 'b', label='smoothed validation loss')
plt.title('training and validation loss')
plt.legend()
plt.show()
8. Evaluation model
# Evaluate the fine-tuned model on the held-out test set (same rescaling
# and batch settings as the validation generator).
test_dir = os.path.join(base_dir, 'test')
test_datagen = ImageDataGenerator(rescale=1.0 / 255)
test_generator = test_datagen.flow_from_directory(
    test_dir, target_size=(150, 150), batch_size=20, class_mode='binary')
# Fix: `evaluate_generator` is deprecated and removed in TF 2.x Keras —
# `Model.evaluate` accepts generators directly.
test_loss, test_acc = model.evaluate(test_generator, steps=50)
print('test_acc:', test_acc)