Alternating training of selected layers in TensorFlow

Problem: the model is constrained by multiple losses, and certain layers need to be trained separately, so that each loss only updates its own subset of layers.

Code:

    Method 1:

import tensorflow as tf
import tensorflow.contrib.slim as slim

# tf.get_collection matches `scope` as a regex (re.match), so join scopes with '|'.
first_train_vars = tf.get_collection(
    tf.GraphKeys.TRAINABLE_VARIABLES,
    scope='conv0|dense_1|trans_1|dense_2|trans_2|dense_3|trans_3|dense_4|linear_batch|linear')
optimizer_1 = tf.train.AdamOptimizer(learning_rate=lr)
# variables_to_train (not update_ops) restricts which variables get updated;
# update_ops is for ops like batch-norm moving-average updates.
train_op_1 = slim.learning.create_train_op(loss, optimizer_1, variables_to_train=first_train_vars)

second_train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='conv_0|conv_1|conv_2')
optimizer_2 = tf.train.AdamOptimizer(learning_rate=lr / 1000)
train_op_2 = slim.learning.create_train_op(loss_cng, optimizer_2, variables_to_train=second_train_vars)
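
As an aside, the reason the original `scope='conv0' and 'dense_1' and ...` misbehaved is Python's `and` operator: on truthy operands it simply returns the last one, so only a single scope name ever reached tf.get_collection. A quick plain-Python check makes this visible (the scope names are just the ones from the snippet above):

# `and` on non-empty strings returns the last operand.
scope_arg = 'conv0' and 'dense_1' and 'trans_1'
print(scope_arg)  # -> trans_1

# The regex form matches every intended scope prefix instead.
import re
for name in ['conv0/w:0', 'dense_1/kernel:0', 'trans_1/b:0']:
    print(bool(re.match('conv0|dense_1|trans_1', name)))  # -> True, three times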

    Method 2:

all_vars = tf.trainable_variables()
group_vars = ...      # scope-name prefix for the first group (str.startswith also accepts a tuple of prefixes)
second_vars = [...]   # list of scope-name prefixes for the second group

first_train_vars = [var for var in all_vars if var.name.startswith(group_vars)]
optimizer_1 = tf.train.AdamOptimizer(learning_rate=5e-6)
train_op_1 = slim.learning.create_train_op(m_loss, optimizer_1, variables_to_train=first_train_vars)

# Gather every variable whose name starts with any prefix in second_vars, flattened
# into one list (equivalently: tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
# scope=prefix) once per prefix).
second_train_vars = []
for prefix in second_vars:
    second_train_vars.extend(var for var in all_vars if var.name.startswith(prefix))
optimizer_2 = tf.train.AdamOptimizer(learning_rate=lr)
train_op_2 = slim.learning.create_train_op(loss, optimizer_2, variables_to_train=second_train_vars)
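
Neither method shows the alternation itself: once both train ops exist, running them in turn inside one session updates each variable subset against its own loss. A minimal sketch of the loop, assuming a feed_dict named `feed` and a step count `num_steps` (both hypothetical, depending on your input pipeline):

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(num_steps):
        # Phase 1: updates only first_train_vars; the returned value is the total loss.
        loss_val = sess.run(train_op_1, feed_dict=feed)
        # Phase 2: updates only second_train_vars against the other loss.
        loss_cng_val = sess.run(train_op_2, feed_dict=feed)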


Reposted from blog.csdn.net/weixin_39880579/article/details/80652754