Notes - model training: Regularization Loss

Front

  • add_to_collection
import tensorflow as tf

# Each call appends a value to the named collection (the name is arbitrary).
tf.add_to_collection("reg_losses", 1.0)
tf.add_to_collection("reg_losses", 1.0)
# get_collection returns the plain Python list stored under that name,
# so it can be printed without running anything in the session.
loss = tf.get_collection("reg_losses")
with tf.Session() as sess:
    print(loss)

"""
运行结果:
[1.0, 1.0]
"""

Adding a regularization loss

  • Manually add the regularization loss to a collection
reg_loss = tf.reduce_sum(tf.abs(w))     # L1 penalty on the weight variable w
reg_loss = tf.reduce_sum(tf.square(w))  # L2 penalty (overwrites the L1 line; keep one)
tf.add_to_collection("reg_losses", reg_loss)
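Putting the manual approach together, a minimal self-contained sketch, assuming TF 1.x, a made-up weight variable w of shape [10, 5], and an illustrative scale of 0.01:

import tensorflow as tf

# w stands in for any weight matrix in the model.
w = tf.Variable(tf.random_normal([10, 5]), name="w")

l1_loss = 0.01 * tf.reduce_sum(tf.abs(w))     # L1 penalty
l2_loss = 0.01 * tf.reduce_sum(tf.square(w))  # L2 penalty

# Register whichever penalties are wanted under a custom collection name.
tf.add_to_collection("reg_losses", l1_loss)
tf.add_to_collection("reg_losses", l2_loss)

# Later, sum everything that was registered.
reg_total = tf.add_n(tf.get_collection("reg_losses"))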
  • Automatically add the loss to the collection via a regularizer
from tensorflow.contrib.layers import fully_connected

with tf.contrib.framework.arg_scope(
        [fully_connected],
        weights_regularizer=tf.contrib.layers.l2_regularizer(scale=0.01)):
    hidden1 = fully_connected(X, n_hidden1, scope="hidden1"
                              # the regularizer could also be passed per layer:
                              # , weights_regularizer=tf.contrib.layers.l2_regularizer(scale=0.01)
                              )
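A sketch of why arg_scope is convenient: every layer built inside the scope inherits the regularizer, and each one silently registers its penalty in tf.GraphKeys.REGULARIZATION_LOSSES. The input width 784 and the layer sizes below are illustrative assumptions:

import tensorflow as tf
from tensorflow.contrib.layers import fully_connected

X = tf.placeholder(tf.float32, shape=(None, 784), name="X")

with tf.contrib.framework.arg_scope(
        [fully_connected],
        weights_regularizer=tf.contrib.layers.l2_regularizer(scale=0.01)):
    hidden1 = fully_connected(X, 300, scope="hidden1")
    hidden2 = fully_connected(hidden1, 100, scope="hidden2")

# One penalty tensor per layer's weights was added automatically.
print(len(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)))  # 2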

Combining the regularization losses

# reg_losses = tf.add_n(tf.get_collection("reg_losses"))  # manually filled collection
reg_losses = tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))  # filled by the regularizers
total_loss = tf.add(loss, reg_losses)  # loss is the base (e.g. cross-entropy) loss
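Finally, a minimal end-to-end sketch of training on the combined objective, assuming TF 1.x, a single regularized output layer, 10 classes, and illustrative shapes and rates:

import tensorflow as tf
from tensorflow.contrib.layers import fully_connected

X = tf.placeholder(tf.float32, shape=(None, 784), name="X")
y = tf.placeholder(tf.int32, shape=(None,), name="y")

# The regularizer registers its penalty in REGULARIZATION_LOSSES.
logits = fully_connected(
    X, 10, activation_fn=None, scope="outputs",
    weights_regularizer=tf.contrib.layers.l2_regularizer(scale=0.01))

base_loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits))

# Fold the base loss and every registered penalty into one objective.
reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
total_loss = tf.add_n([base_loss] + reg_losses, name="total_loss")

training_op = tf.train.GradientDescentOptimizer(0.01).minimize(total_loss)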


Source: blog.csdn.net/chen_holy/article/details/91437016