# decayed_learning_rate  (stray text from blog paste — kept as a comment so the file parses)

import tensorflow as tf
# TRAINING_STEPS = 10
# # LEARNING_RATE = 1
# # x = tf.Variable(tf.constant(5, dtype=tf.float32), name="x")
# # y = tf.square(x)
# #
# # train_op = tf.train.GradientDescentOptimizer(LEARNING_RATE).minimize(y)
# #
# # with tf.Session() as sess:
# #     sess.run(tf.global_variables_initializer())
# #     for i in range(TRAINING_STEPS):
# #         sess.run(train_op)
# #         x_value = sess.run(x)
# #         print ("After %s iteration(s): x%s is %f."% (i+1, i+1, x_value) )

# TRAINING_STEPS = 1000
# LEARNING_RATE = 0.001
# x = tf.Variable(tf.constant(5, dtype=tf.float32), name="x")
# y = tf.square(x)
#
# train_op = tf.train.GradientDescentOptimizer(LEARNING_RATE).minimize(y)
#
# with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())
#     for i in range(TRAINING_STEPS):
#         sess.run(train_op)
#         if i % 100 == 0:
#             x_value = sess.run(x)
#             print ("After %s iteration(s): x%s is %f."% (i+1, i+1, x_value))

TRAINING_STEPS = 100
# Global iteration counter. trainable=False keeps it out of the trainable
# variable collection so the optimizer never treats it as a model parameter.
global_step = tf.Variable(0, trainable=False)

# tf.train.exponential_decay arguments:
#   learning_rate - initial learning rate
#   global_step   - current iteration count
#   decay_steps   - decay interval (after this many steps the rate has
#                   decayed to learning_rate * decay_rate)
#   decay_rate    - decay factor, usually in (0, 1)
#   staircase     - False (default): continuous decay; True:
#                   (global_step / decay_steps) is truncated to an integer,
#                   so the rate drops in discrete steps
#   name          - optional op name
# decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
# Intuition: with initial rate 0.1, decay_rate 0.1 and decay_steps 10000,
# the rate decays smoothly from 0.1 to 0.1*0.1 = 0.01 over iterations
# 1..10000, then on to 0.1*0.1^2 = 0.001 by iteration 20000, and so on —
# i.e. every 10000 iterations the rate shrinks to a tenth of what it was.
# That continuous curve is the staircase=False case; with staircase=True the
# quotient is truncated, so the rate changes abruptly once per decay_steps
# iterations and the curve is step-shaped.
LEARNING_RATE = tf.train.exponential_decay(0.1, global_step, 1, 0.96, staircase=True)

x = tf.Variable(tf.constant(5, dtype=tf.float32), name="x")
y = tf.square(x)
# Passing global_step makes minimize() increment it on every update,
# which in turn drives the learning-rate decay above.
train_op = tf.train.GradientDescentOptimizer(LEARNING_RATE).minimize(y, global_step=global_step)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for i in range(TRAINING_STEPS):
        sess.run(train_op)
        # Fetch both tensors in one run() call so they come from the same
        # graph evaluation (and the session round-trip happens once).
        x_value, lr_value = sess.run([x, LEARNING_RATE])
        print("After %s iteration(s): x%s is %f, learning rate is %f." % (i + 1, i + 1, x_value, lr_value))

# Source: blog.csdn.net/feidao84/article/details/82194443