import math

import tensorflow as tf


def cyclic_decay(learning_rate,
                 min_learning_rate=1e-4,
                 cycle_length=1000,
                 decay_steps=20000,
                 decay_rate=0.5):
  """Cyclic learning rate: a sinusoidal cycle whose bounds decay over time."""
  step = tf.to_float(tf.train.get_or_create_global_step())
  # Exponentially shrink both bounds of the cycle every `decay_steps` steps.
  decay = decay_rate ** (step // decay_steps)
  min_learning_rate = min_learning_rate * decay
  max_learning_rate = learning_rate * decay
  # Sinusoidal oscillation with period `cycle_length` steps, in [-1, 1].
  cycle = tf.sin(step * 2.0 * math.pi / cycle_length)
  # Map the oscillation into [min_learning_rate, max_learning_rate].
  learning_rate = ((max_learning_rate - min_learning_rate) * (cycle + 1) * 0.5 +
                   min_learning_rate)
  return learning_rate
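
A minimal usage sketch for the schedule above, assuming TF1 graph mode; the toy quadratic loss, the base rate of 0.1, and the choice of GradientDescentOptimizer are illustrative assumptions, not part of the original snippet:

# Toy quadratic objective; variable, loss, and optimizer are illustrative.
w = tf.get_variable("w", initializer=5.0)
loss = tf.square(w)

lr = cyclic_decay(learning_rate=0.1)
optimizer = tf.train.GradientDescentOptimizer(lr)
# Passing the global step keeps `step` inside cyclic_decay advancing each update.
train_op = optimizer.minimize(loss, global_step=tf.train.get_or_create_global_step())

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  for _ in range(3000):
    sess.run(train_op)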