def optimize(self, loss, global_step,
             learning_rate_initial=TC_LEARNING_RATE_INITIAL,
             learning_rate_decay=TC_LEARNING_RATE_DECAY,
             learning_rate_decay_steps=TC_LEARNING_RATE_DECAY_STEPS):
    """
    Creates an exponentially decaying learning rate and a training operation for the loss
    :param tf.Tensor loss: the tensor with the loss of the model
    :param tf.Tensor global_step: the global step for training
    :param float learning_rate_initial: the initial learning rate
    :param float learning_rate_decay: the decay rate of the learning rate
    :param int learning_rate_decay_steps: the number of steps between decays of the learning rate
    :return (tf.Operation, tf.Tensor): a tuple with the training operation and the learning rate
    """
    # the rate drops by a factor of learning_rate_decay every
    # learning_rate_decay_steps steps; staircase=True makes the drop discrete
    learning_rate = tf.train.exponential_decay(learning_rate_initial, global_step,
                                               learning_rate_decay_steps,
                                               learning_rate_decay,
                                               staircase=True, name='learning_rate')
    # RMSProp is used here; alternative optimizers can be swapped in:
    optimizer = tf.train.RMSPropOptimizer(learning_rate)
    # optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    # optimizer = tf.train.AdamOptimizer(learning_rate)
    # minimize() returns the training op and increments global_step on each run
    train_op = optimizer.minimize(loss, global_step=global_step)
    return train_op, learning_rate
Source code from text_classification_model_simple.py
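Because the schedule above uses staircase=True, the rate drops in discrete jumps rather than decaying smoothly: the effective value is learning_rate_initial * learning_rate_decay ** (global_step // learning_rate_decay_steps). A minimal standalone sketch of that behavior follows; the constant values and the placeholder scaffolding are illustrative assumptions, not taken from the original file:

import tensorflow as tf

# Illustrative values, not the TC_* defaults used in the model.
initial_rate, decay_rate, decay_steps = 0.1, 0.5, 100

global_step = tf.train.get_or_create_global_step()
learning_rate = tf.train.exponential_decay(initial_rate, global_step,
                                           decay_steps, decay_rate,
                                           staircase=True)

# Assumed scaffolding to probe the schedule at chosen step values.
step_value = tf.placeholder(tf.int64, shape=[])
set_step = tf.assign(global_step, step_value)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in (0, 99, 100, 200):
        sess.run(set_step, feed_dict={step_value: step})
        print(step, sess.run(learning_rate))
    # prints 0.1 at steps 0 and 99, 0.05 at step 100, 0.025 at step 200

With staircase=False the same call would decay continuously at every step; the staircase variant is a common choice when the training loop expects the rate to stay constant within an epoch-sized window.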