def _build_optimizer(self):
    """Create the training operation that minimizes the model's loss.

    Gradients of the loss are taken w.r.t. every trainable variable,
    clipped by their global norm to ``self.max_gradient_norm`` (see
    ``tf.clip_by_global_norm``) so no single update is too drastic, and
    then applied using the RMSProp optimizer.

    Returns:
        tf.Operation: An operation that updates the model's trainable parameters.
    """
    # Compute and clip the gradients of the loss w.r.t. the trainable variables.
    trainable = tf.trainable_variables()
    raw_grads = tf.gradients(self._loss, trainable)
    clipped_grads, _ = tf.clip_by_global_norm(raw_grads, self.max_gradient_norm)
    # Apply the clipped gradients with RMSProp.
    rmsprop = tf.train.RMSPropOptimizer(self._learning_rate)
    return rmsprop.apply_gradients(zip(clipped_grads, trainable))