def add_training_op(self, loss):
"""Sets up the training Ops.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train. See
TODO:
- Get the gradients for the loss from optimizer using
optimizer.compute_gradients.
- if self.clip_gradients is true, clip the global norm of
the gradients using tf.clip_by_global_norm to self.config.max_grad_norm
- Compute the resultant global norm of the gradients using
tf.global_norm and save this global norm in self.grad_norm.
- Finally, actually create the training operation by calling
optimizer.apply_gradients.
See: https://www.tensorflow.org/api_docs/python/train/gradient_clipping
Args:
loss: Loss tensor.
Returns:
train_op: The Op for training.
"""
optimizer = tf.train.GradientDescentOptimizer(learning_rate=self.config.lr)
### YOUR CODE HERE (~6-10 lines)
# - Remember to clip gradients only if self.config.clip_gradients
# is True.
# - Remember to set self.grad_norm
### END YOUR CODE
assert self.grad_norm is not None, "grad_norm was not set properly!"
return train_op
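
For reference, one way to fill in the `YOUR CODE HERE` block is sketched below. This is a minimal sketch, not the official solution: it assumes the TF 1.x API already used in this function, and that `self.config.clip_gradients` and `self.config.max_grad_norm` exist as the starter comments state.

    grads_and_vars = optimizer.compute_gradients(loss)
    gradients, variables = zip(*grads_and_vars)
    if self.config.clip_gradients:
        # Rescale all gradients together so their global norm is at most max_grad_norm.
        gradients, _ = tf.clip_by_global_norm(gradients, self.config.max_grad_norm)
    # Record the (possibly clipped) global norm so it can be monitored during training.
    self.grad_norm = tf.global_norm(gradients)
    train_op = optimizer.apply_gradients(list(zip(gradients, variables)))

Note that `tf.clip_by_global_norm` also returns a norm, but it is the norm *before* clipping; the TODO asks for the resultant norm, hence the separate `tf.global_norm` call on the (possibly clipped) gradients. The returned `train_op` is then run once per batch, e.g. `_, norm = sess.run([train_op, self.grad_norm], feed_dict=feed)` with a hypothetical `feed` dict, so a single call both applies the update and reports the gradient norm.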