import tensorflow as tf

# `variable_summaries` is assumed to be defined elsewhere in the project; it
# writes TensorBoard summaries (e.g. mean, stddev, histogram) for a tensor.

def clip_gradients_by_norm(grads_and_vars, add_to_summary=True):
    """Clip each gradient to a maximum norm of 10, optionally logging
    summaries for the gradients before and after clipping."""
    if add_to_summary:
        for grad, var in grads_and_vars:
            if grad is not None:
                # var.name[:-2] strips the ':0' output-index suffix.
                variable_summaries(grad, 'grad/{}'.format(var.name[:-2]))

    # Clip by norm. A gradient can be None when some modules are not being
    # trained, so those pairs are passed through unchanged. check_numerics
    # raises an error if the clipped gradient contains NaN or Inf.
    with tf.name_scope('clip_gradients_by_norm'):
        grads_and_vars = [
            (
                tf.check_numerics(
                    tf.clip_by_norm(gv[0], 10.),
                    'Invalid gradient'
                ),
                gv[1]
            )
            if gv[0] is not None else gv
            for gv in grads_and_vars
        ]

    if add_to_summary:
        for grad, var in grads_and_vars:
            if grad is not None:
                variable_summaries(
                    grad, 'clipped_grad/{}'.format(var.name[:-2]))

    return grads_and_vars
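
As a usage sketch (not from the original source): the helper slots between compute_gradients and apply_gradients in a TF1-style training step. The toy variable, loss, and optimizer choice below are illustrative assumptions, not part of the original code.

import tensorflow as tf

# Toy variable and loss just to exercise the helper; in real training these
# come from the model.
w = tf.Variable([3.0, 4.0], name='w')
loss = tf.reduce_sum(tf.square(w))

optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
grads_and_vars = optimizer.compute_gradients(loss)
# add_to_summary=False here so the example does not depend on the external
# variable_summaries helper.
clipped = clip_gradients_by_norm(grads_and_vars, add_to_summary=False)
train_op = optimizer.apply_gradients(clipped)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(train_op)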