def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    with tf.name_scope(name, self._name) as name:
        # Delegate the actual parameter update to the wrapped optimizer.
        update_op = self._opt.apply_gradients(
            grads_and_vars, global_step=global_step)
        clip_update_ops = []
        # Only clip after the update has been applied.
        with tf.control_dependencies([update_op]):
            for grad, var in grads_and_vars:
                if grad is None or var not in self._vars_to_clip_dims:
                    continue
                with tf.name_scope("clip_" + var.op.name):
                    if isinstance(grad, tf.Tensor):
                        # Dense gradient: clip the whole variable.
                        clip_update_ops.append(self._clip_dense(var))
                    else:
                        # Sparse gradient (IndexedSlices): clip only the updated rows.
                        clip_update_ops.append(self._clip_sparse(grad, var))

        # In case no var was clipped, still need to run the update_op.
        return tf.group(*([update_op] + clip_update_ops), name=name)
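For context, this method has the shape of the apply_gradients override in TensorFlow 1.x's tf.contrib.opt.VariableClippingOptimizer: it wraps another optimizer, runs its update first, and then re-clips the norm of selected variables (or of the rows touched by a sparse update). Assuming that class is indeed what the surrounding code defines (an inference, since the class definition is not shown here), a minimal usage sketch in TF 1.x graph mode might look like this:

import tensorflow as tf

# Embedding table whose rows we want to keep inside an L2 ball.
embeddings = tf.Variable(tf.random_normal([1000, 64]), name="embeddings")
# embedding_lookup produces IndexedSlices gradients, exercising the sparse clip path above.
loss = tf.reduce_sum(tf.nn.embedding_lookup(embeddings, [0, 1, 2]) ** 2)

base_opt = tf.train.GradientDescentOptimizer(learning_rate=0.1)
# Clip each row of `embeddings` (norm computed over dimension 1) to max_norm after every update.
clipping_opt = tf.contrib.opt.VariableClippingOptimizer(
    base_opt, {embeddings: [1]}, max_norm=1.0)

grads_and_vars = clipping_opt.compute_gradients(loss)
# apply_gradients (shown above) runs the base update, then the clip ops, grouped into one op.
train_op = clipping_opt.apply_gradients(grads_and_vars)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(train_op)

Because the clip ops are created under a control dependency on update_op, the clipping always observes the freshly updated values, and tf.group ensures the returned op still runs the update even when no variable needed clipping.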