def apply_gradients(self, var_list, accum_grad_list, name=None):
    update_ops = []
    with tf.device(self._device):
        # Create optimizer slots outside of any caller control dependencies.
        with tf.control_dependencies(None):
            self._create_slots(var_list)
        with tf.name_scope(name, self._name, []) as name:
            self._prepare()
            for var, accum_grad in zip(var_list, accum_grad_list):
                with tf.name_scope("update_" + var.op.name), tf.device(var.device):
                    # Clip the accumulated gradient by its norm before applying it.
                    clipped_accum_grad = tf.clip_by_norm(accum_grad, self._clip_norm)
                    update_ops.append(self._apply_dense(clipped_accum_grad, var))
            return update_ops
            # Alternatively, return a single grouped op:
            # return tf.group(*update_ops, name=name)
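The method follows the TF1 tf.train.Optimizer conventions (_create_slots, _prepare, _apply_dense) but takes a list of pre-accumulated gradient tensors instead of calling compute_gradients itself. Below is a minimal usage sketch under that assumption; the class name GradientApplier, its constructor arguments, and the way the accumulation buffers are created are all hypothetical, and the ops that actually fill accum_grad_list (e.g. from worker threads) are elided.

import tensorflow as tf

# Hypothetical setup: shared trainable variables plus one non-trainable
# buffer per variable to hold the accumulated gradient.
var_list = tf.trainable_variables()
accum_grad_list = [
    tf.Variable(tf.zeros_like(v.initialized_value()), trainable=False)
    for v in var_list
]

# GradientApplier is a placeholder name for the class that defines
# apply_gradients above; its constructor arguments are assumptions.
applier = GradientApplier(learning_rate=7e-4, clip_norm=40.0)
apply_op = tf.group(*applier.apply_gradients(var_list, accum_grad_list),
                    name="apply_accum_grads")

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # ... run the ops that accumulate gradients into accum_grad_list ...
    sess.run(apply_op)  # applies the norm-clipped accumulated gradients

Returning the raw list of update ops (rather than the grouped op in the commented-out line) leaves it to the caller to decide how to combine them, as done with tf.group in the sketch.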