def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    with tf.name_scope(name, self._name) as name:
        # Apply the wrapped optimizer's gradient update first.
        update_op = self._opt.apply_gradients(
            grads_and_vars, global_step=global_step)
        add_noise_ops = []
        # Inject noise only after the gradient update has run.
        with tf.control_dependencies([update_op]):
            for grad, var in grads_and_vars:
                if grad is None:
                    continue
                with tf.name_scope("psgld_noise_" + var.op.name):
                    if isinstance(grad, tf.Tensor):
                        add_noise_ops.append(self._noise_dense(var))
                    else:
                        add_noise_ops.append(self._noise_sparse(grad, var))
        # Return the combined op: gradient update followed by noise injection.
        return tf.group(*([update_op] + add_noise_ops), name=name)
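The _noise_dense and _noise_sparse helpers referenced above are not shown in this snippet. As a rough illustration of the dense case, here is a minimal sketch in the same TF1 style; the attribute self._stddev is a hypothetical stand-in for the pSGLD noise scale (in SGLD the injected Gaussian noise has variance on the order of the step size, and pSGLD additionally scales it by the per-parameter preconditioner):

import tensorflow as tf

def _noise_dense(self, var):
    # Minimal sketch (not the original implementation): add zero-mean
    # Gaussian noise to the variable after the gradient step.
    # `self._stddev` is a hypothetical attribute holding the noise scale.
    noise = tf.random_normal(shape=tf.shape(var), stddev=self._stddev)
    return var.assign_add(noise)

Because the noise ops are created inside the tf.control_dependencies([update_op]) block in apply_gradients, they execute only after the base gradient update, so the Langevin noise is added to the already-updated parameters.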