def _get_opt(self):
    """Build ``self.opt_op``, the training op that applies gradients.

    Computes gradients of ``self.loss + self.l2loss`` w.r.t. the trainable
    variables selected by ``set_train_var()`` and wires them into the
    optimizer returned by ``_get_optx()``.  When gradient visualization is
    requested via ``self.flags.visualize``, also registers a histogram
    summary per gradient.

    Side effects:
        - Calls ``self.set_train_var()`` and ``self.print_trainable()``.
        - Sets ``self.opt_op`` (also increments ``self.global_step`` when run).
        - May add histogram summaries to the graph.
    """
    self.set_train_var()
    tvars = self.var_list
    self.print_trainable()
    with tf.name_scope("Optimizer"):
        opt = self._get_optx()
        # Total objective = task loss + L2 regularization term.
        grads = tf.gradients(self.loss + self.l2loss, tvars)
        grads = list(zip(grads, tvars))
        # Op to update all variables according to their gradient.
        self.opt_op = opt.apply_gradients(grads_and_vars=grads,
                                          global_step=self.global_step)
        if self.flags.visualize and "grad" in self.flags.visualize:
            for grad, var in grads:
                # tf.gradients yields None for variables the loss does not
                # depend on; apply_gradients tolerates (None, var) pairs, but
                # tf.summary.histogram(None) raises — so skip them here.
                if grad is not None:
                    # NOTE(review): tf.GraphKeys.GRADIENTS is not a standard
                    # TF1 collection key — confirm it is defined/patched
                    # elsewhere in this project.
                    tf.summary.histogram(var.name + '/gradient', grad,
                                         collections=[tf.GraphKeys.GRADIENTS])
# NOTE: the two lines below were stray web-scrape residue ("评论列表" =
# "comment list", "文章目录" = "article table of contents") — bare text that
# made the file a syntax error; kept here as a comment for provenance.
# 评论列表
# 文章目录