def setup_train_op(self):
"""
Add train_op to self
"""
with tf.variable_scope("train_step"):
adam_optimizer = tf.train.AdamOptimizer()
grads, vars = zip(*adam_optimizer.compute_gradients(self.loss))
clip_val = self.config.max_gradient_norm
# if -1 then do not perform gradient clipping
if clip_val != -1:
clipped_grads, _ = tf.clip_by_global_norm(grads, self.config.max_gradient_norm)
self.global_grad = tf.global_norm(clipped_grads)
self.gradients = zip(clipped_grads, vars)
else:
self.global_grad = tf.global_norm(grads)
self.gradients = zip(grads, vars)
self.train_op = adam_optimizer.apply_gradients(self.gradients)
self.init = tf.global_variables_initializer()
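A minimal usage sketch, assuming a TensorFlow 1.x graph and a hypothetical `model` instance that exposes `loss`, `init`, `train_op`, and `global_grad` as built above; `feed_dict` and the step count are illustrative and not part of the original code:

import tensorflow as tf

# Hypothetical training loop; `model` and `feed_dict` are assumed to be
# constructed elsewhere by the caller.
model.setup_train_op()                        # build train_op, global_grad, init
with tf.Session() as sess:
    sess.run(model.init)                      # initialize all variables
    for step in range(1000):
        _, grad_norm, loss_val = sess.run(
            [model.train_op, model.global_grad, model.loss],
            feed_dict=feed_dict)              # inputs supplied by the caller
        if step % 100 == 0:
            print("step %d  loss %.4f  grad norm %.4f" % (step, loss_val, grad_norm))

Monitoring `global_grad` alongside the loss is a convenient way to check whether the `max_gradient_norm` threshold is actually being hit during training.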