def add_training_op(self, loss):
    # optimizer = tf.train.AdamOptimizer(self.config.lr)
    # optimizer = tf.train.AdagradOptimizer(self.config.lr)
    # Resolve the optimizer class by its configured name,
    # e.g. 'Adam' -> tf.train.AdamOptimizer.
    optclass = getattr(tf.train, self.config.optimizer + 'Optimizer')
    assert issubclass(optclass, tf.train.Optimizer)
    optimizer = optclass(self.config.learning_rate)

    # Compute gradients explicitly so they can be clipped before being applied.
    gradient_var_pairs = optimizer.compute_gradients(loss)
    variables = [pair[1] for pair in gradient_var_pairs]
    gradients = [pair[0] for pair in gradient_var_pairs]

    # Clip by global norm when a positive clipping threshold is configured.
    if self.config.gradient_clip > 0:
        clipped, _ = tf.clip_by_global_norm(gradients, self.config.gradient_clip)
    else:
        clipped = gradients

    # Expose the (post-clipping) global gradient norm for monitoring.
    self.grad_norm = tf.global_norm(clipped)
    train_op = optimizer.apply_gradients(zip(clipped, variables))
    return train_op
Source file: base_aligner.py (Python)
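For context, a minimal standalone sketch of the same pattern follows: resolve a tf.train optimizer by name, clip the gradients by global norm, then apply them. It assumes TensorFlow 1.x; the constants OPTIMIZER, LEARNING_RATE, and GRADIENT_CLIP are hypothetical stand-ins for the config attributes read above, and the toy scalar loss exists only to make the sketch runnable.

# Standalone sketch (assumes TensorFlow 1.x). The constants below are
# hypothetical stand-ins for self.config.optimizer / learning_rate / gradient_clip.
import tensorflow as tf

OPTIMIZER = 'Adam'        # resolved to tf.train.AdamOptimizer
LEARNING_RATE = 0.1
GRADIENT_CLIP = 5.0       # a value <= 0 disables clipping

# Toy graph: fit a single scalar variable toward the constant 3.0.
w = tf.get_variable('w', shape=[], initializer=tf.zeros_initializer())
loss = tf.square(w - 3.0)

optclass = getattr(tf.train, OPTIMIZER + 'Optimizer')
optimizer = optclass(LEARNING_RATE)

gradient_var_pairs = optimizer.compute_gradients(loss)
gradients = [g for g, _ in gradient_var_pairs]
variables = [v for _, v in gradient_var_pairs]

if GRADIENT_CLIP > 0:
    clipped, _ = tf.clip_by_global_norm(gradients, GRADIENT_CLIP)
else:
    clipped = gradients

grad_norm = tf.global_norm(clipped)
train_op = optimizer.apply_gradients(zip(clipped, variables))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(200):
        _, norm = sess.run([train_op, grad_norm])
    print(sess.run(w))  # w should have moved close to the target 3.0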