def build_optimizer(loss, learning_rate, grad_clip):
    '''
    Construct the optimizer, with gradient clipping.

    loss: loss tensor to minimize
    learning_rate: learning rate for Adam
    grad_clip: threshold for clipping gradients by global norm
    '''
    # Gradient clipping: compute the gradients of the loss w.r.t. all
    # trainable variables and rescale them so their global norm does not
    # exceed grad_clip, which keeps gradients from exploding.
    tvars = tf.trainable_variables()
    grads, _ = tf.clip_by_global_norm(tf.gradients(loss, tvars), grad_clip)
    # Apply the clipped gradients with Adam; the returned op runs one training step.
    train_op = tf.train.AdamOptimizer(learning_rate)
    optimizer = train_op.apply_gradients(zip(grads, tvars))
    return optimizer
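
Below is a minimal usage sketch showing how the op returned by build_optimizer can be driven in a session. The toy linear-regression graph, the variable names, and the hyperparameter values are illustrative assumptions, not part of the original model; it uses the TensorFlow 1.x API, as in the tutorial.

import numpy as np
import tensorflow as tf

# Toy graph (assumed for illustration only): a linear model with an MSE loss
x = tf.placeholder(tf.float32, [None, 10])
y = tf.placeholder(tf.float32, [None, 1])
w = tf.Variable(tf.random_normal([10, 1]))
b = tf.Variable(tf.zeros([1]))
loss = tf.reduce_mean(tf.square(tf.matmul(x, w) + b - y))

# Build the training op with clipped gradients
optimizer = build_optimizer(loss, learning_rate=0.001, grad_clip=5.0)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    batch_x = np.random.rand(32, 10).astype(np.float32)
    batch_y = np.random.rand(32, 1).astype(np.float32)
    _, batch_loss = sess.run([optimizer, loss],
                             feed_dict={x: batch_x, y: batch_y})
    print('loss after one step:', batch_loss)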