def set_train_op(loss, tvars):
    # Select the optimizer according to the command-line flag.
    if FLAGS.optimizer_type == "sgd":
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=FLAGS.learning_rate)
    elif FLAGS.optimizer_type == "rmsprop":
        optimizer = tf.train.RMSPropOptimizer(learning_rate=FLAGS.learning_rate)
    elif FLAGS.optimizer_type == "adam":
        optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate)
    else:
        raise ValueError("Unknown optimizer_type: %s" % FLAGS.optimizer_type)
    # Compute gradients for the given variables, clip each gradient's norm to
    # FLAGS.max_grads (leaving None gradients untouched), then apply them.
    gradients = optimizer.compute_gradients(loss, var_list=tvars)
    clipped_gradients = [(grad if grad is None else tf.clip_by_norm(grad, FLAGS.max_grads), var)
                         for grad, var in gradients]
    train_op = optimizer.apply_gradients(clipped_gradients)
    return train_op
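
A minimal usage sketch, assuming TensorFlow 1.x graph mode and that the flags referenced above (optimizer_type, learning_rate, max_grads) are defined with tf.app.flags; the toy linear model and random data here are placeholders for illustration only:

import numpy as np
import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_string("optimizer_type", "adam", "One of: sgd, rmsprop, adam.")
flags.DEFINE_float("learning_rate", 0.001, "Learning rate.")
flags.DEFINE_float("max_grads", 5.0, "Per-gradient clipping norm.")
FLAGS = flags.FLAGS

# Toy linear regression model, just to produce a loss with trainable variables.
x = tf.placeholder(tf.float32, [None, 10])
y = tf.placeholder(tf.float32, [None, 1])
w = tf.get_variable("w", [10, 1])
b = tf.get_variable("b", [1])
loss = tf.reduce_mean(tf.square(tf.matmul(x, w) + b - y))

tvars = tf.trainable_variables()
train_op = set_train_op(loss, tvars)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # One training step on a random batch; feed real data in practice.
    sess.run(train_op, feed_dict={x: np.random.randn(32, 10),
                                  y: np.random.randn(32, 1)})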