import tensorflow as tf  # TensorFlow 1.x API


def set_up_optimizer(loss, optimizer, params, clip_gradients):
    """Build a TF 1.x optimizer by name and return (optimizer, train_op)."""
    # Look up the optimizer class by name and instantiate it with its
    # keyword arguments (e.g. learning_rate).
    opt = {
        'adam': tf.train.AdamOptimizer,
        'sgd': tf.train.GradientDescentOptimizer,
        'momentum': tf.train.MomentumOptimizer,
        'adadelta': tf.train.AdadeltaOptimizer,
        'adagrad': tf.train.AdagradOptimizer,
        'rmsprop': tf.train.RMSPropOptimizer,
    }[optimizer](**params)
    grads_and_vars = opt.compute_gradients(loss)
    # Optionally clip each gradient by norm. compute_gradients returns None
    # for variables the loss does not depend on, so leave those untouched.
    if clip_gradients is not None:
        grads_and_vars = [(tf.clip_by_norm(grad, clip_gradients)
                           if grad is not None else grad, var)
                          for grad, var in grads_and_vars]
    return opt, opt.apply_gradients(grads_and_vars)
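A minimal usage sketch of the helper above, assuming TensorFlow 1.x; the toy quadratic loss, the learning rate, and the clip value are illustrative stand-ins for a real model's loss and hyperparameters:

# Usage sketch (TF 1.x); the loss here is a stand-in for a real model loss.
x = tf.Variable(3.0)
loss = tf.square(x)  # toy quadratic loss
opt, train_op = set_up_optimizer(loss, 'adam',
                                 params={'learning_rate': 1e-3},
                                 clip_gradients=5.0)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
        _, loss_val = sess.run([train_op, loss])

The function returns both the optimizer instance and the training op, so callers can reuse the optimizer (e.g. to inspect its slot variables) while running `train_op` in the session loop.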