def get_optimizer(self, learning_rate=0.001, grad_clip=5):
    """Build the MSE loss and an Adam training op with gradient clipping.

    Returns the training op and the loss tensor.
    """
    #-----------------------------------------------------------------------
    # Build the loss function
    #-----------------------------------------------------------------------
    with tf.variable_scope('loss'):
        loss = tf.losses.mean_squared_error(self.target, self.output)

    #-----------------------------------------------------------------------
    # Build the optimizer
    #-----------------------------------------------------------------------
    with tf.variable_scope('optimizer'):
        tvars = tf.trainable_variables()
        # Clip gradients by their global norm to keep training stable
        grads, _ = tf.clip_by_global_norm(tf.gradients(loss, tvars),
                                          grad_clip)
        optimizer = tf.train.AdamOptimizer(learning_rate)
        train_op = optimizer.apply_gradients(zip(grads, tvars))

    return train_op, loss
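
For context, here is a minimal usage sketch of how the returned ops might be consumed in a TF 1.x training loop. The `Model` class, the `self.input` placeholder, and the `next_batch()` helper are hypothetical names used only for illustration; the original code only defines `self.target` and `self.output`, and assumes `import tensorflow as tf` at module level.

model = Model()   # hypothetical class that builds the network graph
train_op, loss = model.get_optimizer(learning_rate=0.001, grad_clip=5)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):
        x_batch, y_batch = next_batch()   # hypothetical data helper
        _, batch_loss = sess.run(
            [train_op, loss],
            feed_dict={model.input: x_batch, model.target: y_batch})
        if step % 100 == 0:
            print('step %d: loss = %.4f' % (step, batch_loss))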