def add_optimizer(self):
    # Counts parameter updates; excluded from training itself.
    self.global_step = tf.Variable(0, trainable=False)
    # Start at 0.01 and multiply by 0.1 every 50 steps (staircase decay).
    learning_rate = tf.train.exponential_decay(0.01, self.global_step, 50,
                                               0.1, staircase=True)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    gradients = optimizer.compute_gradients(self.loss)
    # apply_gradients increments global_step each time the op is run.
    self.apply_gradient_op = optimizer.apply_gradients(
        gradients, global_step=self.global_step)
    # Record histograms of every trainable variable and its gradient for
    # TensorBoard. tf.summary.histogram replaced the old tf.histogram_summary
    # in TensorFlow 1.0.
    for var in tf.trainable_variables():
        tf.summary.histogram(var.op.name, var)
    for grad, var in gradients:
        if grad is not None:
            tf.summary.histogram(var.op.name + '/gradients', grad)
    return self.apply_gradient_op
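As a quick sanity check on the schedule, here is a standalone sketch (not part of the original class; it assumes TensorFlow 1.x graph mode and uses illustrative names) that prints the learning rate as the step counter advances. With staircase=True the rate is 0.01 * 0.1 ** (global_step // 50), so it holds at 0.01 for steps 0 to 49, drops to 0.001 at step 50, and so on:

import tensorflow as tf

global_step = tf.Variable(0, trainable=False)
learning_rate = tf.train.exponential_decay(0.01, global_step, 50,
                                           0.1, staircase=True)
# Stand-in for a training op, which would normally bump global_step itself.
increment_step = tf.assign_add(global_step, 1)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(101):
        step, lr = sess.run([global_step, learning_rate])
        if step in (0, 49, 50, 100):
            print('step %3d  lr %.4f' % (step, lr))
        sess.run(increment_step)

Running this prints 0.0100 at steps 0 and 49, 0.0010 at step 50, and 0.0001 at step 100, confirming the stepwise decay that add_optimizer sets up.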