def train_op(self, total_loss, global_step):
    """Build the training op: an Adam update plus an exponential moving
    average (EMA) over all trainable variables.

    TensorFlow 1.x API; assumes `import tensorflow as tf` at module level.
    """
    self._loss_summary(total_loss)
    optimizer = tf.train.AdamOptimizer()
    # Compute and apply gradients; passing global_step makes
    # apply_gradients increment it after each update.
    grads = optimizer.compute_gradients(total_loss)
    apply_gradient_op = optimizer.apply_gradients(grads, global_step=global_step)
    # Maintain shadow (EMA) copies of the trainable variables.
    variable_averages = tf.train.ExponentialMovingAverage(
        self.moving_average_decay, global_step)
    variable_averages_op = variable_averages.apply(tf.trainable_variables())
    # Group both ops: running train_op performs the parameter update
    # and refreshes the EMA shadow variables in one step.
    with tf.control_dependencies([apply_gradient_op, variable_averages_op]):
        train_op = tf.no_op(name="train")
    return train_op
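The EMA shadow variables maintained above are typically what gets loaded at evaluation time, rather than the raw trained weights. A minimal sketch of that companion pattern, assuming the same decay value (here a placeholder `moving_average_decay`) and a hypothetical `checkpoint_dir` written by the training loop:

# Evaluation-side counterpart (sketch, TF 1.x API): restore EMA shadow
# values in place of the raw trainable variables.
import tensorflow as tf

variable_averages = tf.train.ExponentialMovingAverage(moving_average_decay)
# Map each trainable variable to its moving-average checkpoint name.
variables_to_restore = variable_averages.variables_to_restore()
saver = tf.train.Saver(variables_to_restore)

with tf.Session() as sess:
    ckpt = tf.train.latest_checkpoint(checkpoint_dir)  # checkpoint_dir is assumed
    saver.restore(sess, ckpt)
    # Each trainable variable now holds its moving-average value.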
Source: cpm.py