def build_training(self, global_steps, inputs, inferences):
    """Build the training sub-graph: loss, loss moving average, summaries, and the optimize op.

    Args:
        global_steps: global-step variable, incremented by the optimizer on each
            `apply_gradients` call.
        inputs: input batch handed to the classification target extractor.
        inferences: raw (unnormalized) logits produced by the inference sub-graph.

    Returns:
        (loss, train): the scalar mean cross-entropy tensor and the training op.
    """
    with tf.name_scope('target'):
        label_indices = self.classification.target_label_indices(inputs)
    with tf.name_scope('error'):
        # BUG FIX: the labels are class *indices* (see `target_label_indices`),
        # not one-hot distributions, so the sparse variant of the
        # cross-entropy op is required; the dense `softmax_cross_entropy_with_logits`
        # silently computes a wrong loss on index-shaped labels.
        cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
            logits=inferences,
            labels=label_indices,
            name='softmax_cross_entropy')
        loss = tf.reduce_mean(cross_entropy, name='loss')
        # Exponentially smoothed loss for monitoring (decay 0.99 ≈ ~100-step window).
        averager = tf.train.ExponentialMovingAverage(0.99, name='loss_averager')
        averaging = averager.apply([loss])
    # An empty name_scope resets to the graph root so the summary tags are
    # exactly 'metrics/...' rather than nested under 'error/'.
    with tf.name_scope(''):
        tf.summary.scalar('metrics/loss', loss)
        tf.summary.scalar('metrics/loss.average', averager.average(loss))
    # Run the EMA update as a dependency of the train op so the smoothed loss
    # is refreshed on every training step.
    with tf.control_dependencies([averaging]):
        with tf.name_scope(self.args.optimizer.get_name()):
            gradients = self.args.optimizer.compute_gradients(
                loss, var_list=tf.trainable_variables())
            train = self.args.optimizer.apply_gradients(
                gradients, global_steps, name='optimize')
            with tf.name_scope(''):
                # Histogram per-variable gradients; entries are None for
                # variables the loss does not depend on, so skip those.
                for gradient, variable in gradients:
                    if gradient is not None:
                        tf.summary.histogram(variable.op.name + '.gradients', gradient)
    return loss, train
# NOTE(review): trailing web-scrape artifacts ("评论列表" / "文章目录" — blog
# navigation text, not code) commented out so the module stays importable.