import tensorflow as tf


def calculate_softmax_and_summaries(logits, one_hot_labels, name):
"""Calculate the softmax cross entropy loss and associated summaries.
Args:
logits: Tensor of logits, first dimension is batch size.
one_hot_labels: Tensor of one hot encoded categorical labels. First
dimension is batch size.
name: Name to use as prefix for summaries.
Returns:
loss: Dimensionless tensor representing the mean negative
log-probability of the true class.
"""
  # Per-example cross entropy between the logits and the one-hot labels.
  loss = tf.nn.softmax_cross_entropy_with_logits(
      logits=logits, labels=one_hot_labels)
  # Average over the batch to get a scalar loss.
  loss = tf.reduce_mean(loss)
  # `softmax_summaries` is assumed to be defined elsewhere in this module.
  softmax_summaries(loss, logits, one_hot_labels, name)
  return loss
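
To show how the helper is called, here is a minimal usage sketch that continues from the listing above. The TF 2.x eager-mode assumption, the dummy shapes, the `"eval"` name, and the no-op `softmax_summaries` stub are illustrative assumptions, not part of the original module:

# No-op stand-in so the sketch runs on its own; the real module is assumed
# to define `softmax_summaries` to write TensorBoard summaries.
def softmax_summaries(loss, logits, one_hot_labels, name):
  del loss, logits, one_hot_labels, name

batch_logits = tf.random.normal([32, 10])  # 32 examples, 10 classes.
batch_labels = tf.one_hot(
    tf.random.uniform([32], maxval=10, dtype=tf.int32), depth=10)
mean_loss = calculate_softmax_and_summaries(batch_logits, batch_labels, "eval")
print(mean_loss.numpy())  # One scalar; roughly log(10) ≈ 2.3 for random logits.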