def calculate_sparse_softmax_and_summaries(logits, labels, name):
    """Compute the mean sparse-softmax cross-entropy loss and emit summaries.

    Args:
        logits: Tensor of logits; first dimension is batch size.
        labels: Tensor of integer categorical labels; first dimension is
            batch size.
        name: Prefix used for the generated summaries.

    Returns:
        A dimensionless tensor: the mean negative log-probability of the
        true class.
    """
    per_example_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=labels)
    mean_loss = tf.reduce_mean(per_example_loss)
    softmax_summaries(mean_loss, logits, labels, name)
    return mean_loss