def categorical_crossentropy(output, target, from_logits=False):
    """Categorical crossentropy between an output tensor
    and a target tensor, where the target is a tensor of the same
    shape as the output.
    """
    # Note: tf.nn.softmax_cross_entropy_with_logits
    # expects logits, Keras expects probabilities.
    if not from_logits:
        # scale preds so that the class probas of each sample sum to 1
        output /= tf.reduce_sum(output,
                                reduction_indices=len(output.get_shape()) - 1,
                                keep_dims=True)
        # manual computation of crossentropy
        epsilon = _to_tensor(_EPSILON, output.dtype.base_dtype)
        output = tf.clip_by_value(output, epsilon, 1. - epsilon)
        return - tf.reduce_sum(target * tf.log(output),
                               reduction_indices=len(output.get_shape()) - 1)
    else:
        try:
            return tf.nn.softmax_cross_entropy_with_logits(labels=target,
                                                           logits=output)
        except TypeError:
            # fallback for older TF versions with the positional (logits, labels) signature
            return tf.nn.softmax_cross_entropy_with_logits(output, target)
Source file: tensorflow_backend.py (Keras TensorFlow backend)
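A minimal usage sketch follows, assuming a TF 1.x-style graph/session and that the backend module's helpers (`_EPSILON`, `_to_tensor`) are in scope alongside the function above; the toy tensors are hypothetical. Passing probabilities directly and passing matching logits with `from_logits=True` should produce the same per-sample loss, up to the epsilon clipping.

import tensorflow as tf

# Hypothetical toy batch: 2 samples, 3 classes, one-hot targets.
target = tf.constant([[0., 1., 0.],
                      [1., 0., 0.]])
probs = tf.constant([[0.1, 0.8, 0.1],
                     [0.6, 0.3, 0.1]])   # rows already sum to 1
logits = tf.log(probs)                   # softmax(log(p)) == p, so these logits match probs

loss_from_probs = categorical_crossentropy(probs, target)
loss_from_logits = categorical_crossentropy(logits, target, from_logits=True)

with tf.Session() as sess:
    # Both values should agree up to the epsilon clipping in the manual path.
    print(sess.run([loss_from_probs, loss_from_logits]))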