import tensorflow as tf


def softmax(x):
    """Softmax.

    Computes softmax activations.

    For each batch `i` and class `j` we have

        softmax[i, j] = exp(x[i, j]) / sum_j(exp(x[i, j]))

    Arguments:
        x: A `Tensor`. Must be one of the following types: `float32`,
            `float64`. 2-D with shape `[batch_size, num_classes]`.

    Returns:
        A `Tensor`. Has the same type as `x`. Same shape as `x`.
    """
    return tf.nn.softmax(x)
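
A minimal usage sketch of the formula above, with made-up example logits (assuming TensorFlow 2.x eager execution, so tensors print directly):

# Hypothetical example input, shape [batch_size=2, num_classes=3].
logits = tf.constant([[1.0, 2.0, 3.0],
                      [1.0, 1.0, 1.0]])
probs = softmax(logits)
print(probs.numpy())                          # each row is a probability distribution
print(tf.reduce_sum(probs, axis=1).numpy())   # each row sums to ~1.0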