def softmax(input, dim=None, _stacklevel=3):
r"""Applies a softmax function.
Softmax is defined as:
:math:`softmax(x) = \frac{exp(x_i)}{\sum_j exp(x_j)}`
It is applied to all slices along dim, and will rescale them so that the elements
lie in the range `(0, 1)` and sum to 1.
See :class:`~torch.nn.Softmax` for more details.
Arguments:
input (Variable): input
dim (int): A dimension along which softmax will be computed.
.. note::
This function doesn't work directly with NLLLoss,
which expects the Log to be computed between the Softmax and itself.
Use log_softmax instead (it's faster and has better numerical properties).
"""
    if dim is None:
        dim = _get_softmax_dim('softmax', input.dim(), _stacklevel)
    return torch._C._nn.softmax(input, dim)
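
# Usage sketch (illustrative addition, not part of the original module).
# It assumes this function is exposed as torch.nn.functional.softmax and
# that log_softmax and nll_loss are available alongside it, as in released
# PyTorch versions.
if __name__ == "__main__":
    import torch
    import torch.nn.functional as F

    logits = torch.tensor([[1.0, 2.0, 3.0],
                           [1.0, 1.0, 1.0]])

    # Softmax along the last dimension: each row becomes a probability
    # distribution whose elements lie in (0, 1) and sum to 1.
    probs = F.softmax(logits, dim=-1)
    print(probs.sum(dim=-1))  # tensor([1., 1.])

    # As the docstring's note says, pair NLLLoss with log_softmax rather
    # than log(softmax(x)): the fused form is faster and numerically
    # more stable.
    log_probs = F.log_softmax(logits, dim=-1)
    targets = torch.tensor([2, 0])  # one class index per row
    print(F.nll_loss(log_probs, targets))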