def log_softmax(input, dim=None, _stacklevel=3):
r"""Applies a softmax followed by a logarithm.
While mathematically equivalent to log(softmax(x)), doing these two
operations separately is slower, and numerically unstable. This function
uses an alternative formulation to compute the output and gradient correctly.
See :class:`~torch.nn.LogSoftmax` for more details.
Arguments:
input (Variable): input
dim (int): A dimension along which log_softmax will be computed.
"""
if dim is None:
dim = _get_softmax_dim('log_softmax', input.dim(), _stacklevel)
return torch._C._nn.log_softmax(input, dim)
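
A quick sanity check of the stability claim above, as a minimal sketch assuming a recent PyTorch where `log_softmax` is exposed through `torch.nn.functional`; the tensor values are illustrative:

```python
import torch
import torch.nn.functional as F

# Large-magnitude logits: a naive log(softmax(x)) loses precision here
x = torch.tensor([[1000.0, 0.0, -1000.0]])

naive = torch.log(torch.softmax(x, dim=1))  # softmax underflows to 0, so log yields -inf
stable = F.log_softmax(x, dim=1)            # fused formulation keeps every entry finite

print(naive)   # tensor([[0., -inf, -inf]])
print(stable)  # tensor([[0., -1000., -2000.]])
```

The fused version effectively computes `x - max(x) - log(sum(exp(x - max(x))))`, so entries that the separate computation maps to `-inf` come out as finite log-probabilities.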