def binary_cross_entropy(input, target, weight=None, size_average=True):
r"""Function that measures the Binary Cross Entropy
between the target and the output.
See :class:`~torch.nn.BCELoss` for details.
Args:
input: Variable of arbitrary shape
target: Variable of the same shape as input
weight (Variable, optional): a manual rescaling weight
if provided it's repeated to match input tensor shape
size_average (bool, optional): By default, the losses are averaged
over observations for each minibatch. However, if the field
sizeAverage is set to False, the losses are instead summed
for each minibatch. Default: True
Examples::
>>> input = autograd.Variable(torch.randn(3), requires_grad=True)
>>> target = autograd.Variable(torch.LongTensor(3).random_(2))
>>> loss = F.binary_cross_entropy(F.sigmoid(input), target)
>>> loss.backward()
"""
    if not (target.size() == input.size()):
        warnings.warn("Using a target size ({}) that is different to the input size ({}) is deprecated. "
                      "Please ensure they have the same size.".format(target.size(), input.size()))
    if input.nelement() != target.nelement():
        raise ValueError("Target and input must have the same number of elements. target nelement ({}) "
                         "!= input nelement ({})".format(target.nelement(), input.nelement()))

    if weight is not None:
        # Broadcast the manual rescaling weight so it matches the target's shape.
        new_size = _infer_size(target.size(), weight.size())
        weight = weight.expand(new_size)

    return _functions.thnn.BCELoss.apply(input, target, weight, size_average)
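
As a rough check of what this function computes, here is a minimal sketch written against a newer PyTorch API (plain tensors with ``requires_grad``, no ``autograd.Variable``; those calls are assumptions relative to this older source). It compares ``F.binary_cross_entropy`` with the elementwise definition ``-(y*log(p) + (1-y)*log(1-p))`` averaged over the minibatch, which corresponds to the default ``size_average=True``, and shows the ``weight`` argument rescaling each element's contribution.

import torch
import torch.nn.functional as F

# Minimal sketch (newer PyTorch API; no autograd.Variable needed).
torch.manual_seed(0)
input = torch.randn(3, requires_grad=True)
target = torch.empty(3).random_(2)              # float 0/1 targets, same shape as input

prob = torch.sigmoid(input)
loss = F.binary_cross_entropy(prob, target)     # averaged over the minibatch by default

# The same quantity computed from the elementwise definition.
manual = -(target * prob.log() + (1 - target) * (1 - prob).log()).mean()
assert torch.allclose(loss, manual)

loss.backward()                                 # gradients flow back to `input`

# A per-element rescaling weight; each element's loss term is multiplied by it
# before the average is taken.
weight = torch.tensor([1.0, 2.0, 0.5])
p = prob.detach()
weighted = F.binary_cross_entropy(p, target, weight=weight)
manual_w = -(weight * (target * p.log() + (1 - target) * (1 - p).log())).mean()
assert torch.allclose(weighted, manual_w)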