def backward(self, grad_output):
    # Gradient of the averaged cross-entropy loss w.r.t. the logits:
    #   dL/dlogits = (softmax(logits) - one_hot(label)) / batch_size
    # The incoming grad_output is not used; for a top-level scalar loss it equals 1.
    input, label = self.saved_tensors
    grad_fs = grad_label = None
    if self.needs_input_grad[0]:
        # Softmax over the class dimension.
        fs = torch.nn.Softmax(dim=1)(
            torch.autograd.Variable(input, requires_grad=False)
        ).data
        # Negative one-hot encoding of the labels.
        y = input.new().resize_as_(input).zero_()
        for i, l in enumerate(label):
            y[i, l] = -1.
        # (softmax - one_hot), averaged over the batch.
        fs.add_(y).mul_(1. / len(label))
        grad_fs = fs
    if self.needs_input_grad[1]:
        # Integer class labels have no meaningful gradient.
        raise NotImplementedError()
    return grad_fs, grad_label
Source code: softmax_with_cross_entropy_loss.py
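For reference, the gradient this backward computes can be verified against autograd. The sketch below is not part of the original file; the tensor shapes, the random seed, and the use of torch.nn.functional.cross_entropy (with its default mean reduction) are assumptions made purely for illustration.

# Assumed standalone check (not from the original file): the closed-form
# gradient (softmax(logits) - one_hot(label)) / batch_size should match the
# gradient autograd computes for F.cross_entropy with mean reduction.
import torch
import torch.nn.functional as F

torch.manual_seed(0)
logits = torch.randn(4, 5, requires_grad=True)  # assumed shape: (batch=4, classes=5)
labels = torch.randint(0, 5, (4,))

# Reference gradient via autograd.
F.cross_entropy(logits, labels).backward()

# Manual gradient, mirroring the backward() above.
with torch.no_grad():
    fs = torch.softmax(logits, dim=1)
    one_hot = torch.zeros_like(fs)
    one_hot[torch.arange(len(labels)), labels] = 1.0
    manual_grad = (fs - one_hot) / len(labels)

print(torch.allclose(logits.grad, manual_grad, atol=1e-6))  # expected: True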