def backward(self, grad_output):
    """Backward pass for addr-style op: out = alpha * add_matrix + beta * outer(vector1, vector2).

    Args:
        grad_output: gradient w.r.t. the output matrix, shape (m, n).

    Returns:
        Tuple (grad_add_matrix, grad_vector1, grad_vector2); entries are
        None for inputs that do not require grad.
        grad_add_matrix: (m, n), grad_vector1: (m,), grad_vector2: (n,).

    NOTE(review): scaling convention (alpha on add_matrix, beta on the
    outer product) mirrors the legacy torch.addr(alpha, mat, beta, v1, v2)
    forward — confirm against the forward method.
    """
    vector1, vector2 = self.saved_tensors
    grad_add_matrix = grad_vector1 = grad_vector2 = None

    if self.needs_input_grad[0]:
        # d(out)/d(add_matrix) is the identity scaled by alpha.
        grad_add_matrix = grad_output
        if self.alpha != 1:
            # mul (not mul_) to avoid clobbering grad_output in place.
            grad_add_matrix = grad_add_matrix.mul(self.alpha)

    if self.needs_input_grad[1]:
        # d(outer(v1, v2))/d(v1) contracted with grad_output: G @ v2 -> (m,)
        grad_vector1 = torch.mv(grad_output, vector2)
        if self.beta != 1:
            grad_vector1 *= self.beta

    if self.needs_input_grad[2]:
        # FIX: the old mm(vector1.unsqueeze(0), grad_output) returned a
        # (1, n) matrix instead of an (n,) vector, so the gradient shape
        # did not match vector2. G^T @ v1 gives the correct 1-D result
        # (this is the transpose+mv form the old TODO suggested).
        grad_vector2 = torch.mv(grad_output.t(), vector1)
        if self.beta != 1:
            grad_vector2 *= self.beta

    return grad_add_matrix, grad_vector1, grad_vector2