def backward(self, grad_output):
matrix, vector = self.saved_tensors
grad_add_vector = grad_matrix = grad_vector = None
if self.needs_input_grad[0]:
grad_add_vector = grad_output
if self.alpha != 1:
grad_add_vector = grad_add_vector.mul(self.alpha)
if self.needs_input_grad[1]:
grad_matrix = torch.ger(grad_output, vector)
if self.beta != 1:
grad_matrix *= self.beta
if self.needs_input_grad[2]:
grad_vector = torch.mv(matrix.t(), grad_output)
if self.beta != 1:
grad_vector *= self.beta
return grad_add_vector, grad_matrix, grad_vector
评论列表
文章目录