def backward(self, grad_output):
    """Backward pass: gradient of the sampling grid w.r.t. the affine parameters.

    Args:
        grad_output: gradient flowing into the generated grid; the view below
            assumes shape (N, height, width, 2) — TODO confirm against forward().

    Returns:
        grad_input1: tensor shaped like ``self.input1`` (the (N, 2, 3) affine
            parameters, presumably), accumulated via a batched matmul.
    """
    grad_input1 = torch.zeros(self.input1.size())
    if grad_output.is_cuda:
        # NOTE(review): mutates self.batchgrid in backward — kept for
        # compatibility with the original (old-style autograd Function).
        self.batchgrid = self.batchgrid.cuda()
        grad_input1 = grad_input1.cuda()

    # Flatten spatial dims: (N, H, W, 2) -> (N, H*W, 2); contiguous() is
    # required before view() if grad_output arrived non-contiguous.
    # (The original also called .contiguous() on three intermediates and
    # discarded the result — those were no-ops and have been removed.)
    grad_output_flat = grad_output.contiguous().view(-1, self.height * self.width, 2)
    # Swap to (N, 2, H*W) so the bmm contracts over the H*W grid points.
    grad_output_t = torch.transpose(grad_output_flat, 1, 2)
    # Homogeneous grid coordinates as (N, H*W, 3).
    batchgrid_flat = self.batchgrid.view(-1, self.height * self.width, 3)
    # grad_input1 + grad_output_t @ batchgrid_flat  -> (N, 2, 3)
    grad_input1 = torch.baddbmm(grad_input1, grad_output_t, batchgrid_flat)
    return grad_input1
# NOTE(review): removed stray page-navigation text ("评论列表" / "文章目录",
# i.e. "comment list" / "article table of contents") left over from a web
# scrape — it was not valid Python and broke parsing of this file.