def backward(self, grad_output):
    """Backward pass: gradient of the affine-grid output w.r.t. the 2x3 theta.

    Computes grad_theta = grad_output^T @ batchgrid, batched over the batch
    dimension, then scales by ``self.lr``.

    Args:
        grad_output: gradient flowing in from the sampled grid; viewed as
            ``(batch, height*width, 2)`` — assumes it matches
            ``self.height``/``self.width`` (TODO confirm against forward).

    Returns:
        Tensor of shape ``(batch, 2, 3)``: the scaled gradient w.r.t. the
        affine parameters.
    """
    if grad_output.is_cuda:
        # Move the cached grid to the GPU once; the assignment is persistent
        # so subsequent backward calls reuse the CUDA copy.
        self.batchgrid = self.batchgrid.cuda()
    # (batch, 2, H*W) @ (batch, H*W, 3) -> (batch, 2, 3).
    # Plain bmm replaces the original zeros + baddbmm, which added a
    # freshly-allocated zero tensor for no effect (and risked a CPU/GPU
    # or dtype mismatch on the pre-allocated buffer).
    grad_input1 = torch.bmm(
        torch.transpose(grad_output.view(-1, self.height * self.width, 2), 1, 2),
        self.batchgrid.view(-1, self.height * self.width, 3),
    )
    return grad_input1 * self.lr
# (removed from code path: stray blog-page navigation text pasted into the
# source — "评论列表" = comment list, "文章目录" = article table of contents)