def _backward(self):
    # TODO: we need a custom loss function that takes the mask into account
    # TODO: passing the mask this way might be inefficient, but it's fine for now
    if self.training:
        self.optimizer.zero_grad()
    # Flatten prediction, target, and mask the same way so their elements line
    # up; the mask is passed as a per-element weight, zeroing out padded positions.
    loss_vb = F.binary_cross_entropy(input=self.output_vb.transpose(0, 1).contiguous().view(1, -1),
                                     target=self.target_vb.transpose(0, 1).contiguous().view(1, -1),
                                     weight=self.mask_ts.transpose(0, 1).contiguous().view(1, -1))
    loss_vb /= self.batch_size
    if self.training:
        loss_vb.backward()
        self.optimizer.step()
    return loss_vb.item()  # return the loss as a plain Python float
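
The first TODO asks for a loss that handles the mask explicitly. Below is a minimal sketch of such a masked BCE in modern PyTorch, using reduction='none' instead of the weight= trick above. The names output_vb, target_vb, mask_ts, and batch_size come from the method itself; the function name masked_bce and the choice to normalize by the number of unmasked elements are assumptions here, not the author's implementation:

    import torch.nn.functional as F

    def masked_bce(output, target, mask, batch_size):
        # Element-wise BCE with no reduction, so the mask can be applied manually.
        per_elem = F.binary_cross_entropy(output, target, reduction='none')
        # Zero out padded positions and average over unmasked elements only
        # (clamp guards against an all-zero mask), then scale by batch size
        # to mirror the `loss_vb /= self.batch_size` step above.
        return (per_elem * mask).sum() / mask.sum().clamp(min=1.0) / batch_size

Since every operation is element-wise, this works on any matching shapes, e.g. masked_bce(self.output_vb, self.target_vb, self.mask_ts, self.batch_size) without the transpose-and-flatten dance.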