# F is torch.nn.functional (imported at module level); self is the model,
# an nn.Module that carries its optimizer as self.optimizer.
def train_(self, input_img, input_qst, label):
    self.optimizer.zero_grad()
    # Forward pass: the model scores answers for an (image, question) pair.
    output = self(input_img, input_qst)
    # Negative log-likelihood loss, so the model must output log-probabilities.
    loss = F.nll_loss(output, label)
    loss.backward()
    self.optimizer.step()
    # Predicted answer = index of the largest log-probability per example.
    pred = output.data.max(1)[1]
    correct = pred.eq(label.data).cpu().sum()
    # Batch accuracy as a percentage.
    accuracy = correct * 100. / len(label)
    return accuracy
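As a minimal sketch of how this routine is driven, the example below pastes the method at module level and calls it on one random batch. The ToyModel class, its layer sizes, and the dummy tensors are placeholders for illustration, not the actual model from this post.

import torch
import torch.nn as nn
import torch.nn.functional as F

class ToyModel(nn.Module):
    """Stand-in model: a single linear layer over concatenated image and question features."""
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(3 * 5 * 5 + 11, 10)   # assumed input and answer sizes
        self.optimizer = torch.optim.Adam(self.parameters(), lr=1e-4)

    def forward(self, img, qst):
        x = torch.cat([img.flatten(1), qst], dim=1)
        return F.log_softmax(self.fc(x), dim=1)    # log-probabilities, as nll_loss expects

# One random mini-batch (shapes are assumptions for the sketch).
img = torch.randn(32, 3, 5, 5)
qst = torch.randn(32, 11)
label = torch.randint(0, 10, (32,))

model = ToyModel()
acc = train_(model, img, qst, label)               # one optimization step
print(acc)                                         # batch accuracy in percent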