def train_epoch(model, data_iter, criterion, optimizer):
    """Run one training epoch and return the perplexity.

    Args:
        model: network mapping a batch of token ids to per-token predictions.
        data_iter: iterable yielding ``(data, target)`` batches; must also
            expose a ``reset()`` method, which is called once the epoch ends.
        criterion: loss applied to ``(pred, flattened_target)``; perplexity
            below assumes it returns a loss summed over tokens — TODO confirm.
        optimizer: optimizer stepping ``model``'s parameters.

    Returns:
        float: ``exp(total_loss / total_words)``, the epoch perplexity.
    """
    total_loss = 0.0
    total_words = 0.0
    for data, target in data_iter:
        # NOTE(review): `opt` is a module-level config object defined
        # elsewhere in this file; only its `cuda` flag is read here.
        if opt.cuda:
            data, target = data.cuda(), target.cuda()
        # Flatten targets so they line up with per-token predictions.
        target = target.contiguous().view(-1)
        # Call the module, not .forward(), so __call__ hooks run.
        pred = model(data)
        loss = criterion(pred, target)
        # .item() replaces the deprecated 0-dim indexing `loss.data[0]`,
        # which raises on PyTorch >= 0.4 (the deprecated no-op Variable
        # wrappers were dropped for the same reason).
        total_loss += loss.item()
        # assumes data is (batch, seq_len) — TODO confirm against data_iter
        total_words += data.size(0) * data.size(1)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    data_iter.reset()
    # Perplexity: exp of the average per-token loss.
    return math.exp(total_loss / total_words)
# 评论列表 ("comment list") / 文章目录 ("article table of contents"):
# leftover page chrome from the blog post this code was copied from,
# commented out so the file parses.