def train(train_loader, net, criterion, optimizer, epoch, train_args):
    """Run one training epoch of `net` over `train_loader`.

    Args:
        train_loader: iterable of (inputs, labels) batches; inputs are
            (N, C, H, W) tensors, labels are (N, H, W) tensors (shapes are
            asserted below).
        net: the segmentation network being trained (on GPU — inputs are
            moved with `.cuda()`).
        criterion: loss function; its result is divided by the batch size N,
            so it presumably returns a sum-reduced loss — TODO confirm.
        optimizer: optimizer over `net`'s parameters.
        epoch: 1-based epoch index, used to continue the global iteration
            counter across epochs.
        train_args: dict with at least 'print_freq' (log every k iterations).

    Side effects: updates `net`'s parameters, logs 'train_loss' to the
    module-level `writer`, and prints progress. Returns None.
    """
    train_loss = AverageMeter()
    # Global iteration counter, continued from previous epochs so the
    # TensorBoard curve is monotone across the whole run.
    curr_iter = (epoch - 1) * len(train_loader)

    for i, data in enumerate(train_loader):
        inputs, labels = data
        # Spatial dims of inputs (H, W) must match the label map.
        assert inputs.size()[2:] == labels.size()[1:]
        N = inputs.size(0)

        # NOTE(review): `Variable` wrapper and `.cuda()` indicate this was
        # written for PyTorch < 0.4; kept as-is for compatibility.
        inputs = Variable(inputs).cuda()
        labels = Variable(labels).cuda()

        optimizer.zero_grad()
        outputs = net(inputs)
        # Network output must be (N, num_classes, H, W).
        assert outputs.size()[2:] == labels.size()[1:]
        assert outputs.size()[1] == voc.num_classes

        # Normalize by batch size (see docstring note on `criterion`).
        loss = criterion(outputs, labels) / N
        loss.backward()
        optimizer.step()

        # `loss.data[0]` is the pre-0.4 way to pull a Python scalar out of
        # a single-element Variable (later replaced by `loss.item()`).
        train_loss.update(loss.data[0], N)

        curr_iter += 1
        writer.add_scalar('train_loss', train_loss.avg, curr_iter)

        if (i + 1) % train_args['print_freq'] == 0:
            print('[epoch %d], [iter %d / %d], [train loss %.5f]' % (
                epoch, i + 1, len(train_loader), train_loss.avg
            ))
# (removed stray page-navigation text from the blog this snippet was copied
#  from: "评论列表" = comment list, "文章目录" = article table of contents)