def train():
    """Run one training epoch over ``train_loader``, updating ``net`` in place.

    Relies on module-level globals visible in this file: ``net``,
    ``train_loader``, ``optimizer``, and ``state``. After the epoch, stores
    an exponential moving average of the per-batch loss in
    ``state['train_loss']`` (weight 0.8 on the newest batch).
    """
    net.train()  # enable train-mode behavior (dropout, batchnorm stats)
    loss_avg = 0.0
    for data, target in train_loader:
        # Move the batch to the GPU. The original torch.autograd.Variable
        # wrapper is a deprecated no-op since PyTorch 0.4 and is dropped.
        data, target = data.cuda(), target.cuda()

        # forward
        output = net(data)

        # backward
        optimizer.zero_grad()
        loss = F.cross_entropy(output, target)
        loss.backward()
        optimizer.step()

        # Exponential moving average of the loss. loss.item() replaces the
        # old loss.data[0], which raises IndexError on the 0-dim loss
        # tensor in PyTorch >= 0.4.
        loss_avg = loss_avg * 0.2 + loss.item() * 0.8
    state['train_loss'] = loss_avg
# test function (forward only)
# (stray page text from the web copy removed here: "评论列表" / "文章目录")