import torch.nn.functional as F
from torch.autograd import Variable  # legacy PyTorch (<0.4) wrapper used below

def train_model(self, train_loader, path, num_batch):
    """Train on up to num_batch batches along the given path; return training accuracy."""
    self.train()
    fitness = 0
    train_len = 0
    for batch_idx, (data, target) in enumerate(train_loader):
        if self.args.cuda:
            data, target = data.cuda(), target.cuda()
        data, target = Variable(data), Variable(target)
        self.optimizer.zero_grad()
        output = self(data, path, -1)
        pred = output.data.max(1)[1]  # index of the max log-probability, i.e. the predicted class
        fitness += pred.eq(target.data).cpu().sum()
        train_len += len(target.data)
        loss = F.cross_entropy(output, target)
        loss.backward()
        self.optimizer.step()
        if batch_idx >= num_batch - 1:  # stop after num_batch batches
            break
    fitness = fitness / train_len  # fraction of correct predictions over the batches seen
    return fitness
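
For reference, a minimal usage sketch of how train_model might be called. The module class Net, its init_path helper, the MNIST loader, and the hyperparameters below are illustrative assumptions and not part of the original code; the model is only assumed to expose args.cuda, an optimizer attribute, and a forward of the form self(data, path, -1) as used above.

import torch
import torch.optim as optim
from torchvision import datasets, transforms

# Assumed data pipeline: a standard MNIST loader (batch size chosen arbitrarily).
train_loader = torch.utils.data.DataLoader(
    datasets.MNIST('./data', train=True, download=True,
                   transform=transforms.ToTensor()),
    batch_size=16, shuffle=True)

model = Net(args)                                   # hypothetical module that defines train_model
model.optimizer = optim.SGD(model.parameters(), lr=0.01)
path = model.init_path()                            # hypothetical: sample a path through the modules
fitness = model.train_model(train_loader, path, num_batch=50)
print('training accuracy on the sampled path: {:.3f}'.format(fitness))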