def update_core(self):
    """Run a single training step: fetch a batch, compute loss, backprop, update.

    When one iterator and one optimizer are passed to
    ``StandardUpdater.__init__``, they are automatically named ``'main'``.
    """
    train_iter = self.get_iterator('main')
    optimizer = self.get_optimizer('main')

    # Get the next minibatch of inputs and targets.
    x, t = next(train_iter)

    # The optimizer's target is the model link; calling it computes the loss.
    loss = optimizer.target(x, t)

    # cleargrads() is the current Chainer API; zerograds() is deprecated
    # (it zero-fills gradient arrays instead of releasing them).
    optimizer.target.cleargrads()
    loss.backward()       # Backprop through the computational graph.
    optimizer.update()    # Apply the accumulated gradients.
# Source file: sequential_updater.py