def update(self, lossfun=None, *args, **kwds):
    """Update the target link's parameters.

    This method operates in one of two modes:

    - When ``lossfun`` is given, it is called with ``*args`` and
      ``**kwds`` to compute a loss, and gradients are obtained by
      backpropagating from that loss (any previously accumulated
      gradients are cleared first).
    - When ``lossfun`` is omitted, the gradients already stored on the
      parameters are used as-is.

    Either way, each parameter is then updated via :meth:`update_one`
    (or its device-specific variants :meth:`update_one_cpu` /
    :meth:`update_one_gpu`).
    """
    if lossfun is not None:
        # Compute fresh gradients: zero the old ones, evaluate the
        # loss, and backpropagate through it.
        self.target.zerograds()
        loss = lossfun(*args, **kwds)
        loss.backward()
        # Drop the loss variable so the computational graph can be
        # freed before the (potentially memory-hungry) update pass.
        del loss
    self.call_hooks()
    self.prepare()
    self.t += 1
    for param_name, param in self.target.namedparams():
        # Run the per-parameter update on the device that holds the
        # parameter's data.
        with cuda.get_device(param.data):
            self.update_one(param, self._states[param_name])