def update_learning_rate(self, lr):
    # Chainer's Adam stores its step size in ``alpha`` rather than ``lr``.
    if isinstance(self._optimizer, optimizers.Adam):
        self._optimizer.alpha = lr
        return
    # Eve is Adam-like and also exposes its step size as ``alpha``.
    if isinstance(self._optimizer, Eve):
        self._optimizer.alpha = lr
        return
    # AdaDelta has no learning-rate hyperparameter (only rho and eps), so nothing to update.
    if isinstance(self._optimizer, optimizers.AdaDelta):
        return
    # Remaining optimizers (e.g. SGD, MomentumSGD) expose a plain ``lr`` attribute.
    self._optimizer.lr = lr
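For context, here is a minimal sketch of why the attribute name differs per optimizer. It assumes Chainer's optimizers module (which the snippet's optimizers.Adam / optimizers.AdaDelta references suggest); the concrete values are illustrative only.

# Sketch, not from the source: Adam keeps its step size in ``alpha``,
# while SGD keeps it in ``lr``; update_learning_rate() hides this difference.
from chainer import optimizers

adam = optimizers.Adam(alpha=0.001)   # Adam's constructor takes ``alpha``
sgd = optimizers.SGD(lr=0.01)         # SGD's constructor takes ``lr``

adam.alpha = 0.0005   # what update_learning_rate() does for Adam (and Eve)
sgd.lr = 0.005        # what it does in the generic fallback case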