from keras import backend as K

def get_learning_rate(self):
    """Return the optimizer's current learning rate, with time-based decay applied."""
    if hasattr(self.model, 'optimizer'):
        config = self.model.optimizer.get_config()
        from keras.optimizers import Adadelta, Adagrad, Adam, Adamax, RMSprop, SGD
        if isinstance(self.model.optimizer, (Adadelta, Adagrad, Adam, Adamax, RMSprop, SGD)):
            # These built-in optimizers apply time-based decay per update:
            # lr_t = lr / (1 + decay * iterations)
            iterations = float(K.get_value(self.model.optimizer.iterations))
            return config['lr'] * (1. / (1. + config['decay'] * iterations))
        elif 'lr' in config:
            # No known decay schedule; report the configured base rate.
            return config['lr']
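
A minimal usage sketch, assuming the method above is defined on a keras.callbacks.Callback subclass (Keras attaches self.model to callbacks during fit); the class name LRLogger and the model/data names in the fit call are hypothetical:

from keras.callbacks import Callback

class LRLogger(Callback):
    # get_learning_rate is the method shown above, defined on this class.
    def on_epoch_end(self, epoch, logs=None):
        # May print None if the optimizer exposes no 'lr' in its config.
        print('epoch %d: lr = %s' % (epoch + 1, self.get_learning_rate()))

# model, x_train, y_train stand in for an existing compiled model and data.
model.fit(x_train, y_train, epochs=10, callbacks=[LRLogger()])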