def make_lr_scheduler(base_lr, decay_rate, epoch_rate):
    """Build a Keras step-decay learning-rate scheduler callback.

    The learning rate stays at ``base_lr`` for the first ``epoch_rate``
    epochs, then decays as::

        lr = base_lr / (decay_rate * floor((epoch + 1) / epoch_rate))

    i.e. it is divided by ``decay_rate``, ``2 * decay_rate``,
    ``3 * decay_rate``, ... every ``epoch_rate`` epochs.

    Args:
        base_lr: Initial learning rate.
        decay_rate: Multiplicative factor applied to each decay step.
        epoch_rate: Number of epochs between decay steps (must be > 0).

    Returns:
        A ``keras.callbacks.LearningRateScheduler`` wrapping the schedule.
    """
    def lr_schedule(epoch):
        if epoch + 1 < epoch_rate:
            return base_lr
        # BUGFIX: original referenced undefined `rate_epochs` and, due to
        # operator precedence, computed `epoch + (1 / rate_epochs)` instead
        # of `(epoch + 1) / epoch_rate`. Once epoch + 1 >= epoch_rate the
        # floor term is >= 1, so no division by zero occurs here.
        return base_lr / (decay_rate * np.floor((epoch + 1) / epoch_rate))

    return keras.callbacks.LearningRateScheduler(lr_schedule)
# NOTE(review): the two lines below are scraped webpage navigation residue
# ("comment list" / "table of contents"), not code — commented out so the
# file parses; safe to remove.
# 评论列表
# 文章目录