def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    loss = logs.get('loss')  # read the epoch loss from the logs dict
    print("loss: ", loss)
    old_lr = K.get_value(self.model.optimizer.lr)  # get old lr
    new_lr = old_lr * np.exp(loss)                 # lr * exp(loss)
    K.set_value(self.model.optimizer.lr, new_lr)
# decaylr = LearningRateScheduler(decay_sch)
# checkpoint = ModelCheckpoint(
#     "weights/adam_noep{0}_batch{1}_seq_{2}.hdf5".format(no_epochs, batch, seq_length),
#     monitor='loss', verbose=0,
#     save_best_only=True, save_weights_only=False, mode='min')
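# For context, a minimal sketch of how the commented-out LearningRateScheduler /
# ModelCheckpoint callbacks could be attached to training. The decay_sch body,
# the placeholder values for no_epochs/batch/seq_length, and the fit() call are
# assumptions for illustration; only the callback wiring mirrors the snippet above.
from keras.callbacks import LearningRateScheduler, ModelCheckpoint

no_epochs, batch, seq_length = 50, 64, 100  # placeholder values, not from the post

def decay_sch(epoch, lr=0.001):
    # placeholder schedule: halve the learning rate every 10 epochs
    return lr * (0.5 ** (epoch // 10))

decaylr = LearningRateScheduler(decay_sch)
checkpoint = ModelCheckpoint(
    "weights/adam_noep{0}_batch{1}_seq_{2}.hdf5".format(no_epochs, batch, seq_length),
    monitor='loss', verbose=0,
    save_best_only=True, save_weights_only=False, mode='min')

# model.fit(x_train, y_train, epochs=no_epochs, batch_size=batch,
#           callbacks=[decaylr, checkpoint])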