def on_epoch_begin(self, epoch, logs=None):
    super(MyLearningRateScheduler, self).on_epoch_begin(epoch, logs=logs)
    if epoch > self.epoch_unfreeze:
        # After the unfreeze epoch: thaw everything except the first
        # `num_layers_to_freeze` layers (the layer at index 0 is skipped entirely).
        for i, layer in enumerate(self.model.layers[1:]):
            layer.trainable = i >= self.num_layers_to_freeze
    else:
        # Before the unfreeze epoch: keep only the final layer trainable.
        for layer in self.model.layers[1:-1]:
            layer.trainable = False
        self.model.layers[-1].trainable = True
    # Recompile exactly twice: once on the first epoch and once more after
    # unfreezing, so the updated `trainable` flags actually take effect.
    if not self.recompiled_first or (not self.recompiled and epoch > self.epoch_unfreeze):
        adam = keras.optimizers.Adam(lr=self.step_decay(epoch))
        self.model.compile(optimizer=adam, loss='binary_crossentropy',
                           metrics=['accuracy', custom_metrics.f2score_samples])
        self.model.summary()  # summary() prints itself; no need to wrap it in print()
        if not self.recompiled_first:
            self.recompiled_first = True
        else:
            self.recompiled = True
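
For context, a minimal sketch of how the surrounding callback class might be set up. It assumes the callback subclasses keras.callbacks.Callback (the original post may instead extend keras.callbacks.LearningRateScheduler); the constructor arguments, the decay constants, and the step_decay implementation below are illustrative, not taken from the original code.

import math

import keras


class MyLearningRateScheduler(keras.callbacks.Callback):
    def __init__(self, epoch_unfreeze, num_layers_to_freeze,
                 initial_lr=1e-3, drop=0.5, epochs_drop=10):
        super(MyLearningRateScheduler, self).__init__()
        self.epoch_unfreeze = epoch_unfreeze            # epoch after which layers are unfrozen
        self.num_layers_to_freeze = num_layers_to_freeze
        self.initial_lr = initial_lr                    # illustrative defaults
        self.drop = drop
        self.epochs_drop = epochs_drop
        self.recompiled_first = False                   # set after the first compile
        self.recompiled = False                         # set after the post-unfreeze compile

    def step_decay(self, epoch):
        # Step decay: multiply the learning rate by `drop` every `epochs_drop` epochs.
        return self.initial_lr * math.pow(self.drop, math.floor(epoch / self.epochs_drop))

The callback would then be passed to model.fit via callbacks=[MyLearningRateScheduler(...)], with the arguments chosen to match the model being fine-tuned.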