# Needs (assuming Keras 2.x, matching the epochs= argument used below):
#   from keras.optimizers import Adadelta
#   from keras.callbacks import ReduceLROnPlateau, EarlyStopping
# contractive_loss and CustomModelCheckpoint are project-specific helpers defined elsewhere.
def fit(self, train_X, val_X, nb_epoch=50, batch_size=100, contractive=None):
    optimizer = Adadelta(lr=2.)
    # optimizer = Adam()
    # optimizer = Adagrad()
    if contractive:
        # Contractive autoencoder: reconstruction loss plus a penalty weighted by `contractive` (lambda).
        print('Using contractive loss, lambda: %s' % contractive)
        self.autoencoder.compile(optimizer=optimizer, loss=contractive_loss(self, contractive))
    else:
        print('Using binary crossentropy')
        # Alternative losses: 'kld', 'mse'.
        self.autoencoder.compile(optimizer=optimizer, loss='binary_crossentropy')
    # train_X and val_X are (input, target) pairs.
    self.autoencoder.fit(train_X[0], train_X[1],
                         epochs=nb_epoch,
                         batch_size=batch_size,
                         shuffle=True,
                         validation_data=(val_X[0], val_X[1]),
                         callbacks=[
                             # Shrink the learning rate when validation loss plateaus.
                             ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.01),
                             # Stop training after 5 epochs without meaningful improvement.
                             EarlyStopping(monitor='val_loss', min_delta=1e-5, patience=5, verbose=1, mode='auto'),
                             # Save the encoder whenever validation loss improves.
                             CustomModelCheckpoint(self.encoder, self.save_model, monitor='val_loss',
                                                   save_best_only=True, mode='auto')
                         ])
    return self
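
For reference, a minimal usage sketch. The class name, constructor call, and data here are hypothetical placeholders; the only detail taken from the method above is that train_X and val_X are (input, target) pairs, which coincide for plain reconstruction.

import numpy as np

# Hypothetical data: 784-dimensional inputs scaled to [0, 1], suitable for binary crossentropy.
X_train = np.random.rand(1000, 784).astype('float32')
X_val = np.random.rand(200, 784).astype('float32')

ae = Autoencoder(...)  # hypothetical constructor; the actual class is defined elsewhere in the project

# Plain reconstruction: input and target are the same array.
ae.fit((X_train, X_train), (X_val, X_val), nb_epoch=50, batch_size=100)

# With a contractive penalty (lambda is a small positive weight).
ae.fit((X_train, X_train), (X_val, X_val), contractive=1e-4)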