def fit_cnn1(self, X33_train, Y_train, X33_unif_train, Y_unif_train,
             checkpoint_dir="/home/ixb3/Scrivania/check"):
    """Two-phase training of the first CNN on 4-channel 33x33 patches.

    Phase 1 trains the whole temporary network on the uniformly
    distributed (class-balanced) patch set; phase 2 freezes every layer
    except the output layer and retrains it on patches with the real
    class distribution. The trained weights are then copied into
    ``self.cnn1``.

    Parameters
    ----------
    X33_train, Y_train
        Patches/labels with the real class distribution (phase 2).
    X33_unif_train, Y_unif_train
        Uniformly distributed (balanced) patches/labels (phase 1).
    checkpoint_dir : str
        Directory for per-epoch checkpoints; the default preserves the
        original hard-coded location.
    """
    # Temporary CNN with input shape (4, 33, 33) and a 5-class softmax
    # output flattened to a vector.
    input33 = Input(shape=(4, 33, 33))
    output_cnn = self.one_block_model(input33)
    output_cnn = Reshape((5,))(output_cnn)
    # Compile with SGD (momentum + decay) and categorical cross-entropy.
    temp_cnn = Model(inputs=input33, outputs=output_cnn)
    sgd = SGD(lr=self.learning_rate, momentum=self.momentum_rate,
              decay=self.decay_rate, nesterov=False)
    temp_cnn.compile(loss='categorical_crossentropy', optimizer=sgd,
                     metrics=['accuracy'])
    # Stop training if val_loss has not improved for `patience` epochs.
    earlystopping = EarlyStopping(monitor='val_loss', patience=2,
                                  verbose=1, mode='auto')
    # Save the model after every epoch to checkpoint_dir/bm_<epoch>-<val_loss>.
    checkpointer = ModelCheckpoint(
        filepath=checkpoint_dir + "/bm_{epoch:02d}-{val_loss:.2f}.hdf5",
        verbose=1)
    # Phase 1: train the whole network on the uniform (balanced) set.
    # NOTE(review): the original passed X33_train here and X33_unif_train
    # in phase 2, contradicting its own comments; the arguments were
    # swapped to match the documented scheme — confirm against the caller.
    temp_cnn.fit(x=X33_unif_train, y=Y_unif_train,
                 batch_size=self.batch_size, epochs=self.nb_epoch,
                 callbacks=[earlystopping, checkpointer],
                 validation_split=0.3, verbose=1)
    # Freeze all layers except the output layer for the second phase.
    temp_cnn = self.freeze_model(temp_cnn, freeze_output=False)
    # Phase 2: retrain only the output layer on the real-distribution set.
    temp_cnn.fit(x=X33_train, y=Y_train,
                 batch_size=self.batch_size, epochs=self.nb_epoch,
                 callbacks=[earlystopping, checkpointer],
                 validation_split=0.3, verbose=1)
    # Copy the trained weights into the persistent first CNN.
    self.cnn1.set_weights(temp_cnn.get_weights())