# Imports assumed by this snippet (standalone Keras plus hyperas/hyperopt):
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Dense, TimeDistributed
from keras.callbacks import ModelCheckpoint, History
from keras.optimizers import Adagrad
from hyperopt import STATUS_OK
from hyperas.distributions import choice  # used by the {{choice(...)}} templates


def my_model(X_train, y_train, X_test, y_test):
    ############ model params ################
    line_length = 248      # sequence length (timesteps per sample)
    train_char = 58        # number of distinct characters (one-hot feature size)
    hidden_neurons = 512   # hidden units per LSTM layer
    batch = 64             # batch size
    no_epochs = 3
    ################### Model ################
    ######### begin model ########
    model = Sequential()
    # layer 1
    model.add(LSTM(hidden_neurons, return_sequences=True,
                   input_shape=(line_length, train_char)))
    model.add(Dropout({{choice([0.4, 0.5, 0.6, 0.7, 0.8])}}))
    # layer 2
    model.add(LSTM(hidden_neurons, return_sequences=True))
    model.add(Dropout({{choice([0.4, 0.5, 0.6, 0.7, 0.8])}}))
    # layer 3
    model.add(LSTM(hidden_neurons, return_sequences=True))
    model.add(Dropout({{choice([0.4, 0.5, 0.6, 0.7, 0.8])}}))
    # fc layer: one softmax over the character set per timestep
    model.add(TimeDistributed(Dense(train_char, activation='softmax')))
    # warm-start from previously trained weights
    model.load_weights("weights/model_maha1_noep50_batch64_seq_248.hdf5")
    ########################################################################
    checkpoint = ModelCheckpoint(
        "weights/hypmodel2_maha1_noep{0}_batch{1}_seq_{2}.hdf5".format(
            no_epochs, batch, line_length),
        monitor='val_loss', verbose=0, save_best_only=True,
        save_weights_only=False, mode='min')
    initlr = 0.00114
    adagrad = Adagrad(lr=initlr, epsilon=1e-08,
                      clipvalue={{choice([0, 1, 2, 3, 4, 5, 6, 7])}})
    model.compile(optimizer=adagrad,
                  loss='categorical_crossentropy', metrics=['accuracy'])
    history = History()
    # fit model (nb_epoch is the Keras 1.x spelling of epochs)
    model.fit(X_train, y_train, batch_size=batch, nb_epoch=no_epochs,
              validation_split=0.2, callbacks=[history, checkpoint])
    score, acc = model.evaluate(X_test, y_test, verbose=0)
    print('Test accuracy:', acc)
    return {'loss': -acc, 'status': STATUS_OK, 'model': model}
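

# --- Usage sketch (not part of the original snippet) -------------------------
# A minimal sketch of how a hyperas model function like my_model is typically
# driven: hyperas expands the {{choice(...)}} templates into a hyperopt search
# space and optimises it with TPE. `data` is a hypothetical zero-argument
# function that must be defined in the same script and return X_train, y_train,
# X_test, y_test, with inputs shaped (samples, 248, 58) to match the model.
from hyperas import optim
from hyperopt import Trials, tpe

if __name__ == '__main__':
    best_run, best_model = optim.minimize(model=my_model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=10,
                                          trials=Trials())
    X_train, y_train, X_test, y_test = data()
    print('Best hyperparameter choices:', best_run)
    print('Test-set loss/accuracy of best model:',
          best_model.evaluate(X_test, y_test, verbose=0))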