def LSTMLayer(embed_matrix, embed_input, sequence_length, dropout_prob, hidden_dims, embedding_dim=300, lstm_dim=100):
    """Build and compile a bidirectional-MGU binary classifier.

    Architecture: Embedding (initialized from a pretrained matrix) ->
    Bidirectional MGU returning full sequences -> global max pooling over
    time -> ReLU hidden layer with dropout -> sigmoid output unit.

    Args:
        embed_matrix: Pretrained embedding weights; passed as the initial
            weights of the Embedding layer.  Assumed shape
            (embed_input, embedding_dim) — TODO confirm against caller.
        embed_input: Vocabulary size (first dimension of the embedding).
        sequence_length: Fixed input sequence length.
        dropout_prob: Indexable of dropout rates; only index 1 is used here
            (presumably index 0 was for an input dropout — verify).
        hidden_dims: Width of the dense hidden layer.
        embedding_dim: Embedding vector size (default 300).
        lstm_dim: Units in each direction of the recurrent layer (default 100).

    Returns:
        A compiled Keras ``Sequential`` model (binary cross-entropy loss,
        RMSprop optimizer, accuracy metric).
    """
    model = Sequential()
    model.add(Embedding(embed_input, embedding_dim, input_length=sequence_length, weights=[embed_matrix]))
    # MGU is a project-defined recurrent cell; return_sequences=True so the
    # pooling layer below can reduce over the time axis.
    model.add(Bidirectional(MGU(lstm_dim, return_sequences=True)))
    # NOTE(review): an AttentionLayer(lstm_dim) was previously tried here
    # instead of max pooling; left disabled.
    model.add(GlobalMaxPooling1D())
    # Hidden layer: dropout is applied between the linear map and the ReLU,
    # which is equivalent to applying it after (ReLU commutes with the
    # positive rescaling dropout performs).
    model.add(Dense(hidden_dims))
    model.add(Dropout(dropout_prob[1]))
    model.add(Activation('relu'))
    # Single sigmoid unit for binary classification.
    model.add(Dense(1))
    model.add(Activation('sigmoid'))
    model.compile(loss='binary_crossentropy', optimizer='RMSprop', metrics=['accuracy'])
    return model
# NOTE(review): the trailing lines "评论列表" ("comment list") and
# "文章目录" ("article table of contents") are blog-page scrape artifacts,
# not code — commented out so the module remains importable.