from keras.models import Sequential
from keras.layers import Embedding, Dropout, GlobalAveragePooling1D, Dense

# max_features, embedding_dims and maxlen are assumed to be defined earlier
# (vocabulary size, embedding dimension, and padded sequence length).
def build_model(cat, loss):
    print('Build model...')
    model = Sequential()
    # we start off with an efficient embedding layer which maps
    # our vocab indices into embedding_dims dimensions
    model.add(Embedding(max_features,
                        embedding_dims,
                        input_length=maxlen))
    model.add(Dropout(0.5))
    # we add a GlobalAveragePooling1D, which will average the embeddings
    # of all words in the document
    model.add(GlobalAveragePooling1D())
    model.add(Dropout(0.5))
    # we project onto an output layer with `cat` units and squash it with a softmax:
    model.add(Dense(cat, activation='softmax'))
    model.compile(loss=loss,
                  optimizer='adam',
                  metrics=['accuracy'])
    return model
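
A minimal usage sketch, assuming the globals above are set and that x_train/y_train (and x_test/y_test) are hypothetical padded index sequences with one-hot labels:

# Hypothetical call: a 5-class problem with categorical cross-entropy.
model = build_model(cat=5, loss='categorical_crossentropy')
model.fit(x_train, y_train,
          batch_size=32,
          epochs=5,
          validation_data=(x_test, y_test))

Since the output layer uses softmax, the loss passed in should be a categorical loss ('categorical_crossentropy', or 'sparse_categorical_crossentropy' if the labels are integer class indices rather than one-hot vectors).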