from keras.layers import Input, Embedding, GlobalAveragePooling1D, Dense
from keras.models import Model
from keras.optimizers import Adam


def setup_model(embeddings, seq_len, vocab_size):
    # embedding_size, output_size, base_lr and decay_rate are hyperparameters
    # assumed to be defined elsewhere in the script.
    # Input layer: a sequence of word indices
    inputs = Input(shape=(seq_len,), dtype='int32', name='inputs')
    # Word-vector embedding layer, initialized from the pre-trained
    # embedding matrix passed in and fine-tuned during training
    embedding = Embedding(input_dim=vocab_size, output_dim=embedding_size,
                          weights=[embeddings], input_length=seq_len,
                          name='embedding', trainable=True)(inputs)
    # Average the word vectors over the sequence dimension
    h = GlobalAveragePooling1D()(embedding)
    # Output layer
    output = Dense(units=output_size,
                   activation='sigmoid',
                   kernel_initializer='he_normal',
                   # kernel_regularizer=regularizers.l2(l2_reg_lambda),
                   # kernel_constraint=maxnorm(max_norm),
                   # bias_constraint=maxnorm(max_norm),
                   name='output')(h)
    # Build and compile the model
    model = Model(inputs=inputs, outputs=output)
    model.compile(loss={'output': 'binary_crossentropy'},
                  optimizer=Adam(lr=base_lr, epsilon=1e-6, decay=decay_rate),
                  metrics=['accuracy'])
    return model
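
A minimal sketch of how setup_model might be called, assuming a random matrix in place of pre-trained word vectors; the hyperparameter values (embedding_size, output_size, base_lr, decay_rate) below are placeholders, not values from the original post:

import numpy as np

# Hypothetical hyperparameters; the real values come from the rest of the script.
vocab_size = 10000
seq_len = 100
embedding_size = 300
output_size = 1
base_lr = 1e-3
decay_rate = 0.0

# Random matrix standing in for pre-trained word vectors,
# shape (vocab_size, embedding_size).
embeddings = np.random.uniform(-0.05, 0.05, size=(vocab_size, embedding_size))

model = setup_model(embeddings, seq_len, vocab_size)
model.summary()
# model.fit(x_train, y_train, batch_size=64, epochs=5, validation_split=0.1)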