def create_concat_model(self, emb_dim, emb_path, vocab_word,
                        vocab_word_size, word_maxlen, vocab_char_size,
                        char_maxlen):
    """Build a char+word concatenation model for essay scoring.

    A character branch (Embedding -> masked Conv1D -> masked MaxPooling)
    is pooled down so its sequence length matches the word sequence
    length, then concatenated with the word embeddings along the time
    axis. The merged sequence is projected, convolved, mean-pooled over
    time, and passed through a sigmoid output.

    Args:
        emb_dim: Dimensionality of both char and word embeddings.
        emb_path: Optional path to pre-trained word embeddings; when
            truthy, the word embedding matrix is initialized from it.
        vocab_word: Word vocabulary (token -> index mapping) used to
            align pre-trained vectors with the embedding matrix.
        vocab_word_size: Size of the word vocabulary.
        word_maxlen: Maximum word-sequence length.
        vocab_char_size: Size of the character vocabulary.
        char_maxlen: Maximum character-sequence length.

    Returns:
        A compiled-ready Keras ``Model`` with inputs
        ``[input_char, input_word]`` and a sigmoid output of
        ``self.num_outputs`` units.
    """
    from aes.layers import Conv1DMask, MaxPooling1DMask
    from keras.layers import concatenate
    logger.info('Building concatenation model')

    # --- Character branch ---------------------------------------------
    input_char = Input(shape=(char_maxlen, ), name='input_char')
    char_emb = Embedding(
        vocab_char_size, emb_dim, mask_zero=True)(input_char)
    char_cnn = Conv1DMask(
        filters=emb_dim, kernel_size=3, padding='same')(char_emb)
    # BUG FIX: pool_size must be an int. On Python 3, `/` is true
    # division and yields a float (e.g. 500 / 50 == 10.0), which Keras
    # pooling layers reject. Floor division keeps Python 2 behavior.
    # Pooling by char_maxlen // word_maxlen shrinks the char sequence
    # to (approximately) word_maxlen steps so the two branches can be
    # concatenated along the time axis.
    char_input = MaxPooling1DMask(
        pool_size=char_maxlen // word_maxlen, padding='same')(char_cnn)

    # --- Word branch --------------------------------------------------
    input_word = Input(shape=(word_maxlen, ), name='input_word')
    word_input = Embedding(
        vocab_word_size, emb_dim, mask_zero=True,
        name='word_emb')(input_word)

    # --- Merge and score ----------------------------------------------
    # axis=1 concatenates along the time (sequence) dimension.
    merged = concatenate([char_input, word_input], axis=1)
    merged_dropped = Dropout(0.5)(merged)
    final_input = Dense(50)(merged_dropped)
    cnn = Conv1DMask(
        filters=emb_dim, kernel_size=3, padding='same')(final_input)
    dropped = Dropout(0.5)(cnn)
    mot = MeanOverTime(mask_zero=True)(dropped)
    densed = Dense(self.num_outputs, name='dense')(mot)
    output = Activation('sigmoid')(densed)
    model = Model(inputs=[input_char, input_word], outputs=output)

    # Initialize the output bias (e.g. to the mean score).
    # NOTE(review): `.set_value` is the old Keras/Theano variable API —
    # confirm this matches the Keras version pinned by the project.
    model.get_layer('dense').bias.set_value(self.bias)

    if emb_path:
        from emb_reader import EmbReader as EmbReader
        logger.info('Initializing lookup table')
        emb_reader = EmbReader(emb_path, emb_dim=emb_dim)
        # Overwrite the word embedding matrix with pre-trained vectors,
        # keeping random init for words missing from the embedding file.
        model.get_layer('word_emb').embeddings.set_value(
            emb_reader.get_emb_matrix_given_vocab(
                vocab_word,
                model.get_layer('word_emb').embeddings.get_value()))
        logger.info('  Done')
    return model
# NOTE(review): stray webpage scrape residue removed here
# (original stray text: "评论列表" = comment list, "文章目录" = article
# table of contents — not part of the source code).