# Requires (Keras 2): from keras.layers import Input, Embedding, LSTM, Bidirectional, Dense, Dropout, concatenate, dot
#                     from keras.models import Model
#                     from keras.optimizers import Nadam
def __init__(self, word_index, embedding_matrix):
    # Frozen pre-trained embedding layers, one per input sequence length.
    embedding_layer_c = Embedding(len(word_index) + 1,
                                  EMBEDDING_DIM,
                                  weights=[embedding_matrix],
                                  input_length=MAX_SEQUENCE_LENGTH_C,
                                  trainable=False)
    embedding_layer_q = Embedding(len(word_index) + 1,
                                  EMBEDDING_DIM,
                                  weights=[embedding_matrix],
                                  input_length=MAX_SEQUENCE_LENGTH_Q,
                                  trainable=False)
    embedding_layer_a = Embedding(len(word_index) + 1,
                                  EMBEDDING_DIM,
                                  weights=[embedding_matrix],
                                  input_length=MAX_SEQUENCE_LENGTH_A,
                                  trainable=False)

    # Integer-encoded inputs for context, question and answer.
    context = Input(shape=(MAX_SEQUENCE_LENGTH_C,), dtype='int32', name='context')
    question = Input(shape=(MAX_SEQUENCE_LENGTH_Q,), dtype='int32', name='question')
    answer = Input(shape=(MAX_SEQUENCE_LENGTH_A,), dtype='int32', name='answer')

    embedded_context = embedding_layer_c(context)
    embedded_question = embedding_layer_q(question)
    embedded_answer = embedding_layer_a(answer)

    # Encode each sequence with a bidirectional LSTM (60 units per direction).
    l_lstm_c = Bidirectional(LSTM(60))(embedded_context)
    l_lstm_q = Bidirectional(LSTM(60))(embedded_question)
    l_lstm_a = Bidirectional(LSTM(60))(embedded_answer)

    # Project the (question, context) and (answer, context) pairs into a
    # shared 100-dimensional tanh space, then regularise with dropout.
    concat_c_q = concatenate([l_lstm_q, l_lstm_c], axis=1)
    relu_c_q = Dense(100, activation='tanh')(concat_c_q)
    concat_c_a = concatenate([l_lstm_a, l_lstm_c], axis=1)
    relu_c_a = Dense(100, activation='tanh')(concat_c_a)
    relu_c_q = Dropout(0.5)(relu_c_q)
    relu_c_a = Dropout(0.5)(relu_c_a)

    # Cosine similarity between the two projections (Keras 2 equivalent
    # of the Keras 1 call merge([...], mode='cos')).
    concat_c_q_a = dot([relu_c_a, relu_c_q], axes=1, normalize=True)
    softmax_c_q_a = Dense(2, activation='softmax')(concat_c_q_a)

    # Note the input ordering: question, answer, context.
    self.model = Model([question, answer, context], softmax_c_q_a)
    opt = Nadam()
    self.model.compile(loss='categorical_crossentropy',
                       optimizer=opt,
                       metrics=['acc'])
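
For reference, a minimal smoke test of the wiring might look like the sketch below. This is not part of the original code: the instance name qa, the batch size, and the randomly generated integer sequences are assumptions, and word_index plus the MAX_SEQUENCE_LENGTH_* constants are taken to be the same objects used when building the model.

import numpy as np

# Hypothetical smoke test: `qa` is an instance of the class whose __init__
# is shown above.
n = 32
vocab_size = len(word_index) + 1
q = np.random.randint(1, vocab_size, size=(n, MAX_SEQUENCE_LENGTH_Q))
a = np.random.randint(1, vocab_size, size=(n, MAX_SEQUENCE_LENGTH_A))
c = np.random.randint(1, vocab_size, size=(n, MAX_SEQUENCE_LENGTH_C))
y = np.eye(2)[np.random.randint(0, 2, size=n)]  # one-hot labels for the 2-way softmax

# The list order must match Model([question, answer, context], ...).
qa.model.fit([q, a, c], y, batch_size=16, epochs=1)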