def build_model(self, x):
    """Build the document-encoding graph on top of the input tensor ``x``.

    The configured recurrent encoder (``self.rnn_class`` with
    ``return_sequences=True``) is applied to ``x`` — wrapped in
    ``Bidirectional`` when ``self.bidirectional`` is truthy — and the
    resulting per-word activations are collapsed into a single document
    vector by an ``AttentionLayer``.  The layer's attention tensor is
    stored on ``self.attention_tensor`` so callers can inspect or
    visualize the learned attention weights afterwards.

    Returns the document vector tensor produced by the attention layer.
    """
    # assumes x is a 3-D sequence tensor (batch, timesteps, features) — TODO confirm
    base_rnn = self.rnn_class(
        self.encoder_dims, return_sequences=True, **self.rnn_kwargs
    )
    encoder = Bidirectional(base_rnn) if self.bidirectional else base_rnn
    word_activations = encoder(x)

    attention = AttentionLayer()
    doc_vector = attention(word_activations)
    # keep a handle to the attention weights for later inspection
    self.attention_tensor = attention.get_attention_tensor()
    return doc_vector
# NOTE(review): the two lines below were stray page-scrape artifacts pasted
# into this file ("评论列表" = comment list, "文章目录" = article table of
# contents — blog navigation text, not code); preserved here as a comment
# so the module remains valid Python.