def _get_encoder_layer(self):
    # Lazily build the sentence encoder the first time it is requested,
    # then cache it on self.encoder_layer so later calls return the same layer.
    if self.encoder_layer is None:
        self.encoder_layer = OntoAttentionLSTM(input_dim=self.embed_dim, output_dim=self.embed_dim,
                                               num_senses=self.num_senses, num_hyps=self.num_hyps,
                                               use_attention=self.use_attention, consume_less="gpu",
                                               return_sequences=self.return_sequences, name="onto_lstm")
        if self.bidirectional:
            # Wrap the OntoLSTM so the sequence is read in both directions.
            self.encoder_layer = Bidirectional(self.encoder_layer, name="onto_lstm")
    return self.encoder_layer
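
Because the layer is cached on self.encoder_layer, every call returns the same (possibly Bidirectional-wrapped) instance, so a model that encodes more than one input reuses a single set of encoder weights. The sketch below illustrates that lazy-construction pattern with a plain Keras LSTM standing in for OntoAttentionLSTM; the SharedEncoder class and its hyperparameters are illustrative stand-ins, not part of the original code.

# Illustrative sketch only: a plain LSTM stands in for OntoAttentionLSTM,
# and SharedEncoder is a hypothetical container for the same lazy pattern.
from keras.layers import LSTM, Bidirectional, Embedding, Input


class SharedEncoder(object):
    def __init__(self, embed_dim=50, bidirectional=True, return_sequences=False):
        self.embed_dim = embed_dim
        self.bidirectional = bidirectional
        self.return_sequences = return_sequences
        self.encoder_layer = None  # built lazily, then reused

    def _get_encoder_layer(self):
        if self.encoder_layer is None:
            self.encoder_layer = LSTM(self.embed_dim,
                                      return_sequences=self.return_sequences,
                                      name="lstm")
            if self.bidirectional:
                self.encoder_layer = Bidirectional(self.encoder_layer, name="bi_lstm")
        return self.encoder_layer


encoder = SharedEncoder()
embedding = Embedding(input_dim=10000, output_dim=50)

premise = Input(shape=(20,), dtype='int32')
hypothesis = Input(shape=(20,), dtype='int32')

# Both inputs pass through the same cached layer, so its weights are shared.
encoded_premise = encoder._get_encoder_layer()(embedding(premise))
encoded_hypothesis = encoder._get_encoder_layer()(embedding(hypothesis))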