def __init__(self, num_senses, num_hyps, use_attention=False, return_attention=False, **kwargs):
    # output_dim is required by the parent NSE constructor.
    assert "output_dim" in kwargs, "OntoAttentionNSE requires an output_dim argument"
    output_dim = kwargs.pop("output_dim")
    super(OntoAttentionNSE, self).__init__(output_dim, **kwargs)
    # 5D input: (batch, words, senses, hyps, embedding dim), as expected by OntoAttentionLSTM.
    self.input_spec = [InputSpec(ndim=5)]
    # TODO: Define an attention output method that rebuilds the reader.
    self.return_attention = return_attention
    # Override the base NSE reader with an ontology-attention LSTM; attention
    # weights are not returned by the reader yet (see the TODO above).
    self.reader = OntoAttentionLSTM(self.output_dim, num_senses, num_hyps, use_attention=use_attention,
                                    consume_less='gpu', return_attention=False)
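
# A minimal usage sketch, assuming Keras 1.x and that the input tensor already
# carries per-(word, sense, hypernym) embeddings; the shapes and layer wiring
# below are illustrative, not the canonical training pipeline:
#
#     from keras.layers import Input
#     from keras.models import Model
#
#     # (num_words, num_senses, num_hyps, embedding_dim) per sample
#     x = Input(shape=(20, 3, 5, 50))
#     encoded = OntoAttentionNSE(num_senses=3, num_hyps=5, use_attention=True,
#                                output_dim=50)(x)
#     model = Model(input=x, output=encoded)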