def __init__(self, output_dim, att_dim, attn_activation='tanh',
             attn_inner_activation='tanh',
             single_attn=False, **kwargs):
    '''
    att_dim: dimensionality of the internal attention layer used to compute
        the attention weights.
    single_attn: if True, a single attention weight is used at each timestep t
        and shared across all hidden units, instead of one weight per dimension.
    '''
    # Resolve the activation-function names into callables (keras.activations).
    self.attn_activation = activations.get(attn_activation)
    self.attn_inner_activation = activations.get(attn_inner_activation)
    self.single_attention_param = single_attn
    self.input_spec = None
    self.att_dim = att_dim
    super(AttentionLSTM, self).__init__(output_dim, **kwargs)
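
The constructor above only stores its arguments and resolves the activation names into callables; everything attention-specific happens in the rest of the class. Below is a minimal instantiation sketch, assuming AttentionLSTM subclasses keras.layers.LSTM (as the super() call implies) and that activations refers to keras.activations; the layer sizes are purely illustrative.

from keras import activations  # same module the constructor uses via activations.get

# Hypothetical sizes: 64 LSTM units and a 32-dimensional attention layer,
# with a single attention weight shared across hidden units at each timestep.
attn_lstm = AttentionLSTM(64, att_dim=32, single_attn=True)

# The string identifier 'tanh' has been resolved to the actual tanh callable.
assert attn_lstm.attn_activation is activations.get('tanh')

The layer can then be added to a model like a plain LSTM layer, provided the remaining methods of AttentionLSTM (build, step, and so on) are defined as well.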