def __init__(self,
             src_vcb_num,
             trg_vcb_num,
             dim_emb,
             dim_hid,
             attention_type='dot'):
    """Initialize the attention layer on top of the base encoder-decoder.

    Args:
        src_vcb_num: Source vocabulary size (forwarded to the superclass).
        trg_vcb_num: Target vocabulary size (forwarded to the superclass).
        dim_emb: Embedding dimensionality (forwarded to the superclass).
        dim_hid: Hidden-state dimensionality; also sizes the attention links.
        attention_type: Name of the attention score function (default 'dot');
            passed to ``get_attention_components`` to build the extra links.
    """
    super().__init__(src_vcb_num,
                     trg_vcb_num,
                     dim_emb,
                     dim_hid)
    # w_c maps the concatenation of context vector and hidden state
    # (2*dim_hid) back down to dim_hid (Luong-style output projection).
    self.add_link('w_c', L.Linear(2*dim_hid, dim_hid))
    # Register whatever extra parameterized links this attention variant
    # needs (e.g. 'general'/'concat' score functions need their own weights;
    # 'dot' presumably needs none — determined by get_attention_components).
    atten_components = get_attention_components(attention_type, dim_hid)
    for link_name, link in atten_components.items():
        self.add_link(link_name, link)
    # Remembered so the forward pass can select the matching score function.
    self.attention_type = attention_type
# NOTE(review): removed two lines of web-scrape residue ("comment list" /
# "article table of contents" navigation text) that were not part of the code.