import torch.nn as nn

class SelfAttentiveEncoder(nn.Module):

    def __init__(self, config):
        super(SelfAttentiveEncoder, self).__init__()
        self.bilstm = BiLSTM(config)  # BiLSTM is defined elsewhere in models.py
        self.drop = nn.Dropout(config['dropout'])
        # W_s1 and W_s2 of the structured self-attention (Lin et al., 2017):
        # project the 2*nhid BiLSTM states to attention-unit dims, then to one
        # score per attention hop.
        self.ws1 = nn.Linear(config['nhid'] * 2, config['attention-unit'], bias=False)
        self.ws2 = nn.Linear(config['attention-unit'], config['attention-hops'], bias=False)
        self.tanh = nn.Tanh()
        # nn.Softmax() with no dim is deprecated; dim=1 assumes the forward
        # flattens the scores to [batch*hops, seq_len] before normalizing.
        self.softmax = nn.Softmax(dim=1)
        self.dictionary = config['dictionary']
        # self.init_weights()
        self.attention_hops = config['attention-hops']
Source: models.py
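The snippet stops at `__init__`, so for context here is a minimal sketch of how these layers typically compose in the forward pass, following the A = softmax(W_s2 tanh(W_s1 H^T)), M = A H formulation from Lin et al. (2017). The signature, argument names, and tensor shapes are assumptions (the real forward method is not shown), and the padding-token masking that the full implementation applies via self.dictionary is omitted for brevity:

import torch

def forward(self, inp, hidden):  # assumed signature; a method of SelfAttentiveEncoder
    outp = self.bilstm(inp, hidden)[0]        # BiLSTM states H, assumed [bsz, len, nhid*2]
    size = outp.size()
    compressed = outp.view(-1, size[2])       # [bsz*len, nhid*2]
    # A = softmax(W_s2 * tanh(W_s1 * H^T))
    hbar = self.tanh(self.ws1(self.drop(compressed)))    # [bsz*len, attention-unit]
    alphas = self.ws2(hbar).view(size[0], size[1], -1)   # [bsz, len, hops]
    alphas = alphas.transpose(1, 2).contiguous()         # [bsz, hops, len]
    alphas = self.softmax(alphas.view(-1, size[1]))      # normalize over tokens (dim=1)
    alphas = alphas.view(size[0], self.attention_hops, size[1])
    # M = A * H: one weighted sum of BiLSTM states per attention hop
    return torch.bmm(alphas, outp), alphas    # [bsz, hops, nhid*2], plus A

With attention-hops > 1, each hop learns its own weighting over the tokens, so the resulting sentence embedding M can attend to several aspects of the sentence at once rather than a single weighted average.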