def __init__(self):
    super(Chunking, self).__init__()
    # Per-token input width: word embedding + POS-tag scores + the forward
    # and backward hidden states of the POS-tagging BiLSTM.
    self.input_size = embedding_size \
                      + nb_postags \
                      + postag_hn_size * 2
    # Learnable tensors sized like the BiLSTM's (h_0, c_0) states:
    # (num_layers * 2 directions, max_sentence_size, hidden size).
    self.w = nn.Parameter(torch.randn(chunking_nb_layers * 2,
                                      max_sentence_size,
                                      chunking_hn_size))
    self.h = nn.Parameter(torch.randn(chunking_nb_layers * 2,
                                      max_sentence_size,
                                      chunking_hn_size))
    # POS-tag embedding table and an index vector of all tag ids
    # (0..nb_postags-1); Variable is the legacy torch.autograd wrapper
    # used throughout this codebase.
    self.embedding = nn.Embedding(nb_postags, chunking_postag_emb_size)
    self.aux_emb = torch.arange(0, nb_postags)
    self.aux_emb = Variable(self.aux_emb).long()
    # Bidirectional LSTM over the concatenated token features.
    self.bi_lstm = nn.LSTM(self.input_size,
                           chunking_hn_size,
                           chunking_nb_layers,
                           bidirectional=True)
    # Projects the concatenated forward/backward outputs onto chunk-tag scores.
    self.fc = nn.Linear(chunking_hn_size * 2, nb_chunktags)
Source file: chunking.py (Python)
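The hyperparameters referenced above (embedding_size, nb_postags, postag_hn_size, chunking_hn_size, chunking_nb_layers, max_sentence_size, chunking_postag_emb_size, nb_chunktags) are module-level constants defined elsewhere in the project. The sketch below is a minimal, self-contained illustration of how a layer built from these pieces could consume its inputs; the forward wiring, the concrete constant values, and the tensor shapes are assumptions for illustration and are not taken from chunking.py itself.

import torch
import torch.nn as nn

# Hypothetical stand-ins for the project's config constants (assumed values).
embedding_size = 100      # word-embedding width (assumed)
nb_postags = 45           # number of POS tags (assumed)
postag_hn_size = 64       # POS-tagger LSTM hidden size (assumed)
chunking_hn_size = 64     # chunking LSTM hidden size (assumed)
chunking_nb_layers = 1    # LSTM layers (assumed)
nb_chunktags = 23         # number of chunk tags (assumed)

seq_len, batch = 12, 1
input_size = embedding_size + nb_postags + postag_hn_size * 2

# Per-token features: word embedding, POS-tag scores, POS-layer hidden states.
word_emb = torch.randn(seq_len, batch, embedding_size)
pos_scores = torch.randn(seq_len, batch, nb_postags)
pos_hidden = torch.randn(seq_len, batch, postag_hn_size * 2)
x = torch.cat([word_emb, pos_scores, pos_hidden], dim=-1)

# The same building blocks as in __init__ above.
bi_lstm = nn.LSTM(input_size, chunking_hn_size, chunking_nb_layers,
                  bidirectional=True)
fc = nn.Linear(chunking_hn_size * 2, nb_chunktags)

# Initial hidden/cell states with the required (layers * 2, batch, hidden)
# shape; in the module above these would come from the learnable h and w.
h0 = torch.randn(chunking_nb_layers * 2, batch, chunking_hn_size)
c0 = torch.randn(chunking_nb_layers * 2, batch, chunking_hn_size)

out, _ = bi_lstm(x, (h0, c0))   # (seq_len, batch, 2 * chunking_hn_size)
chunk_logits = fc(out)          # (seq_len, batch, nb_chunktags)
print(chunk_logits.shape)       # torch.Size([12, 1, 23])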