def forward(self, x):
    # x: token-index matrix; with the default (not batch_first) LSTM this is assumed to be (seq_len, batch)
    embed = self.embed(x)                          # (seq_len, batch, embed_dim)
    embed = self.dropout(embed)
    x = embed.view(len(x), embed.size(1), -1)      # keep the (seq_len, batch, embed_dim) layout the LSTM expects
    bilstm_out, self.hidden = self.bilstm(x, self.hidden)
    # self.hidden is the (h_n, c_n) tuple; stack both along dim 0, then concatenate the
    # per-layer / per-direction slices along the feature dimension so every example in the
    # batch gets one flat summary vector.
    hidden = torch.cat(self.hidden, 0)             # (2 * num_layers * num_directions, batch, hidden_dim)
    hidden = torch.cat(list(hidden), 1)            # list(...): torch.cat needs a sequence of tensors
                                                   # -> (batch, 2 * num_layers * num_directions * hidden_dim)
    # Alternative (commented out in the original file): max/avg pooling over bilstm_out
    # via F.max_pool1d / F.avg_pool1d after transposing it to (batch, features, seq_len).
    logit = self.hidden2label(torch.tanh(hidden))  # torch.tanh: F.tanh is deprecated in newer PyTorch
    return logit
Source file: model_BiLSTM_lexicon.py
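The forward pass above relies on self.embed, self.dropout, self.bilstm, self.hidden2label, and a pre-initialized self.hidden, none of which appear in this excerpt. Below is a minimal sketch of how such a module could be wired up so the snippet runs end to end; the class name, hyperparameter names (vocab_size, embed_dim, hidden_dim, label_num, batch_size), and the init_hidden helper are assumptions for illustration, not the actual __init__ of model_BiLSTM_lexicon.py.

import torch
import torch.nn as nn


class BiLSTMClassifier(nn.Module):
    """Sketch of the module the forward() above implies; all names here are assumptions."""

    def __init__(self, vocab_size, embed_dim, hidden_dim, label_num,
                 num_layers=1, dropout=0.5, batch_size=16):
        super().__init__()
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.embed = nn.Embedding(vocab_size, embed_dim)
        self.dropout = nn.Dropout(dropout)
        self.bilstm = nn.LSTM(embed_dim, hidden_dim,
                              num_layers=num_layers, bidirectional=True)
        # h_n and c_n from every layer and direction are flattened into one vector,
        # so the classifier sees 2 (h and c) * num_layers * 2 (directions) * hidden_dim features.
        self.hidden2label = nn.Linear(2 * num_layers * 2 * hidden_dim, label_num)
        self.hidden = self.init_hidden(batch_size)

    def init_hidden(self, batch_size):
        # Zero-initialized (h_0, c_0), each (num_layers * num_directions, batch, hidden_dim).
        shape = (self.num_layers * 2, batch_size, self.hidden_dim)
        return (torch.zeros(shape), torch.zeros(shape))

Because self.hidden is stored on the module and fed back into the LSTM on every call, the batch size must match the one used in init_hidden; in training code it would typically be re-initialized (or at least detached) for each batch.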