import torch
import torch.nn.functional as F
from torch.autograd import Variable  # legacy pre-0.4 API, matching the original

def forward(self, x, lengths):
    batch_size = x.size(0)
    max_len = max(lengths)
    # Embedding lookup. The result is never consumed below: this benchmark
    # only simulates the control flow of dynamic batching (hence "fake").
    emb = Variable(torch.from_numpy(
        self.initial_embeddings.take(x.numpy(), 0)),
        volatile=not self.training)
    for t in range(max_len):
        # At step t, only sequences with at least max_len - t tokens left
        # are still active (sequences are right-aligned to the final step).
        indices = [i for i, l in enumerate(lengths) if l >= max_len - t]
        # Build a batch of the dynamic size. The tensors are deliberately
        # left uninitialized; only their shapes matter here.
        dynamic_batch_size = len(indices)
        inp = Variable(torch.FloatTensor(dynamic_batch_size, self.word_embedding_dim),
                       volatile=not self.training)
        h = Variable(torch.FloatTensor(dynamic_batch_size, self.model_dim),
                     volatile=not self.training)
        output = self.rnn(inp, h)  # an RNNCell step: returns the next hidden state
    # Classify from the final hidden state. Every sequence is active at the
    # last step, so output covers the full batch. (The original's .squeeze()
    # is dropped: it broke the Linear layers when the batch size was 1.)
    hn = output
    h = F.relu(self.l0(F.dropout(hn, 0.5, self.training)))
    h = F.relu(self.l1(F.dropout(h, 0.5, self.training)))
    y = F.log_softmax(h, dim=1)
    return y
Source file: fakedynamic.py
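For context, here is a minimal sketch of a module this forward() could belong to. The attribute names (initial_embeddings, word_embedding_dim, model_dim, rnn, l0, l1) are taken from the snippet above, but the class name, constructor, and dimensions are assumptions for illustration, and the whole thing assumes a legacy (pre-0.4) PyTorch where Variable and volatile still exist.

import numpy as np
import torch
import torch.nn as nn

class FakeDynamicRNN(nn.Module):  # hypothetical name; the original class is not shown
    def __init__(self, vocab_size=100, word_embedding_dim=50,
                 model_dim=128, num_classes=3):
        super(FakeDynamicRNN, self).__init__()
        self.word_embedding_dim = word_embedding_dim
        self.model_dim = model_dim
        # Embedding table kept as a NumPy array, matching the
        # .take(x.numpy(), 0) lookup in forward().
        self.initial_embeddings = np.random.randn(
            vocab_size, word_embedding_dim).astype(np.float32)
        self.rnn = nn.RNNCell(word_embedding_dim, model_dim)
        self.l0 = nn.Linear(model_dim, model_dim)
        self.l1 = nn.Linear(model_dim, num_classes)

    forward = forward  # reuse the forward() defined above in the same file

model = FakeDynamicRNN()
x = torch.from_numpy(np.random.randint(0, 100, size=(4, 7)))  # LongTensor of token ids
lengths = [7, 5, 3, 2]
y = model(x, lengths)  # log-probabilities, shape (4, num_classes)

Because the per-step inputs are uninitialized, the output values are meaningless; the point of fakedynamic.py is presumably to time the per-step rebatching overhead of dynamic batching, not to compute anything.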