def _forward_alg(self, feats):
    # Forward algorithm of the CRF in log space: returns alpha, the log
    # partition function over all tag sequences for the emission scores `feats`.
    # Assumes module-level `import torch` and `from torch import autograd`.
    init_alphas = torch.Tensor(self.tagset_size, 1).fill_(0.)
    forward_var = autograd.Variable(init_alphas).type(self.dtype)
    for ix, feat in enumerate(feats):
        if ix == 0:
            # First time step: emission scores plus the start-transition weights.
            forward_var += feat.view(self.tagset_size, 1) + self.initial_weights
        else:
            # Recursion: new alpha(i) = emit(i) + logsumexp_j(transitions[i][j] + alpha(j)).
            forward_var = feat.view(self.tagset_size, 1) + log_sum_exp_mat(
                self.transitions + torch.transpose(forward_var.repeat(1, self.tagset_size), 0, 1), 1)
    # Close with the end-transition weights and reduce over all tags.
    terminal_var = forward_var + self.final_weights
    alpha = log_sum_exp_mat(terminal_var, 0)
    return alpha
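
The snippet relies on a helper log_sum_exp_mat that is not defined in this section. A minimal sketch, assuming it performs a numerically stable log-sum-exp reduction along the given dimension and keeps that dimension so the result broadcasts against the (tagset_size, 1) column vectors used above:

import torch

def log_sum_exp_mat(mat, dim):
    # Shift by the per-slice maximum before exponentiating to avoid overflow,
    # then add the shift back after the log; keepdim preserves the reduced axis.
    max_score, _ = torch.max(mat, dim, keepdim=True)
    return max_score + torch.log(torch.sum(torch.exp(mat - max_score), dim, keepdim=True))

With this shape convention, the call inside the loop returns a (tagset_size, 1) vector of updated forward scores, and the final call on terminal_var reduces it to a single log-partition value.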