from collections import defaultdict

import nltk
from nltk.parse.dependencygraph import DependencyGraph


def from_sentence(sent):
    """Build an NLTK DependencyGraph from a raw sentence by tokenizing and POS-tagging it."""
    tokens = nltk.word_tokenize(sent)
    tagged = nltk.pos_tag(tokens)
    dg = DependencyGraph()
    for index, (word, tag) in enumerate(tagged):
        # Node addresses start at 1; address 0 is reserved for the artificial root node.
        dg.nodes[index + 1] = {
            'word': word,
            'lemma': '_',
            'ctag': tag,                 # coarse-grained tag (same as the fine tag here)
            'tag': tag,
            'feats': '_',
            'rel': '_',
            'deps': defaultdict(list),   # dependents grouped by relation label
            'head': '_',
            'address': index + 1,
        }
    # Attach every node to an artificial top node so the graph is connected.
    dg.connect_graph()
    return dg
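
A minimal usage sketch follows; it assumes the NLTK data packages needed by word_tokenize and pos_tag (e.g. 'punkt' and 'averaged_perceptron_tagger') have already been installed via nltk.download(), and the example sentence is illustrative only.

if __name__ == '__main__':
    dg = from_sentence('The quick brown fox jumps over the lazy dog.')
    # Address 0 is the artificial root; addresses 1..n correspond to the tokens.
    for address in sorted(dg.nodes):
        node = dg.nodes[address]
        print(address, node['word'], node['tag'])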
Source file: dependencygraph.py (Python)