def tokenize(self, language_tag, query):
    # Allocate a unique request id so the eventual response can be
    # matched back to this call.
    id = self._next_id
    self._next_id += 1

    req = dict(req=id, utterance=query, languageTag=language_tag)

    # The returned future is resolved later by the code that reads
    # responses off the socket and looks them up in self._requests.
    outer = Future()
    self._requests[id] = outer

    def then(future):
        # If the write itself failed, fail the pending request and drop
        # it from the table; on success, leave it for the response reader.
        if future.exception():
            outer.set_exception(future.exception())
            del self._requests[id]

    future = self._socket.write(json.dumps(req).encode())
    future.add_done_callback(then)
    return outer
Source file: tokenizer.py (Python)
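The method above implements a request/response correlation pattern: each call gets a numeric id, the pending Future is parked in self._requests keyed by that id, and the JSON request is written to the socket. The section does not show the read side, so the following is only a minimal sketch of how such a reader might resolve the parked futures; the method name _on_response and the reply field names ('req', 'error') are assumptions, not part of the original file.

def _on_response(self, line):
    # Hypothetical reader-side handler: parse one JSON reply and resolve
    # the future that tokenize() stored under the same request id.
    msg = json.loads(line.decode())
    outer = self._requests.pop(msg['req'], None)
    if outer is None:
        return  # unknown request, or already failed during the write
    if 'error' in msg:
        outer.set_exception(RuntimeError(msg['error']))
    else:
        outer.set_result(msg)

Under this reading, a caller simply awaits (or adds a callback to) the Future returned by tokenize(); it completes with the tokenizer's reply once the response arrives, or with an exception if the socket write fails.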