import pygments.token


def tokenize(text, lexer=None):
    """
    Split text into (token_type, token_text) pairs using the given lexer.

    When no lexer is given, the text is split into words instead.
    """
    if lexer is None:
        # Fall back to a plain word split, tagging every word as generic Text.
        return [(pygments.token.Text, word) for word in split_words(text)]
    tokens = lexer.get_tokens(text)
    tokens = group_tokens(tokens)
    return tokens
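A minimal usage sketch of the function above. It assumes pygments is installed, and that the helpers split_words and group_tokens are defined elsewhere in working_with_tokens.py; the sample text and lexer choice here are illustrative only.

import pygments.lexers

# With a lexer: pygments drives the tokenization.
lexer = pygments.lexers.PythonLexer()
for token_type, token_text in tokenize("print('hello')", lexer=lexer):
    print(token_type, repr(token_text))

# Without a lexer: the text is split into words, each tagged as Text
# (requires split_words from the rest of this module).
print(tokenize("hello world"))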