def token(self):
    """
    Return the next LexToken, or None once all tokens have been exhausted.
    """
    if self.tokens_queue:
        # Serve tokens queued on an earlier call first (e.g. pending
        # NEWLINE/DEDENT tokens or the remainder of a MultiToken).
        self.last_token = self.tokens_queue.pop(0)
    else:
        r = self.lex.token()
        if isinstance(r, MultiToken):
            # A MultiToken expands into several tokens: queue them all and
            # return the first one now.
            self.tokens_queue.extend(r.tokens)
            self.last_token = self.tokens_queue.pop(0)
        else:
            if r is None and self.cur_indent > 0:
                # End of input while blocks are still open: terminate the last
                # logical line if needed, then emit one DEDENT per open
                # 4-space indentation level.
                if self.last_token and self.last_token.type not in ('NEWLINE', 'LINE'):
                    newline_token = self._create_token('NEWLINE', '\n', self.lex.lineno,
                                                       self.lex.lexpos)
                    self.tokens_queue.append(newline_token)
                dedent_count = self.cur_indent // 4
                dedent_token = self._create_token('DEDENT', '\t', self.lex.lineno,
                                                  self.lex.lexpos)
                self.tokens_queue.extend([dedent_token] * dedent_count)
                self.cur_indent = 0
                self.last_token = self.tokens_queue.pop(0)
            else:
                self.last_token = r
    return self.last_token
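
The `_create_token` helper is not shown here. With PLY it is typically a thin wrapper that synthesizes a `ply.lex.LexToken` by hand; the standalone function below is a sketch of what it presumably does in this codebase, not the actual implementation:

import ply.lex as lex

def create_token(type_, value, lineno, lexpos):
    """Synthesize a LexToken equivalent to one produced by the raw lexer."""
    tok = lex.LexToken()
    tok.type = type_
    tok.value = value
    tok.lineno = lineno
    tok.lexpos = lexpos
    return tok

Driving the wrapper then looks like consuming any other PLY token stream. The helper below is an illustrative sketch that works with any object exposing the `token()` protocol shown above:

def dump_tokens(lexer):
    # Pull tokens until token() signals exhaustion with None.
    while True:
        tok = lexer.token()
        if tok is None:
            break
        print(tok.type, repr(tok.value))

If the source ends while still indented, the last iterations of this loop see the synthesized NEWLINE (when one is needed) followed by one DEDENT per open 4-space level before `token()` finally returns None.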