Example source code for Python test_tokenize()

Source file: test_tokenize.py (project: zippy, author: securesystemslab)
def test_tokenize(self):
        import tokenize as tokenize_module
        encoding = object()
        encoding_used = None
        def mock_detect_encoding(readline):
            return encoding, ['first', 'second']

        def mock__tokenize(readline, encoding):
            nonlocal encoding_used
            encoding_used = encoding
            out = []
            while True:
                next_line = readline()
                if next_line:
                    out.append(next_line)
                    continue
                return out

        counter = 0
        def mock_readline():
            nonlocal counter
            counter += 1
            if counter == 5:
                return b''
            return counter

        orig_detect_encoding = tokenize_module.detect_encoding
        orig__tokenize = tokenize_module._tokenize
        tokenize_module.detect_encoding = mock_detect_encoding
        tokenize_module._tokenize = mock__tokenize
        try:
            results = tokenize(mock_readline)
            self.assertEqual(list(results), ['first', 'second', 1, 2, 3, 4])
        finally:
            tokenize_module.detect_encoding = orig_detect_encoding
            tokenize_module._tokenize = orig__tokenize

        # assertEqual, not assertTrue: assertTrue(x, msg) treats its second argument as a
        # failure message and never compares the two values.
        self.assertEqual(encoding_used, encoding)
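
For reference, the test above swaps tokenize.detect_encoding and tokenize._tokenize for stubs to check that tokenize() first yields the extra lines returned by detect_encoding and then forwards the same readline callable and detected encoding to _tokenize. A minimal sketch of the public API being exercised, assuming Python 3's tokenize module (the source bytes are made up for illustration):

import io
import tokenize

source = b"x = 1 + 2\n"
# tokenize.tokenize() takes a readline callable returning bytes, detects the
# encoding itself, and yields TokenInfo tuples.
for tok in tokenize.tokenize(io.BytesIO(source).readline):
    print(tok.type, tok.string)
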
Source file: test_tokenize.py (project: zippy, author: securesystemslab)
def test_main():
    from test import test_tokenize
    support.run_doctest(test_tokenize, True)
    support.run_unittest(TestTokenizerAdheresToPep0263)
    support.run_unittest(Test_Tokenize)
    support.run_unittest(TestDetectEncoding)
    support.run_unittest(TestTokenize)
Source file: test_tokenize.py (project: web_ctp, author: molebot)
def test_tokenize(self):
        import tokenize as tokenize_module
        encoding = object()
        encoding_used = None
        def mock_detect_encoding(readline):
            return encoding, ['first', 'second']

        def mock__tokenize(readline, encoding):
            nonlocal encoding_used
            encoding_used = encoding
            out = []
            while True:
                next_line = readline()
                if next_line:
                    out.append(next_line)
                    continue
                return out

        counter = 0
        def mock_readline():
            nonlocal counter
            counter += 1
            if counter == 5:
                return b''
            return counter

        orig_detect_encoding = tokenize_module.detect_encoding
        orig__tokenize = tokenize_module._tokenize
        tokenize_module.detect_encoding = mock_detect_encoding
        tokenize_module._tokenize = mock__tokenize
        try:
            results = tokenize(mock_readline)
            self.assertEqual(list(results), ['first', 'second', 1, 2, 3, 4])
        finally:
            tokenize_module.detect_encoding = orig_detect_encoding
            tokenize_module._tokenize = orig__tokenize

        # assertEqual, not assertTrue: assertTrue(x, msg) treats its second argument as a
        # failure message and never compares the two values.
        self.assertEqual(encoding_used, encoding)
Source file: test_tokenize.py (project: web_ctp, author: molebot)
def test_main():
    from test import test_tokenize
    support.run_doctest(test_tokenize, True)
    support.run_unittest(TestTokenizerAdheresToPep0263)
    support.run_unittest(Test_Tokenize)
    support.run_unittest(TestDetectEncoding)
    support.run_unittest(TestTokenize)
Source file: test_tokenize.py (project: pefile.pypy, author: cloudtracer)
def test_main():
    from test import test_tokenize
    test_support.run_doctest(test_tokenize, True)
    test_support.run_unittest(UntokenizeTest)
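
The pefile.pypy copy above is an older, Python 2 style variant (it imports test_support rather than test.support) and also runs an UntokenizeTest class that is not reproduced on this page. As a rough, hypothetical sketch of the round trip such a test typically exercises, written against Python 3's tokenize module with a made-up source string:

import io
import tokenize

source = b"x = 1 + 2\n"
tokens = list(tokenize.tokenize(io.BytesIO(source).readline))
# untokenize() returns bytes here because the token stream starts with an ENCODING token.
rebuilt = tokenize.untokenize(tokens)
retokenized = list(tokenize.tokenize(io.BytesIO(rebuilt).readline))
# The rebuilt source should tokenize back to the same token types and strings.
assert [(t.type, t.string) for t in tokens] == [(t.type, t.string) for t in retokenized]
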
Source file: test_tokenize.py (project: ndk-python, author: gittor)
def test_main():
    from test import test_tokenize
    test_support.run_doctest(test_tokenize, True)
Source file: test_tokenize.py (project: kbe_server, author: xiaohaoppy)
def test_tokenize(self):
        import tokenize as tokenize_module
        encoding = object()
        encoding_used = None
        def mock_detect_encoding(readline):
            return encoding, ['first', 'second']

        def mock__tokenize(readline, encoding):
            nonlocal encoding_used
            encoding_used = encoding
            out = []
            while True:
                next_line = readline()
                if next_line:
                    out.append(next_line)
                    continue
                return out

        counter = 0
        def mock_readline():
            nonlocal counter
            counter += 1
            if counter == 5:
                return b''
            return counter

        orig_detect_encoding = tokenize_module.detect_encoding
        orig__tokenize = tokenize_module._tokenize
        tokenize_module.detect_encoding = mock_detect_encoding
        tokenize_module._tokenize = mock__tokenize
        try:
            results = tokenize(mock_readline)
            self.assertEqual(list(results), ['first', 'second', 1, 2, 3, 4])
        finally:
            tokenize_module.detect_encoding = orig_detect_encoding
            tokenize_module._tokenize = orig__tokenize

        # assertEqual, not assertTrue: assertTrue(x, msg) treats its second argument as a
        # failure message and never compares the two values.
        self.assertEqual(encoding_used, encoding)
Source file: test_tokenize.py (project: kbe_server, author: xiaohaoppy)
def test_main():
    from test import test_tokenize
    support.run_doctest(test_tokenize, True)
    support.run_unittest(TestTokenizerAdheresToPep0263)
    support.run_unittest(Test_Tokenize)
    support.run_unittest(TestDetectEncoding)
    support.run_unittest(TestTokenize)
    support.run_unittest(UntokenizeTest)
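
Across projects, these test_main() entry points differ only in which test classes exist in that project's copy of test_tokenize.py. They are conventionally paired with a guard so the file can also be run directly; a typical (assumed, not shown in the excerpts above) form is:

if __name__ == "__main__":
    test_main()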

