bpo-12486: Document tokenize.generate_tokens() as public API (#6957) · python/cpython@c56b17b

@@ -1,8 +1,8 @@
 from test import support
 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                       STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
-                      open as tokenize_open, Untokenizer)
-from io import BytesIO
+                      open as tokenize_open, Untokenizer, generate_tokens)
+from io import BytesIO, StringIO
 import unittest
 from unittest import TestCase, mock
 from test.test_grammar import (VALID_UNDERSCORE_LITERALS,
@@ -919,6 +919,19 @@ async def bar(): pass
             DEDENT     ''            (7, 0) (7, 0)
             """)
 
+class GenerateTokensTest(TokenizeTest):
+    def check_tokenize(self, s, expected):
+        # Format the tokens in s in a table format.
+        # The ENDMARKER is omitted.
+        result = []
+        f = StringIO(s)
+        for type, token, start, end, line in generate_tokens(f.readline):
+            if type == ENDMARKER:
+                break
+            type = tok_name[type]
+            result.append(f"    {type:10} {token!r:13} {start} {end}")
+        self.assertEqual(result, expected.rstrip().splitlines())
+
 
 def decistmt(s):
     result = []
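
For reference, a minimal sketch of how the newly public generate_tokens() is consumed outside the test suite (the source string and print format below are illustrative, not part of the commit). Unlike tokenize(), which takes a readline callable producing bytes and performs encoding detection, generate_tokens() takes a readline callable producing str lines:

    from io import StringIO
    from tokenize import generate_tokens, tok_name

    source = "x = 1 + 2\n"

    # Each yielded item is a TokenInfo named tuple with
    # type, string, start, end, and line fields.
    for tok in generate_tokens(StringIO(source).readline):
        print(f"{tok_name[tok.type]:10} {tok.string!r:10} {tok.start} {tok.end}")

Driving the generator with StringIO(...).readline, as the test's check_tokenize() does, is the usual way to tokenize an in-memory string.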