mirror of https://github.com/python/cpython
Issue 25422: Add tests for multi-line string tokenization. Also remove truncated tokens.
This commit is contained in:
parent 9b63868f77
commit 6731774216
@@ -24,8 +24,7 @@ class TokenizeTest(TestCase):
             if type == ENDMARKER:
                 break
             type = tok_name[type]
-            result.append("    %(type)-10.10s %(token)-13.13r %(start)s %(end)s" %
-                          locals())
+            result.append(f"    {type:10} {token!r:13} {start} {end}")
         self.assertEqual(result,
                          ["    ENCODING   'utf-8'       (0, 0) (0, 0)"] +
                          expected.rstrip().splitlines())
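Note (not part of the diff): a minimal standalone sketch of the formatting change above. The old %-style specifier %-13.13r caps a token's repr at 13 characters, which is what produced the truncated tokens; the new {token!r:13} only sets a minimum field width and never cuts the repr short.

    # Hypothetical demo of the two format styles used in check_tokenize.
    token = "0xfffffffffff"
    old = "    %(type)-10.10s %(token)-13.13r" % {"type": "NUMBER", "token": token}
    new = f"    {'NUMBER':10} {token!r:13}"
    print(old)  # "    NUMBER     '0xffffffffff"    (repr truncated at 13 chars)
    print(new)  # "    NUMBER     '0xfffffffffff'"  (full repr, padded to at least 13)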
@@ -132,18 +131,18 @@ def k(x):
         self.check_tokenize("x = 0xfffffffffff", """\
     NAME       'x'           (1, 0) (1, 1)
     OP         '='           (1, 2) (1, 3)
-    NUMBER     '0xffffffffff (1, 4) (1, 17)
+    NUMBER     '0xfffffffffff' (1, 4) (1, 17)
     """)
         self.check_tokenize("x = 123141242151251616110", """\
     NAME       'x'           (1, 0) (1, 1)
     OP         '='           (1, 2) (1, 3)
-    NUMBER     '123141242151 (1, 4) (1, 25)
+    NUMBER     '123141242151251616110' (1, 4) (1, 25)
     """)
         self.check_tokenize("x = -15921590215012591", """\
     NAME       'x'           (1, 0) (1, 1)
     OP         '='           (1, 2) (1, 3)
     OP         '-'           (1, 4) (1, 5)
-    NUMBER     '159215902150 (1, 5) (1, 22)
+    NUMBER     '15921590215012591' (1, 5) (1, 22)
     """)
 
     def test_float(self):
@@ -306,6 +305,33 @@ def k(x):
     STRING     'Rb"abc"'     (1, 20) (1, 27)
     OP         '+'           (1, 28) (1, 29)
     STRING     'RB"abc"'     (1, 30) (1, 37)
     """)
+        # Check 0, 1, and 2 character string prefixes.
+        self.check_tokenize(r'"a\
+de\
+fg"', """\
+    STRING     '"a\\\\\\nde\\\\\\nfg"\' (1, 0) (3, 3)
+    """)
+        self.check_tokenize(r'u"a\
+de"', """\
+    STRING     'u"a\\\\\\nde"\' (1, 0) (2, 3)
+    """)
+        self.check_tokenize(r'rb"a\
+d"', """\
+    STRING     'rb"a\\\\\\nd"\' (1, 0) (2, 2)
+    """)
+        self.check_tokenize(r'"""a\
+b"""', """\
+    STRING     '\"\""a\\\\\\nb\"\""' (1, 0) (2, 4)
+    """)
+        self.check_tokenize(r'u"""a\
+b"""', """\
+    STRING     'u\"\""a\\\\\\nb\"\""' (1, 0) (2, 4)
+    """)
+        self.check_tokenize(r'rb"""a\
+b\
+c"""', """\
+    STRING     'rb"\""a\\\\\\nb\\\\\\nc"\""' (1, 0) (3, 4)
+    """)
 
     def test_function(self):
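Note (not part of the diff): a minimal sketch of what the new cases exercise, using only the stdlib tokenize module. A backslash-continued string comes back as a single STRING token whose end position lands on a later line, matching the expected (row, col) pairs in the tests above.

    from io import BytesIO
    from tokenize import tokenize, STRING, tok_name

    # Same shape as the u-prefixed continuation case added above.
    source = 'u"a\\\nde"'
    for tok in tokenize(BytesIO(source.encode("utf-8")).readline):
        if tok.type == STRING:
            # Prints: STRING 'u"a\\\nde"' (1, 0) (2, 3)
            print(tok_name[tok.type], repr(tok.string), tok.start, tok.end)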
@@ -505,7 +531,7 @@ def k(x):
         # Methods
         self.check_tokenize("@staticmethod\ndef foo(x,y): pass", """\
     OP         '@'           (1, 0) (1, 1)
-    NAME       'staticmethod (1, 1) (1, 13)
+    NAME       'staticmethod' (1, 1) (1, 13)
     NEWLINE    '\\n'          (1, 13) (1, 14)
     NAME       'def'         (2, 0) (2, 3)
     NAME       'foo'         (2, 4) (2, 7)