Issue #2495: tokenize.untokenize did not insert a space between two consecutive string literals:
"" "" => """", which is invalid code. Will backport
This commit is contained in:
parent
853e44ca8c
commit
da0c025a43
|
@ -487,13 +487,18 @@ Backslash means line continuation, except for comments
|
||||||
>>> roundtrip("# Comment \\\\nx = 0")
|
>>> roundtrip("# Comment \\\\nx = 0")
|
||||||
True
|
True
|
||||||
|
|
||||||
|
Two string literals on the same line
|
||||||
|
|
||||||
|
>>> roundtrip("'' ''")
|
||||||
|
True
|
||||||
|
|
||||||
|
Test roundtrip on random python modules.
|
||||||
|
pass the '-ucompiler' option to process the full directory.
|
||||||
|
|
||||||
>>>
|
>>>
|
||||||
>>> tempdir = os.path.dirname(f) or os.curdir
|
>>> tempdir = os.path.dirname(f) or os.curdir
|
||||||
>>> testfiles = glob.glob(os.path.join(tempdir, "test*.py"))
|
>>> testfiles = glob.glob(os.path.join(tempdir, "test*.py"))
|
||||||
|
|
||||||
XXX: tokenize does not support __future__.unicode_literals yet
|
|
||||||
>>> blacklist = ("test_future4.py",)
|
|
||||||
>>> testfiles = [f for f in testfiles if not f.endswith(blacklist)]
|
|
||||||
>>> if not test_support.is_resource_enabled("compiler"):
|
>>> if not test_support.is_resource_enabled("compiler"):
|
||||||
... testfiles = random.sample(testfiles, 10)
|
... testfiles = random.sample(testfiles, 10)
|
||||||
...
|
...
|
||||||
|
|
|
@ -210,12 +210,21 @@ class Untokenizer:
|
||||||
tokval += ' '
|
tokval += ' '
|
||||||
if toknum in (NEWLINE, NL):
|
if toknum in (NEWLINE, NL):
|
||||||
startline = True
|
startline = True
|
||||||
|
prevstring = False
|
||||||
for tok in iterable:
|
for tok in iterable:
|
||||||
toknum, tokval = tok[:2]
|
toknum, tokval = tok[:2]
|
||||||
|
|
||||||
if toknum in (NAME, NUMBER):
|
if toknum in (NAME, NUMBER):
|
||||||
tokval += ' '
|
tokval += ' '
|
||||||
|
|
||||||
|
# Insert a space between two consecutive strings
|
||||||
|
if toknum == STRING:
|
||||||
|
if prevstring:
|
||||||
|
tokval = ' ' + tokval
|
||||||
|
prevstring = True
|
||||||
|
else:
|
||||||
|
prevstring = False
|
||||||
|
|
||||||
if toknum == INDENT:
|
if toknum == INDENT:
|
||||||
indents.append(tokval)
|
indents.append(tokval)
|
||||||
continue
|
continue
|
||||||
|
@ -244,7 +253,7 @@ def untokenize(iterable):
|
||||||
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
|
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
|
||||||
newcode = untokenize(t1)
|
newcode = untokenize(t1)
|
||||||
readline = iter(newcode.splitlines(1)).next
|
readline = iter(newcode.splitlines(1)).next
|
||||||
t2 = [tok[:2] for tokin generate_tokens(readline)]
|
t2 = [tok[:2] for tok in generate_tokens(readline)]
|
||||||
assert t1 == t2
|
assert t1 == t2
|
||||||
"""
|
"""
|
||||||
ut = Untokenizer()
|
ut = Untokenizer()
|
||||||
|
|
|
@ -76,6 +76,10 @@ Extensions Modules
|
||||||
Library
|
Library
|
||||||
-------
|
-------
|
||||||
|
|
||||||
|
- Issue #2495: tokenize.untokenize now inserts a space between two consecutive
|
||||||
|
string literals; previously, ["" ""] was rendered as [""""], which is
|
||||||
|
incorrect python code.
|
||||||
|
|
||||||
- Issue #2248: return the result of the QUIT command from SMTP.quit().
|
- Issue #2248: return the result of the QUIT command from SMTP.quit().
|
||||||
|
|
||||||
- Backport of Python 3.0's io module.
|
- Backport of Python 3.0's io module.
|
||||||
|
|
Loading…
Reference in New Issue