"""Test suite for 2to3's parser and grammar files.
|
|
|
|
|
|
|
|
|
|
This is the place to add tests for changes to 2to3's grammar, such as those
|
|
|
|
|
merging the grammars for Python 2 and 3. In addition to specific tests for
|
|
|
|
|
parts of the grammar we've changed, we also make sure we can parse the
|
|
|
|
|
test_grammar.py files from both Python 2 and Python 3.
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
# Testing imports
|
|
|
|
|
from . import support
from .support import driver, driver_no_print_statement

# Python imports
import difflib
import importlib
import operator
import os
import pickle
import shutil
import subprocess
import sys
import tempfile
import unittest

# Local imports
from lib2to3.pgen2 import driver as pgen2_driver
from lib2to3.pgen2 import tokenize
from ..pgen2.parse import ParseError
from lib2to3.pygram import python_symbols as syms


class TestDriver(support.TestCase):

    def test_formfeed(self):
        s = """print 1\n\x0Cprint 2\n"""
        t = driver.parse_string(s)
        self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
        self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
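

# pgen2 compiles Grammar.txt on first use and, when asked to, caches the
# result as a pickle next to the grammar file; these tests cover both the
# cold path and the cached path.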
class TestPgen2Caching(support.TestCase):

    def test_load_grammar_from_txt_file(self):
        pgen2_driver.load_grammar(support.grammar_path, save=False, force=True)

    def test_load_grammar_from_pickle(self):
        # Make a copy of the grammar file in a temp directory we are
        # guaranteed to be able to write to.
        tmpdir = tempfile.mkdtemp()
        try:
            grammar_copy = os.path.join(
                    tmpdir, os.path.basename(support.grammar_path))
            shutil.copy(support.grammar_path, grammar_copy)
            pickle_name = pgen2_driver._generate_pickle_name(grammar_copy)

            pgen2_driver.load_grammar(grammar_copy, save=True, force=True)
            self.assertTrue(os.path.exists(pickle_name))

            os.unlink(grammar_copy)  # Only the pickle remains...
            pgen2_driver.load_grammar(grammar_copy, save=False, force=False)
        finally:
            shutil.rmtree(tmpdir)

    @unittest.skipIf(sys.executable is None, 'sys.executable required')
    def test_load_grammar_from_subprocess(self):
        tmpdir = tempfile.mkdtemp()
        tmpsubdir = os.path.join(tmpdir, 'subdir')
        try:
            os.mkdir(tmpsubdir)
            grammar_base = os.path.basename(support.grammar_path)
            grammar_copy = os.path.join(tmpdir, grammar_base)
            grammar_sub_copy = os.path.join(tmpsubdir, grammar_base)
            shutil.copy(support.grammar_path, grammar_copy)
            shutil.copy(support.grammar_path, grammar_sub_copy)
            pickle_name = pgen2_driver._generate_pickle_name(grammar_copy)
            pickle_sub_name = pgen2_driver._generate_pickle_name(
                    grammar_sub_copy)
            self.assertNotEqual(pickle_name, pickle_sub_name)

            # Generate a pickle file from this process.
            pgen2_driver.load_grammar(grammar_copy, save=True, force=True)
            self.assertTrue(os.path.exists(pickle_name))

            # Generate a new pickle file in a subprocess with a most likely
            # different hash randomization seed.
            sub_env = dict(os.environ)
            sub_env['PYTHONHASHSEED'] = 'random'
            subprocess.check_call(
                    [sys.executable, '-c', """
from lib2to3.pgen2 import driver as pgen2_driver
pgen2_driver.load_grammar(%r, save=True, force=True)
                    """ % (grammar_sub_copy,)],
                    env=sub_env)
            self.assertTrue(os.path.exists(pickle_sub_name))

            with open(pickle_name, 'rb') as pickle_f_1, \
                    open(pickle_sub_name, 'rb') as pickle_f_2:
                self.assertEqual(
                    pickle_f_1.read(), pickle_f_2.read(),
                    msg='Grammar caches generated using different hash seeds'
                        ' were not identical.')
        finally:
            shutil.rmtree(tmpdir)

    def test_load_packaged_grammar(self):
        modname = __name__ + '.load_test'
        class MyLoader:
            def get_data(self, where):
                return pickle.dumps({'elephant': 19})
        class MyModule:
            __file__ = 'parsertestmodule'
            __spec__ = importlib.util.spec_from_loader(modname, MyLoader())
        sys.modules[modname] = MyModule()
        self.addCleanup(operator.delitem, sys.modules, modname)
        g = pgen2_driver.load_packaged_grammar(modname, 'Grammar.txt')
        self.assertEqual(g.elephant, 19)
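

# Base class for the grammar tests below: validate() must parse cleanly,
# invalid_syntax() must raise ParseError.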
class GrammarTest(support.TestCase):

    def validate(self, code):
        support.parse_string(code)

    def invalid_syntax(self, code):
        try:
            self.validate(code)
        except ParseError:
            pass
        else:
            raise AssertionError("Syntax shouldn't have been valid")
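

# PEP 465: the '@' matrix multiplication operator.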
class TestMatrixMultiplication(GrammarTest):

    def test_matrix_multiplication_operator(self):
        self.validate("a @ b")
        self.validate("a @= b")
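

# PEP 380: "yield from" delegation to a subgenerator.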
class TestYieldFrom(GrammarTest):

    def test_yield_from(self):
        self.validate("yield from x")
        self.validate("(yield from x) + y")
        self.invalid_syntax("yield from")
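

# async/await (PEP 492): under this grammar they only act as keywords inside
# an "async def" body, so the same spellings must still parse as ordinary
# identifiers elsewhere (see test_async_var).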
class TestAsyncAwait(GrammarTest):

    def test_await_expr(self):
        self.validate("""async def foo():
                             await x
                      """)

        self.validate("""async def foo():
                             [i async for i in b]
                      """)

        self.validate("""async def foo():
                             {i for i in b
                                async for i in a if await i
                                  for b in i}
                      """)

        self.validate("""async def foo():
                             [await i for i in b if await c]
                      """)

        self.validate("""async def foo():
                             [ i for i in b if c]
                      """)

        self.validate("""async def foo():

            def foo(): pass

            def foo(): pass

            await x
        """)

        self.validate("""async def foo(): return await a""")

        self.validate("""def foo():
                             def foo(): pass
                             async def foo(): await x
                      """)

        self.invalid_syntax("await x")
        self.invalid_syntax("""def foo():
                                   await x""")

        self.invalid_syntax("""def foo():
            def foo(): pass
            async def foo(): pass
            await x
        """)

    def test_async_var(self):
        self.validate("""async = 1""")
        self.validate("""await = 1""")
        self.validate("""def async(): pass""")

    def test_async_with(self):
        self.validate("""async def foo():
                             async with a: pass""")

        self.invalid_syntax("""def foo():
                                   async with a: pass""")

    def test_async_for(self):
        self.validate("""async def foo():
                             async for a in b: pass""")

        self.invalid_syntax("""def foo():
                                   async for a in b: pass""")


class TestRaiseChanges(GrammarTest):

    def test_2x_style_1(self):
        self.validate("raise")

    def test_2x_style_2(self):
        self.validate("raise E, V")

    def test_2x_style_3(self):
        self.validate("raise E, V, T")

    def test_2x_style_invalid_1(self):
        self.invalid_syntax("raise E, V, T, Z")

    def test_3x_style(self):
        self.validate("raise E1 from E2")

    def test_3x_style_invalid_1(self):
        self.invalid_syntax("raise E, V from E1")

    def test_3x_style_invalid_2(self):
        self.invalid_syntax("raise E from E1, E2")

    def test_3x_style_invalid_3(self):
        self.invalid_syntax("raise from E1, E2")

    def test_3x_style_invalid_4(self):
        self.invalid_syntax("raise E from")


# Modelled after Lib/test/test_grammar.py:TokenTests.test_funcdef issue2292
# and Lib/test/text_parser.py test_list_displays, test_set_displays,
# test_dict_displays, test_argument_unpacking, ... changes.
class TestUnpackingGeneralizations(GrammarTest):

    def test_mid_positional_star(self):
        self.validate("""func(1, *(2, 3), 4)""")

    def test_double_star_dict_literal(self):
        self.validate("""func(**{'eggs':'scrambled', 'spam':'fried'})""")

    def test_double_star_dict_literal_after_keywords(self):
        self.validate("""func(spam='fried', **{'eggs':'scrambled'})""")

    def test_double_star_expression(self):
        self.validate("""func(**{'a':2} or {})""")
        self.validate("""func(**() or {})""")

    def test_star_expression(self):
        self.validate("""func(*[] or [2])""")

    def test_list_display(self):
        self.validate("""[*{2}, 3, *[4]]""")

    def test_set_display(self):
        self.validate("""{*{2}, 3, *[4]}""")

    def test_dict_display_1(self):
        self.validate("""{**{}}""")

    def test_dict_display_2(self):
        self.validate("""{**{}, 3:4, **{5:6, 7:8}}""")

    def test_complex_star_expression(self):
        self.validate("func(* [] or [1])")

    def test_complex_double_star_expression(self):
        self.validate("func(**{1: 3} if False else {x: x for x in range(3)})")

    def test_argument_unpacking_1(self):
        self.validate("""f(a, *b, *c, d)""")

    def test_argument_unpacking_2(self):
        self.validate("""f(**a, **b)""")

    def test_argument_unpacking_3(self):
        self.validate("""f(2, *a, *b, **b, **c, **d)""")

    def test_trailing_commas_1(self):
        self.validate("def f(a, b): call(a, b)")
        self.validate("def f(a, b,): call(a, b,)")

    def test_trailing_commas_2(self):
        self.validate("def f(a, *b): call(a, *b)")
        self.validate("def f(a, *b,): call(a, *b,)")

    def test_trailing_commas_3(self):
        self.validate("def f(a, b=1): call(a, b=1)")
        self.validate("def f(a, b=1,): call(a, b=1,)")

    def test_trailing_commas_4(self):
        self.validate("def f(a, **b): call(a, **b)")
        self.validate("def f(a, **b,): call(a, **b,)")

    def test_trailing_commas_5(self):
        self.validate("def f(*a, b=1): call(*a, b=1)")
        self.validate("def f(*a, b=1,): call(*a, b=1,)")

    def test_trailing_commas_6(self):
        self.validate("def f(*a, **b): call(*a, **b)")
        self.validate("def f(*a, **b,): call(*a, **b,)")

    def test_trailing_commas_7(self):
        self.validate("def f(*, b=1): call(*b)")
        self.validate("def f(*, b=1,): call(*b,)")

    def test_trailing_commas_8(self):
        self.validate("def f(a=1, b=2): call(a=1, b=2)")
        self.validate("def f(a=1, b=2,): call(a=1, b=2,)")

    def test_trailing_commas_9(self):
        self.validate("def f(a=1, **b): call(a=1, **b)")
        self.validate("def f(a=1, **b,): call(a=1, **b,)")

    def test_trailing_commas_lambda_1(self):
        self.validate("f = lambda a, b: call(a, b)")
        self.validate("f = lambda a, b,: call(a, b,)")

    def test_trailing_commas_lambda_2(self):
        self.validate("f = lambda a, *b: call(a, *b)")
        self.validate("f = lambda a, *b,: call(a, *b,)")

    def test_trailing_commas_lambda_3(self):
        self.validate("f = lambda a, b=1: call(a, b=1)")
        self.validate("f = lambda a, b=1,: call(a, b=1,)")

    def test_trailing_commas_lambda_4(self):
        self.validate("f = lambda a, **b: call(a, **b)")
        self.validate("f = lambda a, **b,: call(a, **b,)")

    def test_trailing_commas_lambda_5(self):
        self.validate("f = lambda *a, b=1: call(*a, b=1)")
        self.validate("f = lambda *a, b=1,: call(*a, b=1,)")

    def test_trailing_commas_lambda_6(self):
        self.validate("f = lambda *a, **b: call(*a, **b)")
        self.validate("f = lambda *a, **b,: call(*a, **b,)")

    def test_trailing_commas_lambda_7(self):
        self.validate("f = lambda *, b=1: call(*b)")
        self.validate("f = lambda *, b=1,: call(*b,)")

    def test_trailing_commas_lambda_8(self):
        self.validate("f = lambda a=1, b=2: call(a=1, b=2)")
        self.validate("f = lambda a=1, b=2,: call(a=1, b=2,)")

    def test_trailing_commas_lambda_9(self):
        self.validate("f = lambda a=1, **b: call(a=1, **b)")
        self.validate("f = lambda a=1, **b,: call(a=1, **b,)")


# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef
class TestFunctionAnnotations(GrammarTest):

    def test_1(self):
        self.validate("""def f(x) -> list: pass""")

    def test_2(self):
        self.validate("""def f(x:int): pass""")

    def test_3(self):
        self.validate("""def f(*x:str): pass""")

    def test_4(self):
        self.validate("""def f(**x:float): pass""")

    def test_5(self):
        self.validate("""def f(x, y:1+2): pass""")

    def test_6(self):
        self.validate("""def f(a, (b:1, c:2, d)): pass""")

    def test_7(self):
        self.validate("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""")

    def test_8(self):
        s = """def f(a, (b:1, c:2, d), e:3=4, f=5,
                        *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass"""
        self.validate(s)

    def test_9(self):
        s = """def f(
                  a: str,
                  b: int,
                  *,
                  c: bool = False,
                  **kwargs,
              ) -> None:
                  call(c=c, **kwargs,)"""
        self.validate(s)

    def test_10(self):
        s = """def f(
                  a: str,
              ) -> None:
                  call(a,)"""
        self.validate(s)

    def test_11(self):
        s = """def f(
                  a: str = '',
              ) -> None:
                  call(a=a,)"""
        self.validate(s)

    def test_12(self):
        s = """def f(
                  *args: str,
              ) -> None:
                  call(*args,)"""
        self.validate(s)

    def test_13(self):
        self.validate("def f(a: str, b: int) -> None: call(a, b)")
        self.validate("def f(a: str, b: int,) -> None: call(a, b,)")

    def test_14(self):
        self.validate("def f(a: str, *b: int) -> None: call(a, *b)")
        self.validate("def f(a: str, *b: int,) -> None: call(a, *b,)")

    def test_15(self):
        self.validate("def f(a: str, b: int=1) -> None: call(a, b=1)")
        self.validate("def f(a: str, b: int=1,) -> None: call(a, b=1,)")

    def test_16(self):
        self.validate("def f(a: str, **b: int) -> None: call(a, **b)")
        self.validate("def f(a: str, **b: int,) -> None: call(a, **b,)")

    def test_17(self):
        self.validate("def f(*a: str, b: int=1) -> None: call(*a, b=1)")
        self.validate("def f(*a: str, b: int=1,) -> None: call(*a, b=1,)")

    def test_18(self):
        self.validate("def f(*a: str, **b: int) -> None: call(*a, **b)")
        self.validate("def f(*a: str, **b: int,) -> None: call(*a, **b,)")

    def test_19(self):
        self.validate("def f(*, b: int=1) -> None: call(*b)")
        self.validate("def f(*, b: int=1,) -> None: call(*b,)")

    def test_20(self):
        self.validate("def f(a: str='', b: int=2) -> None: call(a=a, b=2)")
        self.validate("def f(a: str='', b: int=2,) -> None: call(a=a, b=2,)")

    def test_21(self):
        self.validate("def f(a: str='', **b: int) -> None: call(a=a, **b)")
        self.validate("def f(a: str='', **b: int,) -> None: call(a=a, **b,)")


# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.test_var_annot
class TestVarAnnotations(GrammarTest):

    def test_1(self):
        self.validate("var1: int = 5")

    def test_2(self):
        self.validate("var2: [int, str]")

    def test_3(self):
        self.validate("def f():\n"
                      "    st: str = 'Hello'\n"
                      "    a.b: int = (1, 2)\n"
                      "    return st\n")

    def test_4(self):
        self.validate("def fbad():\n"
                      "    x: int\n"
                      "    print(x)\n")

    def test_5(self):
        self.validate("class C:\n"
                      "    x: int\n"
                      "    s: str = 'attr'\n"
                      "    z = 2\n"
                      "    def __init__(self, x):\n"
                      "        self.x: int = x\n")

    def test_6(self):
        self.validate("lst: List[int] = []")


class TestExcept(GrammarTest):

    def test_new(self):
        s = """
            try:
                x
            except E as N:
                y"""
        self.validate(s)

    def test_old(self):
        s = """
            try:
                x
            except E, N:
                y"""
        self.validate(s)
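

# String prefixes the merged grammar must accept, in both quote styles; each
# prefix is checked with a single-quoted and a triple-quoted literal.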
class TestStringLiterals(GrammarTest):
    prefixes = ("'", '"',
        "r'", 'r"', "R'", 'R"',
        "u'", 'u"', "U'", 'U"',
        "b'", 'b"', "B'", 'B"',
        "f'", 'f"', "F'", 'F"',
        "ur'", 'ur"', "Ur'", 'Ur"',
        "uR'", 'uR"', "UR'", 'UR"',
        "br'", 'br"', "Br'", 'Br"',
        "bR'", 'bR"', "BR'", 'BR"',
        "rb'", 'rb"', "Rb'", 'Rb"',
        "rB'", 'rB"', "RB'", 'RB"',)

    def test_lit(self):
        for pre in self.prefixes:
            single = "{p}spamspamspam{s}".format(p=pre, s=pre[-1])
            self.validate(single)
            triple = "{p}{s}{s}eggs{s}{s}{s}".format(p=pre, s=pre[-1])
            self.validate(triple)


# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testAtoms
class TestSetLiteral(GrammarTest):

    def test_1(self):
        self.validate("""x = {'one'}""")

    def test_2(self):
        self.validate("""x = {'one', 1,}""")

    def test_3(self):
        self.validate("""x = {'one', 'two', 'three'}""")

    def test_4(self):
        self.validate("""x = {2, 3, 4,}""")


# Adapted from Python 3's Lib/test/test_unicode_identifiers.py and
# Lib/test/test_tokenize.py:TokenizeTest.test_non_ascii_identifiers
class TestIdentifier(GrammarTest):

    def test_non_ascii_identifiers(self):
        self.validate("Örter = 'places'\ngrün = 'green'")
        self.validate("蟒 = a蟒 = 锦蛇 = 1")
        self.validate("µ = aµ = µµ = 1")
        self.validate("𝔘𝔫𝔦𝔠𝔬𝔡𝔢 = a_𝔘𝔫𝔦𝔠𝔬𝔡𝔢 = 1")
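

# Python 3 numeric notation: 0o... octal and 0b... binary literals, with
# out-of-range digits rejected.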
class TestNumericLiterals(GrammarTest):

    def test_new_octal_notation(self):
        self.validate("""0o7777777777777""")
        self.invalid_syntax("""0o7324528887""")

    def test_new_binary_notation(self):
        self.validate("""0b101010""")
        self.invalid_syntax("""0b0101021""")


class TestClassDef(GrammarTest):

    def test_new_syntax(self):
        self.validate("class B(t=7): pass")
        self.validate("class B(t, *args): pass")
        self.validate("class B(t, **kwargs): pass")
        self.validate("class B(t, *args, **kwargs): pass")
        self.validate("class B(t, y=9, *args, **kwargs,): pass")
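

# Round-trip check: parsing a project file and reserializing the tree with
# str() must reproduce the original source exactly.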
class TestParserIdempotency(support.TestCase):

    """A cut-down version of pytree_idempotency.py."""

    def test_all_project_files(self):
        for filepath in support.all_project_files():
            with open(filepath, "rb") as fp:
                encoding = tokenize.detect_encoding(fp.readline)[0]
            self.assertIsNotNone(encoding,
                                 "can't detect encoding for %s" % filepath)
            with open(filepath, "r", encoding=encoding) as fp:
                source = fp.read()
            try:
                tree = driver.parse_string(source)
            except ParseError:
                try:
                    tree = driver_no_print_statement.parse_string(source)
                except ParseError as err:
                    self.fail('ParseError on file %s (%s)' % (filepath, err))
            new = str(tree)
            if new != source:
                print(diff_texts(source, new, filepath))
                self.fail("Idempotency failed: %s" % filepath)

    def test_extended_unpacking(self):
        driver.parse_string("a, *b, c = x\n")
        driver.parse_string("[*a, b] = x\n")
        driver.parse_string("(z, *y, w) = m\n")
        driver.parse_string("for *z, m in d: pass\n")


class TestLiterals(GrammarTest):

    def validate(self, s):
        driver.parse_string(support.dedent(s) + "\n\n")

    def test_multiline_bytes_literals(self):
        s = """
            md5test(b"\xaa" * 80,
                    (b"Test Using Larger Than Block-Size Key "
                     b"and Larger Than One Block-Size Data"),
                    "6f630fad67cda0ee1fb1f562db3aa53e")
            """
        self.validate(s)

    def test_multiline_bytes_tripquote_literals(self):
        s = '''
            b"""
            <?xml version="1.0" encoding="UTF-8"?>
            <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN">
            """
            '''
        self.validate(s)

    def test_multiline_str_literals(self):
        s = """
            md5test("\xaa" * 80,
                    ("Test Using Larger Than Block-Size Key "
                     "and Larger Than One Block-Size Data"),
                    "6f630fad67cda0ee1fb1f562db3aa53e")
            """
        self.validate(s)


class TestNamedAssignments(GrammarTest):
    """Also known as the walrus operator."""

    def test_named_assignment_if(self):
        driver.parse_string("if f := x(): pass\n")

    def test_named_assignment_while(self):
        driver.parse_string("while f := x(): pass\n")

    def test_named_assignment_generator(self):
        driver.parse_string("any((lastNum := num) == 1 for num in [1, 2, 3])\n")

    def test_named_assignment_listcomp(self):
        driver.parse_string("[(lastNum := num) == 1 for num in [1, 2, 3]]\n")
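

# PEP 570 positional-only parameters: everything before "/" in a parameter
# list is positional-only.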
class TestPositionalOnlyArgs(GrammarTest):

    def test_one_pos_only_arg(self):
        driver.parse_string("def one_pos_only_arg(a, /): pass\n")

    def test_all_markers(self):
        driver.parse_string(
            "def all_markers(a, b=2, /, c, d=4, *, e=5, f): pass\n")

    def test_all_with_args_and_kwargs(self):
        driver.parse_string(
            """def all_markers_with_args_and_kwargs(
                   aa, b, /, _cc, d, *args, e, f_f, **kwargs,
               ):
                   pass\n""")

    def test_lambda_soup(self):
        driver.parse_string(
            "lambda a, b, /, c, d, *args, e, f, **kw: kw\n")

    def test_only_positional_or_keyword(self):
        driver.parse_string("def func(a,b,/,*,g,e=3): pass\n")
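

# ParseError must round-trip through pickle at every protocol (presumably so
# it can be passed between processes when 2to3 runs with multiple workers).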
class TestPickleableException(unittest.TestCase):

    def test_ParseError(self):
        err = ParseError('msg', 2, None, (1, 'context'))
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            err2 = pickle.loads(pickle.dumps(err, protocol=proto))
            self.assertEqual(err.args, err2.args)
            self.assertEqual(err.msg, err2.msg)
            self.assertEqual(err.type, err2.type)
            self.assertEqual(err.value, err2.value)
            self.assertEqual(err.context, err2.context)
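

# Helper for TestParserIdempotency: show a unified diff between the original
# source and the reserialized tree.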
def diff_texts(a, b, filename):
    a = a.splitlines()
    b = b.splitlines()
    return difflib.unified_diff(a, b, filename, filename,
                                "(original)", "(reserialized)",
                                lineterm="")


if __name__ == '__main__':
    unittest.main()