gh-107015: Remove async_hacks from the tokenizer (#107018)

This commit is contained in:
Pablo Galindo Salgado 2023-07-26 16:34:15 +01:00 committed by GitHub
parent b0202a4e5d
commit da8f87b7ea
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 404 additions and 499 deletions

View File

@ -2146,7 +2146,7 @@ and classes for traversing abstract syntax trees:
Currently ``major`` must equal to ``3``. For example, setting
``feature_version=(3, 4)`` will allow the use of ``async`` and
``await`` as variable names. The lowest supported version is
``(3, 4)``; the highest is ``sys.version_info[0:2]``.
``(3, 7)``; the highest is ``sys.version_info[0:2]``.
If source contains a null character ('\0'), :exc:`ValueError` is raised.
@ -2169,6 +2169,9 @@ and classes for traversing abstract syntax trees:
.. versionchanged:: 3.8
Added ``type_comments``, ``mode='func_type'`` and ``feature_version``.
.. versionchanged:: 3.13
The minimum supported version for ``feature_version`` is now ``(3, 7)``.
.. function:: unparse(ast_obj)

View File

@ -207,10 +207,6 @@
.. data:: OP
.. data:: AWAIT
.. data:: ASYNC
.. data:: TYPE_IGNORE
.. data:: TYPE_COMMENT

View File

@ -80,17 +80,21 @@ the :mod:`tokenize` module.
.. versionchanged:: 3.5
Added :data:`AWAIT` and :data:`ASYNC` tokens.
Added :data:`!AWAIT` and :data:`!ASYNC` tokens.
.. versionchanged:: 3.7
Added :data:`COMMENT`, :data:`NL` and :data:`ENCODING` tokens.
.. versionchanged:: 3.7
Removed :data:`AWAIT` and :data:`ASYNC` tokens. "async" and "await" are
Removed :data:`!AWAIT` and :data:`!ASYNC` tokens. "async" and "await" are
now tokenized as :data:`NAME` tokens.
.. versionchanged:: 3.8
Added :data:`TYPE_COMMENT`, :data:`TYPE_IGNORE`, :data:`COLONEQUAL`.
Added :data:`AWAIT` and :data:`ASYNC` tokens back (they're needed
Added :data:`!AWAIT` and :data:`!ASYNC` tokens back (they're needed
to support parsing older Python versions for :func:`ast.parse` with
``feature_version`` set to 6 or lower).
.. versionchanged:: 3.13
Removed :data:`!AWAIT` and :data:`!ASYNC` tokens again.

View File

@ -56,8 +56,6 @@ COLONEQUAL ':='
EXCLAMATION '!'
OP
AWAIT
ASYNC
TYPE_IGNORE
TYPE_COMMENT
SOFT_KEYWORD

View File

@ -127,11 +127,11 @@ simple_stmt[stmt_ty] (memo):
| &'nonlocal' nonlocal_stmt
compound_stmt[stmt_ty]:
| &('def' | '@' | ASYNC) function_def
| &('def' | '@' | 'async') function_def
| &'if' if_stmt
| &('class' | '@') class_def
| &('with' | ASYNC) with_stmt
| &('for' | ASYNC) for_stmt
| &('with' | 'async') with_stmt
| &('for' | 'async') for_stmt
| &'try' try_stmt
| &'while' while_stmt
| match_stmt
@ -272,7 +272,7 @@ function_def_raw[stmt_ty]:
_PyAST_FunctionDef(n->v.Name.id,
(params) ? params : CHECK(arguments_ty, _PyPegen_empty_arguments(p)),
b, NULL, a, NEW_TYPE_COMMENT(p, tc), t, EXTRA) }
| ASYNC 'def' n=NAME t=[type_params] &&'(' params=[params] ')' a=['->' z=expression { z }] &&':' tc=[func_type_comment] b=block {
| 'async' 'def' n=NAME t=[type_params] &&'(' params=[params] ')' a=['->' z=expression { z }] &&':' tc=[func_type_comment] b=block {
CHECK_VERSION(
stmt_ty,
5,
@ -385,7 +385,7 @@ for_stmt[stmt_ty]:
| invalid_for_stmt
| 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
_PyAST_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) }
| ASYNC 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
| 'async' 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
CHECK_VERSION(stmt_ty, 5, "Async for loops are", _PyAST_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
| invalid_for_target
@ -398,9 +398,9 @@ with_stmt[stmt_ty]:
CHECK_VERSION(stmt_ty, 9, "Parenthesized context managers are", _PyAST_With(a, b, NULL, EXTRA)) }
| 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
_PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
| ASYNC 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block {
| 'async' 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block {
CHECK_VERSION(stmt_ty, 5, "Async with statements are", _PyAST_AsyncWith(a, b, NULL, EXTRA)) }
| ASYNC 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
| 'async' 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
CHECK_VERSION(stmt_ty, 5, "Async with statements are", _PyAST_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
| invalid_with_stmt
@ -814,7 +814,7 @@ power[expr_ty]:
# Primary elements are things like "obj.something.something", "obj[something]", "obj(something)", "obj" ...
await_primary[expr_ty] (memo):
| AWAIT a=primary { CHECK_VERSION(expr_ty, 5, "Await expressions are", _PyAST_Await(a, EXTRA)) }
| 'await' a=primary { CHECK_VERSION(expr_ty, 5, "Await expressions are", _PyAST_Await(a, EXTRA)) }
| primary
primary[expr_ty]:
@ -966,7 +966,7 @@ for_if_clauses[asdl_comprehension_seq*]:
| a[asdl_comprehension_seq*]=for_if_clause+ { a }
for_if_clause[comprehension_ty]:
| ASYNC 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* {
| 'async' 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* {
CHECK_VERSION(comprehension_ty, 6, "Async comprehensions are", _PyAST_comprehension(a, b, c, 1, p->arena)) }
| 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* {
_PyAST_comprehension(a, b, c, 0, p->arena) }
@ -1284,7 +1284,7 @@ invalid_with_item:
RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) }
invalid_for_target:
| ASYNC? 'for' a=star_expressions {
| 'async'? 'for' a=star_expressions {
RAISE_SYNTAX_ERROR_INVALID_TARGET(FOR_TARGETS, a) }
invalid_group:
@ -1301,12 +1301,12 @@ invalid_import_from_targets:
RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
invalid_with_stmt:
| [ASYNC] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| [ASYNC] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| ['async'] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| ['async'] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
invalid_with_stmt_indent:
| [ASYNC] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT {
| ['async'] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'with' statement on line %d", a->lineno) }
| [ASYNC] a='with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT {
| ['async'] a='with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'with' statement on line %d", a->lineno) }
invalid_try_stmt:
@ -1367,11 +1367,11 @@ invalid_while_stmt:
| a='while' named_expression ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'while' statement on line %d", a->lineno) }
invalid_for_stmt:
| [ASYNC] 'for' star_targets 'in' star_expressions NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| [ASYNC] a='for' star_targets 'in' star_expressions ':' NEWLINE !INDENT {
| ['async'] 'for' star_targets 'in' star_expressions NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| ['async'] a='for' star_targets 'in' star_expressions ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after 'for' statement on line %d", a->lineno) }
invalid_def_raw:
| [ASYNC] a='def' NAME '(' [params] ')' ['->' expression] ':' NEWLINE !INDENT {
| ['async'] a='def' NAME '(' [params] ')' ['->' expression] ':' NEWLINE !INDENT {
RAISE_INDENTATION_ERROR("expected an indented block after function definition on line %d", a->lineno) }
invalid_class_def_raw:
| 'class' NAME ['(' [arguments] ')'] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }

View File

@ -69,18 +69,16 @@ extern "C" {
#define COLONEQUAL 53
#define EXCLAMATION 54
#define OP 55
#define AWAIT 56
#define ASYNC 57
#define TYPE_IGNORE 58
#define TYPE_COMMENT 59
#define SOFT_KEYWORD 60
#define FSTRING_START 61
#define FSTRING_MIDDLE 62
#define FSTRING_END 63
#define COMMENT 64
#define NL 65
#define ERRORTOKEN 66
#define N_TOKENS 68
#define TYPE_IGNORE 56
#define TYPE_COMMENT 57
#define SOFT_KEYWORD 58
#define FSTRING_START 59
#define FSTRING_MIDDLE 60
#define FSTRING_END 61
#define COMMENT 62
#define NL 63
#define ERRORTOKEN 64
#define N_TOKENS 66
#define NT_OFFSET 256
/* Special definitions for cooperation with parser */

View File

@ -404,7 +404,7 @@ class TestCParser(unittest.TestCase):
a='[' b=NAME c=for_if_clauses d=']' { _PyAST_ListComp(b, c, EXTRA) }
)
for_if_clauses[asdl_comprehension_seq*]: (
a[asdl_comprehension_seq*]=(y=[ASYNC] 'for' a=NAME 'in' b=NAME c[asdl_expr_seq*]=('if' z=NAME { z })*
a[asdl_comprehension_seq*]=(y=['async'] 'for' a=NAME 'in' b=NAME c[asdl_expr_seq*]=('if' z=NAME { z })*
{ _PyAST_comprehension(_PyAST_Name(((expr_ty) a)->v.Name.id, Store, EXTRA), b, c, (y == NULL) ? 0 : 1, p->arena) })+ { a }
)
"""

View File

@ -2521,7 +2521,7 @@ def"', """\
def test_async(self):
self.check_tokenize('async = 1', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
EQUAL '=' (1, 6) (1, 7)
NUMBER '1' (1, 8) (1, 9)
""")
@ -2530,21 +2530,21 @@ def"', """\
NAME 'a' (1, 0) (1, 1)
EQUAL '=' (1, 2) (1, 3)
LPAR '(' (1, 4) (1, 5)
ASYNC 'async' (1, 5) (1, 10)
NAME 'async' (1, 5) (1, 10)
EQUAL '=' (1, 11) (1, 12)
NUMBER '1' (1, 13) (1, 14)
RPAR ')' (1, 14) (1, 15)
""")
self.check_tokenize('async()', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
LPAR '(' (1, 5) (1, 6)
RPAR ')' (1, 6) (1, 7)
""")
self.check_tokenize('class async(Bar):pass', """\
NAME 'class' (1, 0) (1, 5)
ASYNC 'async' (1, 6) (1, 11)
NAME 'async' (1, 6) (1, 11)
LPAR '(' (1, 11) (1, 12)
NAME 'Bar' (1, 12) (1, 15)
RPAR ')' (1, 15) (1, 16)
@ -2554,13 +2554,13 @@ def"', """\
self.check_tokenize('class async:pass', """\
NAME 'class' (1, 0) (1, 5)
ASYNC 'async' (1, 6) (1, 11)
NAME 'async' (1, 6) (1, 11)
COLON ':' (1, 11) (1, 12)
NAME 'pass' (1, 12) (1, 16)
""")
self.check_tokenize('await = 1', """\
AWAIT 'await' (1, 0) (1, 5)
NAME 'await' (1, 0) (1, 5)
EQUAL '=' (1, 6) (1, 7)
NUMBER '1' (1, 8) (1, 9)
""")
@ -2568,11 +2568,11 @@ def"', """\
self.check_tokenize('foo.async', """\
NAME 'foo' (1, 0) (1, 3)
DOT '.' (1, 3) (1, 4)
ASYNC 'async' (1, 4) (1, 9)
NAME 'async' (1, 4) (1, 9)
""")
self.check_tokenize('async for a in b: pass', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'for' (1, 6) (1, 9)
NAME 'a' (1, 10) (1, 11)
NAME 'in' (1, 12) (1, 14)
@ -2582,7 +2582,7 @@ def"', """\
""")
self.check_tokenize('async with a as b: pass', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'with' (1, 6) (1, 10)
NAME 'a' (1, 11) (1, 12)
NAME 'as' (1, 13) (1, 15)
@ -2592,45 +2592,45 @@ def"', """\
""")
self.check_tokenize('async.foo', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
DOT '.' (1, 5) (1, 6)
NAME 'foo' (1, 6) (1, 9)
""")
self.check_tokenize('async', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
""")
self.check_tokenize('async\n#comment\nawait', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NEWLINE '' (1, 5) (1, 5)
AWAIT 'await' (3, 0) (3, 5)
NAME 'await' (3, 0) (3, 5)
""")
self.check_tokenize('async\n...\nawait', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NEWLINE '' (1, 5) (1, 5)
ELLIPSIS '...' (2, 0) (2, 3)
NEWLINE '' (2, 3) (2, 3)
AWAIT 'await' (3, 0) (3, 5)
NAME 'await' (3, 0) (3, 5)
""")
self.check_tokenize('async\nawait', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NEWLINE '' (1, 5) (1, 5)
AWAIT 'await' (2, 0) (2, 5)
NAME 'await' (2, 0) (2, 5)
""")
self.check_tokenize('foo.async + 1', """\
NAME 'foo' (1, 0) (1, 3)
DOT '.' (1, 3) (1, 4)
ASYNC 'async' (1, 4) (1, 9)
NAME 'async' (1, 4) (1, 9)
PLUS '+' (1, 10) (1, 11)
NUMBER '1' (1, 12) (1, 13)
""")
self.check_tokenize('async def foo(): pass', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
@ -2647,7 +2647,7 @@ async def foo():
await
async += 1
''', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
@ -2658,12 +2658,12 @@ async += 1
NAME 'def' (2, 2) (2, 5)
NAME 'foo' (2, 6) (2, 9)
LPAR '(' (2, 9) (2, 10)
AWAIT 'await' (2, 10) (2, 15)
NAME 'await' (2, 10) (2, 15)
RPAR ')' (2, 15) (2, 16)
COLON ':' (2, 16) (2, 17)
NEWLINE '' (2, 17) (2, 17)
INDENT '' (3, -1) (3, -1)
AWAIT 'await' (3, 4) (3, 9)
NAME 'await' (3, 4) (3, 9)
EQUAL '=' (3, 10) (3, 11)
NUMBER '1' (3, 12) (3, 13)
NEWLINE '' (3, 13) (3, 13)
@ -2673,18 +2673,18 @@ async += 1
COLON ':' (4, 6) (4, 7)
NEWLINE '' (4, 7) (4, 7)
INDENT '' (5, -1) (5, -1)
AWAIT 'await' (5, 4) (5, 9)
NAME 'await' (5, 4) (5, 9)
NEWLINE '' (5, 9) (5, 9)
DEDENT '' (6, -1) (6, -1)
DEDENT '' (6, -1) (6, -1)
ASYNC 'async' (6, 0) (6, 5)
NAME 'async' (6, 0) (6, 5)
PLUSEQUAL '+=' (6, 6) (6, 8)
NUMBER '1' (6, 9) (6, 10)
NEWLINE '' (6, 10) (6, 10)
""")
self.check_tokenize('async def foo():\n async for i in 1: pass', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
@ -2692,7 +2692,7 @@ async += 1
COLON ':' (1, 15) (1, 16)
NEWLINE '' (1, 16) (1, 16)
INDENT '' (2, -1) (2, -1)
ASYNC 'async' (2, 2) (2, 7)
NAME 'async' (2, 2) (2, 7)
NAME 'for' (2, 8) (2, 11)
NAME 'i' (2, 12) (2, 13)
NAME 'in' (2, 14) (2, 16)
@ -2703,14 +2703,14 @@ async += 1
""")
self.check_tokenize('async def foo(async): await', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'foo' (1, 10) (1, 13)
LPAR '(' (1, 13) (1, 14)
ASYNC 'async' (1, 14) (1, 19)
NAME 'async' (1, 14) (1, 19)
RPAR ')' (1, 19) (1, 20)
COLON ':' (1, 20) (1, 21)
AWAIT 'await' (1, 22) (1, 27)
NAME 'await' (1, 22) (1, 27)
""")
self.check_tokenize('''\
@ -2734,7 +2734,7 @@ def f():
COLON ':' (3, 11) (3, 12)
NAME 'pass' (3, 13) (3, 17)
NEWLINE '' (3, 17) (3, 17)
ASYNC 'async' (4, 2) (4, 7)
NAME 'async' (4, 2) (4, 7)
NAME 'def' (4, 8) (4, 11)
NAME 'bar' (4, 12) (4, 15)
LPAR '(' (4, 15) (4, 16)
@ -2742,7 +2742,7 @@ def f():
COLON ':' (4, 17) (4, 18)
NAME 'pass' (4, 19) (4, 23)
NEWLINE '' (4, 23) (4, 23)
AWAIT 'await' (6, 2) (6, 7)
NAME 'await' (6, 2) (6, 7)
EQUAL '=' (6, 8) (6, 9)
NUMBER '2' (6, 10) (6, 11)
DEDENT '' (6, -1) (6, -1)
@ -2755,7 +2755,7 @@ async def f():
async def bar(): pass
await = 2''', """\
ASYNC 'async' (1, 0) (1, 5)
NAME 'async' (1, 0) (1, 5)
NAME 'def' (1, 6) (1, 9)
NAME 'f' (1, 10) (1, 11)
LPAR '(' (1, 11) (1, 12)
@ -2770,7 +2770,7 @@ async def f():
COLON ':' (3, 11) (3, 12)
NAME 'pass' (3, 13) (3, 17)
NEWLINE '' (3, 17) (3, 17)
ASYNC 'async' (4, 2) (4, 7)
NAME 'async' (4, 2) (4, 7)
NAME 'def' (4, 8) (4, 11)
NAME 'bar' (4, 12) (4, 15)
LPAR '(' (4, 15) (4, 16)
@ -2778,7 +2778,7 @@ async def f():
COLON ':' (4, 17) (4, 18)
NAME 'pass' (4, 19) (4, 23)
NEWLINE '' (4, 23) (4, 23)
AWAIT 'await' (6, 2) (6, 7)
NAME 'await' (6, 2) (6, 7)
EQUAL '=' (6, 8) (6, 9)
NUMBER '2' (6, 10) (6, 11)
DEDENT '' (6, -1) (6, -1)

View File

@ -260,8 +260,8 @@ class TypeCommentTests(unittest.TestCase):
self.assertEqual(tree.body[1].type_comment, None)
def test_asyncvar(self):
for tree in self.parse_all(asyncvar, maxver=6):
pass
with self.assertRaises(SyntaxError):
self.classic_parse(asyncvar)
def test_asynccomp(self):
for tree in self.parse_all(asynccomp, minver=6):

24
Lib/token.py generated
View File

@ -59,20 +59,18 @@ ELLIPSIS = 52
COLONEQUAL = 53
EXCLAMATION = 54
OP = 55
AWAIT = 56
ASYNC = 57
TYPE_IGNORE = 58
TYPE_COMMENT = 59
SOFT_KEYWORD = 60
FSTRING_START = 61
FSTRING_MIDDLE = 62
FSTRING_END = 63
COMMENT = 64
NL = 65
TYPE_IGNORE = 56
TYPE_COMMENT = 57
SOFT_KEYWORD = 58
FSTRING_START = 59
FSTRING_MIDDLE = 60
FSTRING_END = 61
COMMENT = 62
NL = 63
# These aren't used by the C tokenizer but are needed for tokenize.py
ERRORTOKEN = 66
ENCODING = 67
N_TOKENS = 68
ERRORTOKEN = 64
ENCODING = 65
N_TOKENS = 66
# Special definitions for cooperation with parser
NT_OFFSET = 256

View File

@ -0,0 +1,3 @@
The ASYNC and AWAIT tokens are removed from the Grammar, which removes the
possibility of making ``async`` and ``await`` soft keywords when using
``feature_version<7`` in :func:`ast.parse`.

624
Parser/parser.c generated

File diff suppressed because it is too large Load Diff

View File

@ -734,9 +734,6 @@ compute_parser_flags(PyCompilerFlags *flags)
if (flags->cf_flags & PyCF_TYPE_COMMENTS) {
parser_flags |= PyPARSE_TYPE_COMMENTS;
}
if ((flags->cf_flags & PyCF_ONLY_AST) && flags->cf_feature_version < 7) {
parser_flags |= PyPARSE_ASYNC_HACKS;
}
if (flags->cf_flags & PyCF_ALLOW_INCOMPLETE_INPUT) {
parser_flags |= PyPARSE_ALLOW_INCOMPLETE_INPUT;
}
@ -755,7 +752,6 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags,
}
assert(tok != NULL);
tok->type_comments = (flags & PyPARSE_TYPE_COMMENTS) > 0;
tok->async_hacks = (flags & PyPARSE_ASYNC_HACKS) > 0;
p->tok = tok;
p->keywords = NULL;
p->n_keyword_lists = -1;

View File

@ -20,7 +20,6 @@
#define PyPARSE_IGNORE_COOKIE 0x0010
#define PyPARSE_BARRY_AS_BDFL 0x0020
#define PyPARSE_TYPE_COMMENTS 0x0040
#define PyPARSE_ASYNC_HACKS 0x0080
#define PyPARSE_ALLOW_INCOMPLETE_INPUT 0x0100
#define CURRENT_POS (-5)

2
Parser/token.c generated
View File

@ -62,8 +62,6 @@ const char * const _PyParser_TokenNames[] = {
"COLONEQUAL",
"EXCLAMATION",
"OP",
"AWAIT",
"ASYNC",
"TYPE_IGNORE",
"TYPE_COMMENT",
"SOFT_KEYWORD",

View File

@ -104,10 +104,6 @@ tok_new(void)
tok->decoding_buffer = NULL;
tok->readline = NULL;
tok->type_comments = 0;
tok->async_hacks = 0;
tok->async_def = 0;
tok->async_def_indent = 0;
tok->async_def_nl = 0;
tok->interactive_underflow = IUNDERFLOW_NORMAL;
tok->str = NULL;
tok->report_warnings = 1;
@ -1925,27 +1921,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
/* Peek ahead at the next character */
c = tok_nextc(tok);
tok_backup(tok, c);
/* Check if we are closing an async function */
if (tok->async_def
&& !blankline
/* Due to some implementation artifacts of type comments,
* a TYPE_COMMENT at the start of a function won't set an
* indentation level and it will produce a NEWLINE after it.
* To avoid spuriously ending an async function due to this,
* wait until we have some non-newline char in front of us. */
&& c != '\n'
&& tok->level == 0
/* There was a NEWLINE after ASYNC DEF,
so we're past the signature. */
&& tok->async_def_nl
/* Current indentation level is less than where
the async function was defined */
&& tok->async_def_indent >= tok->indent)
{
tok->async_def = 0;
tok->async_def_indent = 0;
tok->async_def_nl = 0;
}
again:
tok->start = NULL;
@ -2094,54 +2069,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
p_start = tok->start;
p_end = tok->cur;
/* async/await parsing block. */
if (tok->cur - tok->start == 5 && tok->start[0] == 'a') {
/* May be an 'async' or 'await' token. For Python 3.7 or
later we recognize them unconditionally. For Python
3.5 or 3.6 we recognize 'async' in front of 'def', and
either one inside of 'async def'. (Technically we
shouldn't recognize these at all for 3.4 or earlier,
but there's no *valid* Python 3.4 code that would be
rejected, and async functions will be rejected in a
later phase.) */
if (!tok->async_hacks || tok->async_def) {
/* Always recognize the keywords. */
if (memcmp(tok->start, "async", 5) == 0) {
return MAKE_TOKEN(ASYNC);
}
if (memcmp(tok->start, "await", 5) == 0) {
return MAKE_TOKEN(AWAIT);
}
}
else if (memcmp(tok->start, "async", 5) == 0) {
/* The current token is 'async'.
Look ahead one token to see if that is 'def'. */
struct tok_state ahead_tok;
struct token ahead_token;
_PyToken_Init(&ahead_token);
int ahead_tok_kind;
memcpy(&ahead_tok, tok, sizeof(ahead_tok));
ahead_tok_kind = tok_get_normal_mode(&ahead_tok,
current_tok,
&ahead_token);
if (ahead_tok_kind == NAME
&& ahead_tok.cur - ahead_tok.start == 3
&& memcmp(ahead_tok.start, "def", 3) == 0)
{
/* The next token is going to be 'def', so instead of
returning a plain NAME token, return ASYNC. */
tok->async_def_indent = tok->indent;
tok->async_def = 1;
_PyToken_Free(&ahead_token);
return MAKE_TOKEN(ASYNC);
}
_PyToken_Free(&ahead_token);
}
}
return MAKE_TOKEN(NAME);
}
@ -2172,11 +2099,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
p_start = tok->start;
p_end = tok->cur - 1; /* Leave '\n' out of the string */
tok->cont_line = 0;
if (tok->async_def) {
/* We're somewhere inside an 'async def' function, and
we've encountered a NEWLINE after its signature. */
tok->async_def_nl = 1;
}
return MAKE_TOKEN(NEWLINE);
}

View File

@ -116,12 +116,6 @@ struct tok_state {
int type_comments; /* Whether to look for type comments */
/* async/await related fields (still needed depending on feature_version) */
int async_hacks; /* =1 if async/await aren't always keywords */
int async_def; /* =1 if tokens are inside an 'async def' body. */
int async_def_indent; /* Indentation level of the outermost 'async def'. */
int async_def_nl; /* =1 if the outermost 'async def' had at least one
NEWLINE token after it. */
/* How to proceed when asked for a new token in interactive mode */
enum interactive_underflow_t interactive_underflow;
int report_warnings;

View File

@ -237,9 +237,6 @@ tokenizeriter_next(tokenizeriterobject *it)
if (type > DEDENT && type < OP) {
type = OP;
}
else if (type == ASYNC || type == AWAIT) {
type = NAME;
}
else if (type == NEWLINE) {
Py_DECREF(str);
if (!it->tok->implicit_newline) {

View File

@ -35,9 +35,6 @@ iskeyword = frozenset(kwlist).__contains__
issoftkeyword = frozenset(softkwlist).__contains__
'''.lstrip()
EXTRA_KEYWORDS = ["async", "await"]
def main() -> None:
parser = argparse.ArgumentParser(
description="Generate the Lib/keywords.py file from the grammar."
@ -62,7 +59,7 @@ def main() -> None:
gen.collect_rules()
with args.keyword_file as thefile:
all_keywords = sorted(list(gen.keywords.keys()) + EXTRA_KEYWORDS)
all_keywords = sorted(list(gen.keywords.keys()))
all_soft_keywords = sorted(gen.soft_keywords)
keywords = "" if not all_keywords else " " + ",\n ".join(map(repr, all_keywords))

View File

@ -102,7 +102,7 @@ class PythonCallMakerVisitor(GrammarVisitor):
if name in ("NAME", "NUMBER", "STRING", "OP", "TYPE_COMMENT"):
name = name.lower()
return name, f"self.{name}()"
if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER", "ASYNC", "AWAIT"):
if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER"):
# Avoid using names that can be Python keywords
return "_" + name.lower(), f"self.expect({name!r})"
return name, f"self.{name}()"