bpo-30406: Make async and await proper keywords (#1669)
Per PEP 492, 'async' and 'await' should become proper keywords in 3.7.
This commit is contained in: parent 2084b30e54, commit ac317700ce
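The user-visible effect is easy to demonstrate. A minimal sketch, assuming a CPython 3.7+ interpreter (on 3.6 the first compile() only triggers a DeprecationWarning, per the ast.c hunk further below):

# Under 3.7+, 'async' and 'await' are reserved words like 'def' or 'for'.
try:
    compile("async = 1", "<test>", "exec")
except SyntaxError as exc:
    print("rejected:", exc.msg)

# In their intended positions they compile as before:
compile("async def f():\n    await g()\n", "<test>", "exec")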
@@ -98,8 +98,6 @@ The token constants are:
    RARROW
    ELLIPSIS
    OP
-   AWAIT
-   ASYNC
    ERRORTOKEN
    N_TOKENS
    NT_OFFSET
@@ -129,9 +127,11 @@ the :mod:`tokenize` module.

 .. versionchanged:: 3.5
-   Added :data:`AWAIT` and :data:`ASYNC` tokens. Starting with
-   Python 3.7, "async" and "await" will be tokenized as :data:`NAME`
-   tokens, and :data:`AWAIT` and :data:`ASYNC` will be removed.
+   Added :data:`AWAIT` and :data:`ASYNC` tokens.

 .. versionchanged:: 3.7
    Added :data:`COMMENT`, :data:`NL` and :data:`ENCODING` tokens.

+.. versionchanged:: 3.7
+   Removed :data:`AWAIT` and :data:`ASYNC` tokens. "async" and "await" are
+   now tokenized as :data:`NAME` tokens.
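The documented behavior can be observed directly with the tokenize module; a small sketch (not part of the patch) of what a 3.7 interpreter produces:

import io, token, tokenize

src = "async def f():\n    await g()\n"
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    if tok.string in ("async", "await"):
        # Both words now come back as NAME; the ASYNC/AWAIT types are gone.
        print(token.tok_name[tok.type], tok.string)
# NAME async
# NAME await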
@@ -272,9 +272,9 @@ class MiscNews(Directive):
 # Support for building "topic help" for pydoc

 pydoc_topic_labels = [
-    'assert', 'assignment', 'atom-identifiers', 'atom-literals',
-    'attribute-access', 'attribute-references', 'augassign', 'binary',
-    'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object',
+    'assert', 'assignment', 'async', 'atom-identifiers', 'atom-literals',
+    'attribute-access', 'attribute-references', 'augassign', 'await',
+    'binary', 'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object',
     'bltin-null-object', 'bltin-type-objects', 'booleans',
     'break', 'callable-types', 'calls', 'class', 'comparisons', 'compound',
     'context-managers', 'continue', 'conversions', 'customization', 'debugger',
@@ -16,7 +16,7 @@ decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
 decorated: decorators (classdef | funcdef | async_funcdef)

-async_funcdef: ASYNC funcdef
+async_funcdef: 'async' funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' suite

 parameters: '(' [typedargslist] ')'

@@ -68,7 +68,7 @@ nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
 assert_stmt: 'assert' test [',' test]

 compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
+async_stmt: 'async' (funcdef | with_stmt | for_stmt)
 if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
 while_stmt: 'while' test ':' suite ['else' ':' suite]
 for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]

@@ -103,7 +103,7 @@ arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'@'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
 power: atom_expr ['**' factor]
-atom_expr: [AWAIT] atom trailer*
+atom_expr: ['await'] atom trailer*
 atom: ('(' [yield_expr|testlist_comp] ')' |
        '[' [testlist_comp] ']' |
        '{' [dictorsetmaker] '}' |

@@ -139,7 +139,8 @@ argument: ( test [comp_for] |
             '*' test )

 comp_iter: comp_for | comp_if
-comp_for: [ASYNC] 'for' exprlist 'in' or_test [comp_iter]
+sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
+comp_for: ['async'] sync_comp_for
 comp_if: 'if' test_nocond [comp_iter]

 # not used in grammar, but may appear in "node" passed from Parser to Compiler
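The comp_for split above is the one structural grammar change: the optional 'async' prefix moves into a thin wrapper around the new sync_comp_for rule, so later passes can treat the synchronous part uniformly. The resulting AST shape is unchanged; an illustrative way (not part of the patch) to see the async flag on a comprehension:

import ast

tree = ast.parse(
    "async def f(xs):\n"
    "    return [x async for x in xs]\n"
)
comp = tree.body[0].body[0].value      # the ListComp node
print(type(comp).__name__)             # ListComp
print(comp.generators[0].is_async)     # 1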
@@ -81,8 +81,9 @@
 #define arglist 334
 #define argument 335
 #define comp_iter 336
-#define comp_for 337
-#define comp_if 338
-#define encoding_decl 339
-#define yield_expr 340
-#define yield_arg 341
+#define sync_comp_for 337
+#define comp_for 338
+#define comp_if 339
+#define encoding_decl 340
+#define yield_expr 341
+#define yield_arg 342
Include/token.h
@@ -9,77 +9,75 @@ extern "C" {

 #undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */

 #define ENDMARKER 0
 #define NAME 1
 #define NUMBER 2
 #define STRING 3
 #define NEWLINE 4
 #define INDENT 5
 #define DEDENT 6
 #define LPAR 7
 #define RPAR 8
 #define LSQB 9
 #define RSQB 10
 #define COLON 11
 #define COMMA 12
 #define SEMI 13
 #define PLUS 14
 #define MINUS 15
 #define STAR 16
 #define SLASH 17
 #define VBAR 18
 #define AMPER 19
 #define LESS 20
 #define GREATER 21
 #define EQUAL 22
 #define DOT 23
 #define PERCENT 24
 #define LBRACE 25
 #define RBRACE 26
 #define EQEQUAL 27
 #define NOTEQUAL 28
 #define LESSEQUAL 29
 #define GREATEREQUAL 30
 #define TILDE 31
 #define CIRCUMFLEX 32
 #define LEFTSHIFT 33
 #define RIGHTSHIFT 34
 #define DOUBLESTAR 35
 #define PLUSEQUAL 36
 #define MINEQUAL 37
 #define STAREQUAL 38
 #define SLASHEQUAL 39
 #define PERCENTEQUAL 40
 #define AMPEREQUAL 41
 #define VBAREQUAL 42
 #define CIRCUMFLEXEQUAL 43
 #define LEFTSHIFTEQUAL 44
 #define RIGHTSHIFTEQUAL 45
 #define DOUBLESTAREQUAL 46
 #define DOUBLESLASH 47
 #define DOUBLESLASHEQUAL 48
 #define AT 49
 #define ATEQUAL 50
 #define RARROW 51
 #define ELLIPSIS 52
 /* Don't forget to update the table _PyParser_TokenNames in tokenizer.c! */
 #define OP 53
-#define AWAIT 54
-#define ASYNC 55
-#define ERRORTOKEN 56
+#define ERRORTOKEN 54
 /* These aren't used by the C tokenizer but are needed for tokenize.py */
-#define COMMENT 57
-#define NL 58
-#define ENCODING 59
-#define N_TOKENS 60
+#define COMMENT 55
+#define NL 56
+#define ENCODING 57
+#define N_TOKENS 58

 /* Special definitions for cooperation with parser */

 #define NT_OFFSET 256

 #define ISTERMINAL(x) ((x) < NT_OFFSET)
 #define ISNONTERMINAL(x) ((x) >= NT_OFFSET)
 #define ISEOF(x) ((x) == ENDMARKER)


 PyAPI_DATA(const char *) _PyParser_TokenNames[]; /* Token names */
@@ -20,6 +20,8 @@ kwlist = [
        'and',
        'as',
        'assert',
+       'async',
+       'await',
        'break',
        'class',
        'continue',
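kwlist is what keyword.iskeyword() consults, so this two-line addition is the part of the change most tooling observes:

import keyword

print(keyword.iskeyword("async"))   # True on 3.7+
print(keyword.iskeyword("await"))   # True on 3.7+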
@@ -34,7 +34,7 @@ eval_input: testlist NEWLINE* ENDMARKER
 decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
 decorated: decorators (classdef | funcdef | async_funcdef)
-async_funcdef: ASYNC funcdef
+async_funcdef: 'async' funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' suite
 parameters: '(' [typedargslist] ')'
 typedargslist: ((tfpdef ['=' test] ',')*

@@ -85,7 +85,7 @@ exec_stmt: 'exec' expr ['in' test [',' test]]
 assert_stmt: 'assert' test [',' test]

 compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
+async_stmt: 'async' (funcdef | with_stmt | for_stmt)
 if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
 while_stmt: 'while' test ':' suite ['else' ':' suite]
 for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]

@@ -124,7 +124,7 @@ shift_expr: arith_expr (('<<'|'>>') arith_expr)*
 arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'@'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
-power: [AWAIT] atom trailer* ['**' factor]
+power: ['await'] atom trailer* ['**' factor]
 atom: ('(' [yield_expr|testlist_gexp] ')' |
        '[' [listmaker] ']' |
        '{' [dictsetmaker] '}' |

@@ -161,7 +161,7 @@ argument: ( test [comp_for] |
             star_expr )

 comp_iter: comp_for | comp_if
-comp_for: [ASYNC] 'for' exprlist 'in' or_test [comp_iter]
+comp_for: ['async'] 'for' exprlist 'in' or_test [comp_iter]
 comp_if: 'if' old_test [comp_iter]

 # As noted above, testlist_safe extends the syntax allowed in list

@@ -180,7 +180,7 @@ comp_if: 'if' old_test [comp_iter]
 #
 # See https://bugs.python.org/issue27494
 old_comp_iter: old_comp_for | old_comp_if
-old_comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [old_comp_iter]
+old_comp_for: ['async'] 'for' exprlist 'in' testlist_safe [old_comp_iter]
 old_comp_if: 'if' old_test [old_comp_iter]

 testlist1: test (',' test)*
@@ -62,10 +62,8 @@ OP = 52
 COMMENT = 53
 NL = 54
 RARROW = 55
-AWAIT = 56
-ASYNC = 57
-ERRORTOKEN = 58
-N_TOKENS = 59
+ERRORTOKEN = 56
+N_TOKENS = 57
 NT_OFFSET = 256
 #--end constants--

@@ -234,7 +234,7 @@ class Untokenizer:
         for tok in iterable:
             toknum, tokval = tok[:2]

-            if toknum in (NAME, NUMBER, ASYNC, AWAIT):
+            if toknum in (NAME, NUMBER):
                 tokval += ' '

             if toknum == INDENT:
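With ASYNC and AWAIT gone, the untokenizer needs no special spacing rules for these words; the NAME case already covers them. A round-trip sketch using the stdlib tokenize module (the lib2to3 copy behaves analogously):

import io, tokenize

src = "async def f():\n    await g()\n"
toks = list(tokenize.generate_tokens(io.StringIO(src).readline))
# With full 5-tuples, untokenize reproduces this simple source exactly.
print(tokenize.untokenize(toks) == src)   # True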
@@ -380,12 +380,6 @@ def generate_tokens(readline):
    contline = None
    indents = [0]

-    # 'stashed' and 'async_*' are used for async/await parsing
-    stashed = None
-    async_def = False
-    async_def_indent = 0
-    async_def_nl = False
-
    while 1:                                   # loop over lines in stream
        try:
            line = readline()

@@ -426,10 +420,6 @@ def generate_tokens(readline):
                pos = pos + 1
            if pos == max: break

-            if stashed:
-                yield stashed
-                stashed = None
-
            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')

@@ -453,18 +443,8 @@ def generate_tokens(readline):
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]

-                if async_def and async_def_indent >= indents[-1]:
-                    async_def = False
-                    async_def_nl = False
-                    async_def_indent = 0
-
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

-            if async_def and async_def_nl and async_def_indent >= indents[-1]:
-                async_def = False
-                async_def_nl = False
-                async_def_indent = 0
-
        else:                                  # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", (lnum, 0))

@@ -484,18 +464,10 @@ def generate_tokens(readline):
                    newline = NEWLINE
                    if parenlev > 0:
                        newline = NL
-                    elif async_def:
-                        async_def_nl = True
-                    if stashed:
-                        yield stashed
-                        stashed = None
                    yield (newline, token, spos, epos, line)

                elif initial == '#':
                    assert not token.endswith("\n")
-                    if stashed:
-                        yield stashed
-                        stashed = None
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]

@@ -503,9 +475,6 @@ def generate_tokens(readline):
                    if endmatch:                           # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
-                        if stashed:
-                            yield stashed
-                            stashed = None
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)           # multiple lines

@@ -523,63 +492,22 @@ def generate_tokens(readline):
                        contline = line
                        break
                else:                                  # ordinary string
-                    if stashed:
-                        yield stashed
-                        stashed = None
                    yield (STRING, token, spos, epos, line)
            elif initial in namechars:                 # ordinary name
-                if token in ('async', 'await'):
-                    if async_def:
-                        yield (ASYNC if token == 'async' else AWAIT,
-                               token, spos, epos, line)
-                        continue
-
-                tok = (NAME, token, spos, epos, line)
-                if token == 'async' and not stashed:
-                    stashed = tok
-                    continue
-
-                if token == 'def':
-                    if (stashed
-                            and stashed[0] == NAME
-                            and stashed[1] == 'async'):
-
-                        async_def = True
-                        async_def_indent = indents[-1]
-
-                        yield (ASYNC, stashed[1],
-                               stashed[2], stashed[3],
-                               stashed[4])
-                        stashed = None
-
-                if stashed:
-                    yield stashed
-                    stashed = None
-
-                yield tok
+                yield (NAME, token, spos, epos, line)
            elif initial == '\\':                      # continued stmt
                # This yield is new; needed for better idempotency:
-                if stashed:
-                    yield stashed
-                    stashed = None
                yield (NL, token, spos, (lnum, pos), line)
                continued = 1
            else:
                if initial in '([{': parenlev = parenlev + 1
                elif initial in ')]}': parenlev = parenlev - 1
-                if stashed:
-                    yield stashed
-                    stashed = None
                yield (OP, token, spos, epos, line)
        else:
            yield (ERRORTOKEN, line[pos],
                   (lnum, pos), (lnum, pos+1), line)
            pos = pos + 1

-    if stashed:
-        yield stashed
-        stashed = None
-
    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
@@ -167,34 +167,34 @@ class TestAsyncAwait(GrammarTest):
            async def foo(): await x
            """)

-        self.invalid_syntax("await x")
-        self.invalid_syntax("""def foo():
-                                await x""")
+        self.validate("await x")
+        self.validate("""def foo():
+                             await x""")

-        self.invalid_syntax("""def foo():
+        self.validate("""def foo():
            def foo(): pass
            async def foo(): pass
            await x
        """)

    def test_async_var(self):
-        self.validate("""async = 1""")
-        self.validate("""await = 1""")
-        self.validate("""def async(): pass""")
+        self.invalid_syntax("""async = 1""")
+        self.invalid_syntax("""await = 1""")
+        self.invalid_syntax("""def async(): pass""")

    def test_async_with(self):
        self.validate("""async def foo():
                             async for a in b: pass""")

-        self.invalid_syntax("""def foo():
-                                   async for a in b: pass""")
+        self.validate("""def foo():
+                             async for a in b: pass""")

    def test_async_for(self):
        self.validate("""async def foo():
                             async with a: pass""")

-        self.invalid_syntax("""def foo():
-                                   async with a: pass""")
+        self.validate("""def foo():
+                             async with a: pass""")


 class TestRaiseChanges(GrammarTest):

@@ -477,3 +477,7 @@ def diff(fn, result):
            os.remove("@")
        except OSError:
            pass
+
+
+if __name__ == '__main__':
+    unittest.main()
@@ -1703,7 +1703,7 @@ class Helper:
    # in pydoc_data/topics.py.
    #
    # CAUTION: if you change one of these dictionaries, be sure to adapt the
-    #          list of needed labels in Doc/tools/pyspecific.py and
+    #          list of needed labels in Doc/tools/extensions/pyspecific.py and
    #          regenerate the pydoc_data/topics.py file by running
    #              make pydoc-topics
    #          in Doc/ and copying the output file into the Lib/ directory.

@@ -1715,6 +1715,8 @@ class Helper:
        'and': 'BOOLEAN',
        'as': 'with',
        'assert': ('assert', ''),
+       'async': ('async', ''),
+       'await': ('await', ''),
        'break': ('break', 'while for'),
        'class': ('class', 'CLASSES SPECIALMETHODS'),
        'continue': ('continue', 'while for'),
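Once pydoc_data/topics.py is regenerated with these entries, keyword help resolves for both words. In a 3.7+ REPL that is the familiar help('async'); the same lookup done programmatically (illustration only):

import pydoc

pydoc.help('async')   # prints the 'async' topic from pydoc_data/topics.py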
@@ -91,11 +91,12 @@ classdef = 333
 arglist = 334
 argument = 335
 comp_iter = 336
-comp_for = 337
-comp_if = 338
-encoding_decl = 339
-yield_expr = 340
-yield_arg = 341
+sync_comp_for = 337
+comp_for = 338
+comp_if = 339
+encoding_decl = 340
+yield_expr = 341
+yield_arg = 342
 #--end constants--

 sym_name = {}
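The renumbering is observable through the symbol module; on a 3.7 interpreter (sketch):

import symbol

print(symbol.sym_name[337])   # 'sync_comp_for'
print(symbol.sym_name[338])   # 'comp_for'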
@@ -231,12 +231,6 @@ class BaseTaskTests:
        with self.assertRaises(TypeError):
            asyncio.ensure_future('ok')

-    def test_async_warning(self):
-        f = self.new_future(self.loop)
-        with self.assertWarnsRegex(DeprecationWarning,
-                                   'function is deprecated, use ensure_'):
-            self.assertIs(f, asyncio.async(f))
-
    def test_get_stack(self):
        T = None

@@ -394,20 +394,14 @@ class AsyncBadSyntaxTest(unittest.TestCase):
        ]

        for code in samples:
-            with self.subTest(code=code), self.assertWarnsRegex(
-                    DeprecationWarning,
-                    "'await' will become reserved keywords"):
+            with self.subTest(code=code), self.assertRaises(SyntaxError):
                compile(code, "<test>", "exec")

    def test_badsyntax_3(self):
-        with self.assertRaises(DeprecationWarning):
-            with warnings.catch_warnings():
-                warnings.simplefilter("error")
-                compile("async = 1", "<test>", "exec")
-
-    def test_goodsyntax_1(self):
-        # Tests for issue 24619
+        with self.assertRaises(SyntaxError):
+            compile("async = 1", "<test>", "exec")

+    def test_badsyntax_4(self):
        samples = [
            '''def foo(await):
                async def foo(): pass

@@ -454,14 +448,8 @@ class AsyncBadSyntaxTest(unittest.TestCase):
        ]

        for code in samples:
-            with self.subTest(code=code):
-                loc = {}
-
-                with warnings.catch_warnings():
-                    warnings.simplefilter("ignore")
-                    exec(code, loc, loc)
-
-                self.assertEqual(loc['foo'](10), 11)
+            with self.subTest(code=code), self.assertRaises(SyntaxError):
+                compile(code, "<test>", "exec")


 class TokenizerRegrTest(unittest.TestCase):
@@ -679,16 +679,16 @@ class IllegalSyntaxTestCase(unittest.TestCase):
    def test_illegal_encoding(self):
        # Illegal encoding declaration
        tree = \
-            (339,
+            (340,
            (257, (0, '')))
        self.check_bad_tree(tree, "missed encoding")
        tree = \
-            (339,
+            (340,
            (257, (0, '')),
            b'iso-8859-1')
        self.check_bad_tree(tree, "non-string encoding")
        tree = \
-            (339,
+            (340,
            (257, (0, '')),
            '\udcff')
        with self.assertRaises(UnicodeEncodeError):
@@ -759,7 +759,7 @@ def"', """\
    """)

        self.check_tokenize("async def foo(): pass", """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
    NAME       'def'          (1, 6) (1, 9)
    NAME       'foo'          (1, 10) (1, 13)
    OP         '('            (1, 13) (1, 14)

@@ -776,7 +776,7 @@ async def foo():
    await
 async += 1
 ''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
    NAME       'def'          (1, 6) (1, 9)
    NAME       'foo'          (1, 10) (1, 13)
    OP         '('            (1, 13) (1, 14)

@@ -787,12 +787,12 @@ async += 1
    NAME       'def'          (2, 2) (2, 5)
    NAME       'foo'          (2, 6) (2, 9)
    OP         '('            (2, 9) (2, 10)
-    AWAIT      'await'       (2, 10) (2, 15)
+    NAME       'await'       (2, 10) (2, 15)
    OP         ')'            (2, 15) (2, 16)
    OP         ':'            (2, 16) (2, 17)
    NEWLINE    '\\n'          (2, 17) (2, 18)
    INDENT     '    '         (3, 0) (3, 4)
-    AWAIT      'await'       (3, 4) (3, 9)
+    NAME       'await'       (3, 4) (3, 9)
    OP         '='            (3, 10) (3, 11)
    NUMBER     '1'            (3, 12) (3, 13)
    NEWLINE    '\\n'          (3, 13) (3, 14)

@@ -802,7 +802,7 @@ async += 1
    OP         ':'            (4, 6) (4, 7)
    NEWLINE    '\\n'          (4, 7) (4, 8)
    INDENT     '    '         (5, 0) (5, 4)
-    AWAIT      'await'       (5, 4) (5, 9)
+    NAME       'await'       (5, 4) (5, 9)
    NEWLINE    '\\n'          (5, 9) (5, 10)
    DEDENT     ''             (6, 0) (6, 0)
    DEDENT     ''             (6, 0) (6, 0)

@@ -815,7 +815,7 @@ async += 1
        self.check_tokenize('''\
 async def foo():
   async for i in 1: pass''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
    NAME       'def'          (1, 6) (1, 9)
    NAME       'foo'          (1, 10) (1, 13)
    OP         '('            (1, 13) (1, 14)

@@ -823,7 +823,7 @@ async def foo():
    OP         ':'            (1, 15) (1, 16)
    NEWLINE    '\\n'          (1, 16) (1, 17)
    INDENT     '  '           (2, 0) (2, 2)
-    ASYNC      'async'       (2, 2) (2, 7)
+    NAME       'async'       (2, 2) (2, 7)
    NAME       'for'          (2, 8) (2, 11)
    NAME       'i'            (2, 12) (2, 13)
    NAME       'in'           (2, 14) (2, 16)

@@ -834,14 +834,14 @@ async def foo():
    """)

        self.check_tokenize('''async def foo(async): await''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
    NAME       'def'          (1, 6) (1, 9)
    NAME       'foo'          (1, 10) (1, 13)
    OP         '('            (1, 13) (1, 14)
-    ASYNC      'async'       (1, 14) (1, 19)
+    NAME       'async'       (1, 14) (1, 19)
    OP         ')'            (1, 19) (1, 20)
    OP         ':'            (1, 20) (1, 21)
-    AWAIT      'await'       (1, 22) (1, 27)
+    NAME       'await'       (1, 22) (1, 27)
    """)

        self.check_tokenize('''\

@@ -866,7 +866,7 @@ def f():
    OP         ':'            (3, 11) (3, 12)
    NAME       'pass'         (3, 13) (3, 17)
    NEWLINE    '\\n'          (3, 17) (3, 18)
-    ASYNC      'async'       (4, 2) (4, 7)
+    NAME       'async'       (4, 2) (4, 7)
    NAME       'def'          (4, 8) (4, 11)
    NAME       'bar'          (4, 12) (4, 15)
    OP         '('            (4, 15) (4, 16)

@@ -888,7 +888,7 @@ async def f():
   async def bar(): pass

   await = 2''', """\
-    ASYNC      'async'       (1, 0) (1, 5)
+    NAME       'async'       (1, 0) (1, 5)
    NAME       'def'          (1, 6) (1, 9)
    NAME       'f'            (1, 10) (1, 11)
    OP         '('            (1, 11) (1, 12)

@@ -904,7 +904,7 @@ async def f():
    OP         ':'            (3, 11) (3, 12)
    NAME       'pass'         (3, 13) (3, 17)
    NEWLINE    '\\n'          (3, 17) (3, 18)
-    ASYNC      'async'       (4, 2) (4, 7)
+    NAME       'async'       (4, 2) (4, 7)
    NAME       'def'          (4, 8) (4, 11)
    NAME       'bar'          (4, 12) (4, 15)
    OP         '('            (4, 15) (4, 16)

@@ -913,7 +913,7 @@ async def f():
    NAME       'pass'         (4, 19) (4, 23)
    NEWLINE    '\\n'          (4, 23) (4, 24)
    NL         '\\n'          (5, 0) (5, 1)
-    AWAIT      'await'       (6, 2) (6, 7)
+    NAME       'await'       (6, 2) (6, 7)
    OP         '='            (6, 8) (6, 9)
    NUMBER     '2'            (6, 10) (6, 11)
    DEDENT     ''             (7, 0) (7, 0)
@@ -491,12 +491,6 @@ def _tokenize(readline, encoding):
    contline = None
    indents = [0]

-    # 'stashed' and 'async_*' are used for async/await parsing
-    stashed = None
-    async_def = False
-    async_def_indent = 0
-    async_def_nl = False
-
    if encoding is not None:
        if encoding == "utf-8-sig":
            # BOM will already have been stripped.

@@ -571,18 +565,8 @@ def _tokenize(readline, encoding):
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]

-                if async_def and async_def_indent >= indents[-1]:
-                    async_def = False
-                    async_def_nl = False
-                    async_def_indent = 0
-
                yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)

-            if async_def and async_def_nl and async_def_indent >= indents[-1]:
-                async_def = False
-                async_def_nl = False
-                async_def_indent = 0
-
        else:                                  # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", (lnum, 0))

@@ -601,21 +585,13 @@ def _tokenize(readline, encoding):
                        (initial == '.' and token != '.' and token != '...')):
                    yield TokenInfo(NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
-                    if stashed:
-                        yield stashed
-                        stashed = None
                    if parenlev > 0:
                        yield TokenInfo(NL, token, spos, epos, line)
                    else:
                        yield TokenInfo(NEWLINE, token, spos, epos, line)
-                        if async_def:
-                            async_def_nl = True

                elif initial == '#':
                    assert not token.endswith("\n")
-                    if stashed:
-                        yield stashed
-                        stashed = None
                    yield TokenInfo(COMMENT, token, spos, epos, line)

                elif token in triple_quoted:

@@ -662,36 +638,7 @@ def _tokenize(readline, encoding):
                        yield TokenInfo(STRING, token, spos, epos, line)

                elif initial.isidentifier():               # ordinary name
-                    if token in ('async', 'await'):
-                        if async_def:
-                            yield TokenInfo(
-                                ASYNC if token == 'async' else AWAIT,
-                                token, spos, epos, line)
-                            continue
-
-                    tok = TokenInfo(NAME, token, spos, epos, line)
-                    if token == 'async' and not stashed:
-                        stashed = tok
-                        continue
-
-                    if token == 'def':
-                        if (stashed
-                                and stashed.type == NAME
-                                and stashed.string == 'async'):
-
-                            async_def = True
-                            async_def_indent = indents[-1]
-
-                            yield TokenInfo(ASYNC, stashed.string,
-                                            stashed.start, stashed.end,
-                                            stashed.line)
-                            stashed = None
-
-                    if stashed:
-                        yield stashed
-                        stashed = None
-
-                    yield tok
+                    yield TokenInfo(NAME, token, spos, epos, line)
                elif initial == '\\':                      # continued stmt
                    continued = 1
                else:

@@ -699,19 +646,12 @@ def _tokenize(readline, encoding):
                        parenlev += 1
                    elif initial in ')]}':
                        parenlev -= 1
-                    if stashed:
-                        yield stashed
-                        stashed = None
                    yield TokenInfo(OP, token, spos, epos, line)
            else:
                yield TokenInfo(ERRORTOKEN, line[pos],
                                (lnum, pos), (lnum, pos+1), line)
                pos += 1

-    if stashed:
-        yield stashed
-        stashed = None
-
    for indent in indents[1:]:                 # pop remaining indent levels
        yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
@@ -0,0 +1 @@
+Make ``async`` and ``await`` proper keywords, as specified in PEP 492.
@@ -103,8 +103,6 @@ const char *_PyParser_TokenNames[] = {
    "ELLIPSIS",
    /* This table must match the #defines in token.h! */
    "OP",
-    "AWAIT",
-    "ASYNC",
    "<ERRORTOKEN>",
    "COMMENT",
    "NL",

@@ -151,10 +149,6 @@ tok_new(void)
    tok->decoding_buffer = NULL;
 #endif

-    tok->async_def = 0;
-    tok->async_def_indent = 0;
-    tok->async_def_nl = 0;
-
    return tok;
 }

@@ -1471,21 +1465,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
        }
    }

-    if (tok->async_def
-        && !blankline
-        && tok->level == 0
-        /* There was a NEWLINE after ASYNC DEF,
-           so we're past the signature. */
-        && tok->async_def_nl
-        /* Current indentation level is less than where
-           the async function was defined */
-        && tok->async_def_indent >= tok->indent)
-    {
-        tok->async_def = 0;
-        tok->async_def_indent = 0;
-        tok->async_def_nl = 0;
-    }
-
  again:
    tok->start = NULL;
    /* Skip spaces */
@@ -1550,43 +1529,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
        *p_start = tok->start;
        *p_end = tok->cur;

-        /* async/await parsing block. */
-        if (tok->cur - tok->start == 5) {
-            /* Current token length is 5. */
-            if (tok->async_def) {
-                /* We're inside an 'async def' function. */
-                if (memcmp(tok->start, "async", 5) == 0) {
-                    return ASYNC;
-                }
-                if (memcmp(tok->start, "await", 5) == 0) {
-                    return AWAIT;
-                }
-            }
-            else if (memcmp(tok->start, "async", 5) == 0) {
-                /* The current token is 'async'.
-                   Look ahead one token.*/
-
-                struct tok_state ahead_tok;
-                char *ahead_tok_start = NULL, *ahead_tok_end = NULL;
-                int ahead_tok_kind;
-
-                memcpy(&ahead_tok, tok, sizeof(ahead_tok));
-                ahead_tok_kind = tok_get(&ahead_tok, &ahead_tok_start,
-                                         &ahead_tok_end);
-
-                if (ahead_tok_kind == NAME
-                    && ahead_tok.cur - ahead_tok.start == 3
-                    && memcmp(ahead_tok.start, "def", 3) == 0)
-                {
-                    /* The next token is going to be 'def', so instead of
-                       returning 'async' NAME token, we return ASYNC. */
-                    tok->async_def_indent = tok->indent;
-                    tok->async_def = 1;
-                    return ASYNC;
-                }
-            }
-        }
-
        return NAME;
    }

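The deleted block was the heart of the old scheme: because 'async' was only special directly before 'def', the C tokenizer had to copy its whole state and peek one token ahead. A rough Python rendering of the removed heuristic (hypothetical helper, for exposition only; the real code operates on struct tok_state):

def classify(words, i, inside_async_def):
    """Decide whether words[i] is ASYNC/AWAIT or a plain NAME,
    mimicking the pre-3.7 look-ahead this commit deletes."""
    word = words[i]
    if inside_async_def and word in ("async", "await"):
        return word.upper()                    # ASYNC or AWAIT
    if word == "async":
        # One-token look-ahead, as done via the memcpy'd tok_state above.
        if i + 1 < len(words) and words[i + 1] == "def":
            return "ASYNC"                     # start of 'async def'
    return "NAME"

print(classify(["async", "def", "f"], 0, False))  # ASYNC
print(classify(["async", "=", "1"], 0, False))    # NAME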
@@ -1599,11 +1541,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
        *p_start = tok->start;
        *p_end = tok->cur - 1; /* Leave '\n' out of the string */
        tok->cont_line = 0;
-        if (tok->async_def) {
-            /* We're somewhere inside an 'async def' function, and
-               we've encountered a NEWLINE after its signature. */
-            tok->async_def_nl = 1;
-        }
        return NEWLINE;
    }

@@ -65,13 +65,6 @@ struct tok_state {
    const char* enc;        /* Encoding for the current str. */
    const char* str;
    const char* input;      /* Tokenizer's newline translated copy of the string. */
-
-    /* async/await related fields; can be removed in 3.7 when async and await
-       become normal keywords. */
-    int async_def;        /* =1 if tokens are inside an 'async def' body. */
-    int async_def_indent; /* Indentation level of the outermost 'async def'. */
-    int async_def_nl;     /* =1 if the outermost 'async def' had at least one
-                             NEWLINE token after it. */
 };

 extern struct tok_state *PyTokenizer_FromString(const char *, int);
Python/ast.c
@@ -949,28 +949,6 @@ forbidden_name(struct compiling *c, identifier name, const node *n,
        ast_error(c, n, "assignment to keyword");
        return 1;
    }
-    if (_PyUnicode_EqualToASCIIString(name, "async") ||
-        _PyUnicode_EqualToASCIIString(name, "await"))
-    {
-        PyObject *message = PyUnicode_FromString(
-            "'async' and 'await' will become reserved keywords"
-            " in Python 3.7");
-        int ret;
-        if (message == NULL) {
-            return 1;
-        }
-        ret = PyErr_WarnExplicitObject(
-                PyExc_DeprecationWarning,
-                message,
-                c->c_filename,
-                LINENO(n),
-                NULL,
-                NULL);
-        Py_DECREF(message);
-        if (ret < 0) {
-            return 1;
-        }
-    }
    if (full_checks) {
        const char * const *p;
        for (p = FORBIDDEN; *p; p++) {
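For contrast with the deleted warning path, a version-agnostic sketch (illustration only) of how the same input behaves before and after this commit:

import warnings

with warnings.catch_warnings():
    warnings.simplefilter("error")
    try:
        compile("async = 1", "<test>", "exec")
    except DeprecationWarning as w:
        print("3.6 behavior:", w)         # warning, promoted to an error here
    except SyntaxError as e:
        print("3.7+ behavior:", e.msg)    # unconditional syntax error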
@@ -1642,9 +1620,10 @@ ast_for_funcdef_impl(struct compiling *c, const node *n,
 static stmt_ty
 ast_for_async_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
 {
-    /* async_funcdef: ASYNC funcdef */
+    /* async_funcdef: 'async' funcdef */
    REQ(n, async_funcdef);
-    REQ(CHILD(n, 0), ASYNC);
+    REQ(CHILD(n, 0), NAME);
+    assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
    REQ(CHILD(n, 1), funcdef);

    return ast_for_funcdef_impl(c, CHILD(n, 1), decorator_seq,

@@ -1663,9 +1642,10 @@ ast_for_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
 static stmt_ty
 ast_for_async_stmt(struct compiling *c, const node *n)
 {
-    /* async_stmt: ASYNC (funcdef | with_stmt | for_stmt) */
+    /* async_stmt: 'async' (funcdef | with_stmt | for_stmt) */
    REQ(n, async_stmt);
-    REQ(CHILD(n, 0), ASYNC);
+    REQ(CHILD(n, 0), NAME);
+    assert(strcmp(STR(CHILD(n, 0)), "async") == 0);

    switch (TYPE(CHILD(n, 1))) {
        case funcdef:

@@ -1778,17 +1758,23 @@ static int
 count_comp_fors(struct compiling *c, const node *n)
 {
    int n_fors = 0;
-    int is_async;

  count_comp_for:
-    is_async = 0;
    n_fors++;
    REQ(n, comp_for);
-    if (TYPE(CHILD(n, 0)) == ASYNC) {
-        is_async = 1;
+    if (NCH(n) == 2) {
+        REQ(CHILD(n, 0), NAME);
+        assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+        n = CHILD(n, 1);
+    }
+    else if (NCH(n) == 1) {
+        n = CHILD(n, 0);
+    }
+    else {
+        goto error;
    }
-    if (NCH(n) == (5 + is_async)) {
-        n = CHILD(n, 4 + is_async);
+    if (NCH(n) == (5)) {
+        n = CHILD(n, 4);
    }
    else {
        return n_fors;

@@ -1807,6 +1793,7 @@ count_comp_fors(struct compiling *c, const node *n)
        return n_fors;
    }

+  error:
    /* Should never be reached */
    PyErr_SetString(PyExc_SystemError,
                    "logic error in count_comp_fors");

@@ -1855,19 +1842,27 @@ ast_for_comprehension(struct compiling *c, const node *n)
        asdl_seq *t;
        expr_ty expression, first;
        node *for_ch;
+        node *sync_n;
        int is_async = 0;

        REQ(n, comp_for);

-        if (TYPE(CHILD(n, 0)) == ASYNC) {
+        if (NCH(n) == 2) {
            is_async = 1;
+            REQ(CHILD(n, 0), NAME);
+            assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+            sync_n = CHILD(n, 1);
        }
+        else {
+            sync_n = CHILD(n, 0);
+        }
+        REQ(sync_n, sync_comp_for);

-        for_ch = CHILD(n, 1 + is_async);
+        for_ch = CHILD(sync_n, 1);
        t = ast_for_exprlist(c, for_ch, Store);
        if (!t)
            return NULL;
-        expression = ast_for_expr(c, CHILD(n, 3 + is_async));
+        expression = ast_for_expr(c, CHILD(sync_n, 3));
        if (!expression)
            return NULL;

@@ -1884,11 +1879,11 @@ ast_for_comprehension(struct compiling *c, const node *n)
        if (!comp)
            return NULL;

-        if (NCH(n) == (5 + is_async)) {
+        if (NCH(sync_n) == 5) {
            int j, n_ifs;
            asdl_seq *ifs;

-            n = CHILD(n, 4 + is_async);
+            n = CHILD(sync_n, 4);
            n_ifs = count_comp_ifs(c, n);
            if (n_ifs == -1)
                return NULL;

@@ -2470,7 +2465,7 @@ ast_for_atom_expr(struct compiling *c, const node *n)
    REQ(n, atom_expr);
    nch = NCH(n);

-    if (TYPE(CHILD(n, 0)) == AWAIT) {
+    if (TYPE(CHILD(n, 0)) == NAME && strcmp(STR(CHILD(n, 0)), "await") == 0) {
        start = 1;
        assert(nch > 1);
    }

@@ -2497,7 +2492,7 @@ ast_for_atom_expr(struct compiling *c, const node *n)
    }

    if (start) {
-        /* there was an AWAIT */
+        /* there was an 'await' */
        return Await(e, LINENO(n), n->n_col_offset, c->c_arena);
    }
    else {

@@ -2562,7 +2557,7 @@ ast_for_expr(struct compiling *c, const node *n)
      term: factor (('*'|'@'|'/'|'%'|'//') factor)*
      factor: ('+'|'-'|'~') factor | power
      power: atom_expr ['**' factor]
-      atom_expr: [AWAIT] atom trailer*
+      atom_expr: ['await'] atom trailer*
      yield_expr: 'yield' [yield_arg]
    */
@ -1812,272 +1812,284 @@ static state states_80[2] = {
|
|||
{2, arcs_80_0},
|
||||
{1, arcs_80_1},
|
||||
};
|
||||
static arc arcs_81_0[2] = {
|
||||
{21, 1},
|
||||
{101, 2},
|
||||
static arc arcs_81_0[1] = {
|
||||
{101, 1},
|
||||
};
|
||||
static arc arcs_81_1[1] = {
|
||||
{101, 2},
|
||||
{66, 2},
|
||||
};
|
||||
static arc arcs_81_2[1] = {
|
||||
{66, 3},
|
||||
{102, 3},
|
||||
};
|
||||
static arc arcs_81_3[1] = {
|
||||
{102, 4},
|
||||
{112, 4},
|
||||
};
|
||||
static arc arcs_81_4[1] = {
|
||||
{112, 5},
|
||||
static arc arcs_81_4[2] = {
|
||||
{171, 5},
|
||||
{0, 4},
|
||||
};
|
||||
static arc arcs_81_5[2] = {
|
||||
{171, 6},
|
||||
static arc arcs_81_5[1] = {
|
||||
{0, 5},
|
||||
};
|
||||
static arc arcs_81_6[1] = {
|
||||
{0, 6},
|
||||
};
|
||||
static state states_81[7] = {
|
||||
{2, arcs_81_0},
|
||||
static state states_81[6] = {
|
||||
{1, arcs_81_0},
|
||||
{1, arcs_81_1},
|
||||
{1, arcs_81_2},
|
||||
{1, arcs_81_3},
|
||||
{1, arcs_81_4},
|
||||
{2, arcs_81_5},
|
||||
{1, arcs_81_6},
|
||||
{2, arcs_81_4},
|
||||
{1, arcs_81_5},
|
||||
};
|
||||
static arc arcs_82_0[1] = {
|
||||
{97, 1},
|
||||
static arc arcs_82_0[2] = {
|
||||
{21, 1},
|
||||
{173, 2},
|
||||
};
|
||||
static arc arcs_82_1[1] = {
|
||||
{173, 2},
|
||||
};
|
||||
static arc arcs_82_2[1] = {
|
||||
{0, 2},
|
||||
};
|
||||
static state states_82[3] = {
|
||||
{2, arcs_82_0},
|
||||
{1, arcs_82_1},
|
||||
{1, arcs_82_2},
|
||||
};
|
||||
static arc arcs_83_0[1] = {
|
||||
{97, 1},
|
||||
};
|
||||
static arc arcs_83_1[1] = {
|
||||
{114, 2},
|
||||
};
|
||||
static arc arcs_82_2[2] = {
|
||||
static arc arcs_83_2[2] = {
|
||||
{171, 3},
|
||||
{0, 2},
|
||||
};
|
||||
static arc arcs_82_3[1] = {
|
||||
static arc arcs_83_3[1] = {
|
||||
{0, 3},
|
||||
};
|
||||
static state states_82[4] = {
|
||||
{1, arcs_82_0},
|
||||
{1, arcs_82_1},
|
||||
{2, arcs_82_2},
|
||||
{1, arcs_82_3},
|
||||
};
|
||||
static arc arcs_83_0[1] = {
|
||||
{23, 1},
|
||||
};
|
||||
static arc arcs_83_1[1] = {
|
||||
{0, 1},
|
||||
};
|
||||
static state states_83[2] = {
|
||||
static state states_83[4] = {
|
||||
{1, arcs_83_0},
|
||||
{1, arcs_83_1},
|
||||
{2, arcs_83_2},
|
||||
{1, arcs_83_3},
|
||||
};
|
||||
static arc arcs_84_0[1] = {
|
||||
{174, 1},
|
||||
{23, 1},
|
||||
};
|
||||
static arc arcs_84_1[2] = {
|
||||
{175, 2},
|
||||
static arc arcs_84_1[1] = {
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_84_2[1] = {
|
||||
{0, 2},
|
||||
};
|
||||
static state states_84[3] = {
|
||||
static state states_84[2] = {
|
||||
{1, arcs_84_0},
|
||||
{2, arcs_84_1},
|
||||
{1, arcs_84_2},
|
||||
{1, arcs_84_1},
|
||||
};
|
||||
static arc arcs_85_0[2] = {
|
||||
{77, 1},
|
||||
{9, 2},
|
||||
static arc arcs_85_0[1] = {
|
||||
{175, 1},
|
||||
};
|
||||
static arc arcs_85_1[1] = {
|
||||
{26, 2},
|
||||
static arc arcs_85_1[2] = {
|
||||
{176, 2},
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_85_2[1] = {
|
||||
{0, 2},
|
||||
};
|
||||
static state states_85[3] = {
|
||||
{2, arcs_85_0},
|
||||
{1, arcs_85_1},
|
||||
{1, arcs_85_0},
|
||||
{2, arcs_85_1},
|
||||
{1, arcs_85_2},
|
||||
};
|
||||
static dfa dfas[86] = {
|
||||
{256, "single_input", 0, 3, states_0,
|
||||
"\004\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\102"},
|
||||
{257, "file_input", 0, 2, states_1,
|
||||
"\204\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\102"},
|
||||
{258, "eval_input", 0, 3, states_2,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{259, "decorator", 0, 7, states_3,
|
||||
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{260, "decorators", 0, 2, states_4,
|
||||
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{261, "decorated", 0, 3, states_5,
|
||||
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{262, "async_funcdef", 0, 3, states_6,
|
||||
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{263, "funcdef", 0, 8, states_7,
|
||||
"\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{264, "parameters", 0, 4, states_8,
|
||||
"\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{265, "typedargslist", 0, 19, states_9,
|
||||
"\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{266, "tfpdef", 0, 4, states_10,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{267, "varargslist", 0, 19, states_11,
|
||||
"\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{268, "vfpdef", 0, 2, states_12,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{269, "stmt", 0, 2, states_13,
|
||||
"\000\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\102"},
|
||||
{270, "simple_stmt", 0, 4, states_14,
|
||||
"\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\100"},
|
||||
{271, "small_stmt", 0, 2, states_15,
|
||||
"\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\100"},
|
||||
{272, "expr_stmt", 0, 6, states_16,
|
||||
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{273, "annassign", 0, 5, states_17,
|
||||
"\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{274, "testlist_star_expr", 0, 3, states_18,
|
||||
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{275, "augassign", 0, 2, states_19,
|
||||
"\000\000\000\000\000\000\360\377\001\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{276, "del_stmt", 0, 3, states_20,
|
||||
"\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{277, "pass_stmt", 0, 2, states_21,
|
||||
"\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{278, "flow_stmt", 0, 2, states_22,
|
||||
"\000\000\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000\000\000\000\000\100"},
|
||||
{279, "break_stmt", 0, 2, states_23,
|
||||
"\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{280, "continue_stmt", 0, 2, states_24,
|
||||
"\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{281, "return_stmt", 0, 3, states_25,
|
||||
"\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{282, "yield_stmt", 0, 2, states_26,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
|
||||
{283, "raise_stmt", 0, 5, states_27,
|
||||
"\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{284, "import_stmt", 0, 2, states_28,
|
||||
"\000\000\000\000\000\000\000\000\000\040\001\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{285, "import_name", 0, 3, states_29,
|
||||
"\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{286, "import_from", 0, 8, states_30,
|
||||
"\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{287, "import_as_name", 0, 4, states_31,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{288, "dotted_as_name", 0, 4, states_32,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{289, "import_as_names", 0, 3, states_33,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{290, "dotted_as_names", 0, 2, states_34,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{291, "dotted_name", 0, 2, states_35,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{292, "global_stmt", 0, 3, states_36,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000"},
|
||||
{293, "nonlocal_stmt", 0, 3, states_37,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000"},
|
||||
{294, "assert_stmt", 0, 5, states_38,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000"},
|
||||
{295, "compound_stmt", 0, 2, states_39,
|
||||
"\000\010\140\000\000\000\000\000\000\000\000\000\262\004\000\000\000\000\000\000\000\002"},
|
||||
{296, "async_stmt", 0, 3, states_40,
|
||||
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{297, "if_stmt", 0, 8, states_41,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000"},
|
||||
{298, "while_stmt", 0, 8, states_42,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000"},
|
||||
{299, "for_stmt", 0, 10, states_43,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
|
||||
{300, "try_stmt", 0, 13, states_44,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000"},
|
||||
{301, "with_stmt", 0, 5, states_45,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000"},
|
||||
{302, "with_item", 0, 4, states_46,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{303, "except_clause", 0, 5, states_47,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000"},
|
||||
{304, "suite", 0, 5, states_48,
|
||||
"\004\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\100"},
|
||||
{305, "test", 0, 6, states_49,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{306, "test_nocond", 0, 2, states_50,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{307, "lambdef", 0, 5, states_51,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"},
|
||||
{308, "lambdef_nocond", 0, 5, states_52,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"},
|
||||
{309, "or_test", 0, 2, states_53,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000"},
|
||||
{310, "and_test", 0, 2, states_54,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000"},
|
||||
{311, "not_test", 0, 3, states_55,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000"},
|
||||
{312, "comparison", 0, 2, states_56,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{313, "comp_op", 0, 4, states_57,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\362\017\000\000\000\000\000"},
|
||||
{314, "star_expr", 0, 3, states_58,
|
||||
"\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{315, "expr", 0, 2, states_59,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{316, "xor_expr", 0, 2, states_60,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{317, "and_expr", 0, 2, states_61,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{318, "shift_expr", 0, 2, states_62,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{319, "arith_expr", 0, 2, states_63,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{320, "term", 0, 2, states_64,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{321, "factor", 0, 3, states_65,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{322, "power", 0, 4, states_66,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000"},
|
||||
{323, "atom_expr", 0, 3, states_67,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000"},
|
||||
{324, "atom", 0, 9, states_68,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\050\037\000"},
|
||||
{325, "testlist_comp", 0, 5, states_69,
|
||||
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{326, "trailer", 0, 7, states_70,
|
||||
"\000\040\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\010\000\000"},
|
||||
{327, "subscriptlist", 0, 3, states_71,
|
||||
"\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{328, "subscript", 0, 5, states_72,
|
||||
"\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{329, "sliceop", 0, 3, states_73,
|
||||
"\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{330, "exprlist", 0, 3, states_74,
|
||||
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000"},
|
||||
{331, "testlist", 0, 3, states_75,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{332, "dictorsetmaker", 0, 14, states_76,
|
||||
"\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{333, "classdef", 0, 8, states_77,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
|
||||
{334, "arglist", 0, 3, states_78,
|
||||
"\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{335, "argument", 0, 4, states_79,
|
||||
"\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
{336, "comp_iter", 0, 2, states_80,
|
||||
"\000\000\040\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\000\000\000\000"},
|
||||
{337, "comp_for", 0, 7, states_81,
|
||||
"\000\000\040\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
|
||||
{338, "comp_if", 0, 4, states_82,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000"},
|
||||
{339, "encoding_decl", 0, 2, states_83,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{340, "yield_expr", 0, 3, states_84,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"},
|
||||
{341, "yield_arg", 0, 3, states_85,
|
||||
"\000\040\200\000\000\000\000\000\000\040\010\000\000\000\020\002\000\300\220\050\037\000"},
|
||||
static arc arcs_86_0[2] = {
|
||||
{77, 1},
|
||||
{9, 2},
|
||||
};
|
||||
static label labels[176] = {
|
||||
static arc arcs_86_1[1] = {
|
||||
{26, 2},
|
||||
};
|
||||
static arc arcs_86_2[1] = {
|
||||
{0, 2},
|
||||
};
|
||||
static state states_86[3] = {
|
||||
{2, arcs_86_0},
|
||||
{1, arcs_86_1},
|
||||
{1, arcs_86_2},
|
||||
};
|
||||
static dfa dfas[87] = {
|
||||
{256, "single_input", 0, 3, states_0,
|
||||
"\004\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\202\000"},
|
||||
{257, "file_input", 0, 2, states_1,
|
||||
"\204\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\202\000"},
|
||||
{258, "eval_input", 0, 3, states_2,
|
||||
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
|
||||
{259, "decorator", 0, 7, states_3,
|
||||
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{260, "decorators", 0, 2, states_4,
|
||||
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{261, "decorated", 0, 3, states_5,
|
||||
"\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{262, "async_funcdef", 0, 3, states_6,
|
||||
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{263, "funcdef", 0, 8, states_7,
|
||||
"\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{264, "parameters", 0, 4, states_8,
|
||||
"\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{265, "typedargslist", 0, 19, states_9,
|
||||
"\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{266, "tfpdef", 0, 4, states_10,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{267, "varargslist", 0, 19, states_11,
|
||||
"\000\000\200\000\006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{268, "vfpdef", 0, 2, states_12,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{269, "stmt", 0, 2, states_13,
|
||||
"\000\050\340\000\002\000\000\000\012\076\011\007\262\004\020\002\000\300\220\050\037\202\000"},
|
||||
{270, "simple_stmt", 0, 4, states_14,
|
||||
"\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\200\000"},
|
||||
{271, "small_stmt", 0, 2, states_15,
|
||||
"\000\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\200\000"},
|
||||
{272, "expr_stmt", 0, 6, states_16,
|
||||
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
|
||||
{273, "annassign", 0, 5, states_17,
|
||||
"\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{274, "testlist_star_expr", 0, 3, states_18,
|
||||
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
|
||||
{275, "augassign", 0, 2, states_19,
|
||||
"\000\000\000\000\000\000\360\377\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{276, "del_stmt", 0, 3, states_20,
|
||||
"\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{277, "pass_stmt", 0, 2, states_21,
|
||||
"\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{278, "flow_stmt", 0, 2, states_22,
|
||||
"\000\000\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000\000\000\000\000\200\000"},
|
||||
{279, "break_stmt", 0, 2, states_23,
|
||||
"\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{280, "continue_stmt", 0, 2, states_24,
|
||||
"\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{281, "return_stmt", 0, 3, states_25,
|
||||
"\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{282, "yield_stmt", 0, 2, states_26,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
|
||||
{283, "raise_stmt", 0, 5, states_27,
|
||||
"\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{284, "import_stmt", 0, 2, states_28,
|
||||
"\000\000\000\000\000\000\000\000\000\040\001\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{285, "import_name", 0, 3, states_29,
|
||||
"\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{286, "import_from", 0, 8, states_30,
|
||||
"\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{287, "import_as_name", 0, 4, states_31,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{288, "dotted_as_name", 0, 4, states_32,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{289, "import_as_names", 0, 3, states_33,
|
||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{290, "dotted_as_names", 0, 2, states_34,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{291, "dotted_name", 0, 2, states_35,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{292, "global_stmt", 0, 3, states_36,
"\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000"},
{293, "nonlocal_stmt", 0, 3, states_37,
"\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000"},
{294, "assert_stmt", 0, 5, states_38,
"\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000"},
{295, "compound_stmt", 0, 2, states_39,
"\000\010\140\000\000\000\000\000\000\000\000\000\262\004\000\000\000\000\000\000\000\002\000"},
{296, "async_stmt", 0, 3, states_40,
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{297, "if_stmt", 0, 8, states_41,
"\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000"},
{298, "while_stmt", 0, 8, states_42,
"\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
{299, "for_stmt", 0, 10, states_43,
"\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000"},
{300, "try_stmt", 0, 13, states_44,
"\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000"},
{301, "with_stmt", 0, 5, states_45,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000"},
{302, "with_item", 0, 4, states_46,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{303, "except_clause", 0, 5, states_47,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
{304, "suite", 0, 5, states_48,
"\004\040\200\000\002\000\000\000\012\076\011\007\000\000\020\002\000\300\220\050\037\200\000"},
{305, "test", 0, 6, states_49,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{306, "test_nocond", 0, 2, states_50,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{307, "lambdef", 0, 5, states_51,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000"},
{308, "lambdef_nocond", 0, 5, states_52,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000"},
{309, "or_test", 0, 2, states_53,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000\000"},
{310, "and_test", 0, 2, states_54,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000\000"},
{311, "not_test", 0, 3, states_55,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\002\000\300\220\050\037\000\000"},
{312, "comparison", 0, 2, states_56,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{313, "comp_op", 0, 4, states_57,
"\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\362\017\000\000\000\000\000\000"},
{314, "star_expr", 0, 3, states_58,
"\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{315, "expr", 0, 2, states_59,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{316, "xor_expr", 0, 2, states_60,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{317, "and_expr", 0, 2, states_61,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{318, "shift_expr", 0, 2, states_62,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{319, "arith_expr", 0, 2, states_63,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{320, "term", 0, 2, states_64,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{321, "factor", 0, 3, states_65,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{322, "power", 0, 4, states_66,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000\000"},
{323, "atom_expr", 0, 3, states_67,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\200\050\037\000\000"},
{324, "atom", 0, 9, states_68,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\050\037\000\000"},
{325, "testlist_comp", 0, 5, states_69,
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{326, "trailer", 0, 7, states_70,
"\000\040\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\010\000\000\000"},
{327, "subscriptlist", 0, 3, states_71,
"\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{328, "subscript", 0, 5, states_72,
"\000\040\200\010\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{329, "sliceop", 0, 3, states_73,
"\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{330, "exprlist", 0, 3, states_74,
"\000\040\200\000\002\000\000\000\000\000\010\000\000\000\000\000\000\300\220\050\037\000\000"},
{331, "testlist", 0, 3, states_75,
"\000\040\200\000\000\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{332, "dictorsetmaker", 0, 14, states_76,
"\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{333, "classdef", 0, 8, states_77,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000"},
{334, "arglist", 0, 3, states_78,
"\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{335, "argument", 0, 4, states_79,
"\000\040\200\000\006\000\000\000\000\000\010\000\000\000\020\002\000\300\220\050\037\000\000"},
{336, "comp_iter", 0, 2, states_80,
"\000\000\040\000\000\000\000\000\000\000\000\000\042\000\000\000\000\000\000\000\000\000\000"},
{337, "sync_comp_for", 0, 6, states_81,
"\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000"},
{338, "comp_for", 0, 3, states_82,
"\000\000\040\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000"},
{339, "comp_if", 0, 4, states_83,
"\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000"},
{340, "encoding_decl", 0, 2, states_84,
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{341, "yield_expr", 0, 3, states_85,
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"},
{342, "yield_arg", 0, 3, states_86,
"\000\040\200\000\000\000\000\000\000\040\010\000\000\000\020\002\000\300\220\050\037\000\000"},
};
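/* Reviewer note (not part of the generated file): each row above is one
 * dfa entry -- {symbol number, name, initial state, state count, state
 * array, first-set bitmap} as declared in Parser/grammar.h.  The octal
 * string packs one bit per label index, least-significant bit first
 * within each byte, marking which labels can begin the nonterminal.
 * A minimal sketch of the decoding, assuming the helpers from
 * Parser/bitset.h:
 *
 *     #include <limits.h>  // CHAR_BIT
 *     #define BIT2BYTE(ibit) ((ibit) / CHAR_BIT)
 *     #define BIT2MASK(ibit) (1 << ((ibit) % CHAR_BIT))
 *     #define testbit(ss, ibit) (((ss)[BIT2BYTE(ibit)] & BIT2MASK(ibit)) != 0)
 *
 * For async_stmt (296) the bitmap is zero except byte 2 = \040 (bit 5),
 * i.e. bit 2*8+5 = 21; comparing first sets (compound_stmt has bits 21
 * and 22 where async_stmt has only 21) suggests label 21 is the new
 * {1, "async"} keyword label, matching the Grammar rule
 * async_stmt: 'async' (funcdef | with_stmt | for_stmt).
 */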
static label labels[177] = {
{0, "EMPTY"},
{256, 0},
{4, 0},
@@ -2099,7 +2111,7 @@ static label labels[176] = {
{333, 0},
{263, 0},
{262, 0},
{55, 0},
{1, "async"},
{1, "def"},
{1, 0},
{264, 0},
@@ -2128,7 +2140,7 @@ static label labels[176] = {
{274, 0},
{273, 0},
{275, 0},
{340, 0},
{341, 0},
{314, 0},
{36, 0},
{37, 0},
@@ -2229,7 +2241,7 @@ static label labels[176] = {
{31, 0},
{322, 0},
{323, 0},
{54, 0},
{1, "await"},
{324, 0},
{326, 0},
{325, 0},
@@ -2243,21 +2255,22 @@ static label labels[176] = {
{1, "None"},
{1, "True"},
{1, "False"},
{337, 0},
{338, 0},
{327, 0},
{328, 0},
{329, 0},
{1, "class"},
{335, 0},
{336, 0},
{338, 0},
{339, 0},
{337, 0},
{340, 0},
{1, "yield"},
{341, 0},
{342, 0},
};
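/* Reviewer note (not part of the generated file): each label above pairs
 * a number with an optional string, per Parser/grammar.h:
 *
 *     typedef struct {
 *         int   lb_type;   // terminal token number, or nonterminal symbol
 *         char *lb_str;    // keyword text for NAME-based keywords, else NULL
 *     } label;
 *
 * {1, "async"} and {1, "await"} are NAME tokens (token type 1) narrowed
 * to a literal spelling; this is how the two words enter the parse tables
 * as keywords now that the dedicated ASYNC and AWAIT token types are gone.
 */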
grammar _PyParser_Grammar = {
86,
87,
dfas,
{176, labels},
{177, labels},
256
};
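/* Reviewer note (not part of the generated file): the initializer fills
 * the grammar struct from Parser/grammar.h -- dfa count, dfa array,
 * labellist {count, array}, and start symbol (256, single_input).  Both
 * versions appear in the diff above: the dfa count goes from 86 to 87
 * (one new rule, sync_comp_for) and the label count from 176 to 177,
 * which likewise fits the one added nonterminal.
 */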