bpo-35975: Support parsing earlier minor versions of Python 3 (GH-12086)
This adds a `feature_version` flag to `ast.parse()` (documented) and `compile()` (hidden) that allows tweaking the parser to support older versions of the grammar. In particular, if `feature_version` is 5 or 6, the hacks for the `async` and `await` keywords from PEP 492 are reinstated. (For 7 or higher, these are unconditionally treated as keywords, but they are still special tokens rather than `NAME` tokens that the parser driver recognizes.) https://bugs.python.org/issue35975
parent bf94cc7b49
commit 495da29225
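Not part of the patch: a minimal usage sketch of the new flag, assuming an interpreter built with this change. With ``feature_version=6`` the 3.6 grammar is used, so ``async`` and ``await`` are ordinary names again; with 7 or higher they are hard keywords:

    import ast

    # 3.6 grammar: PEP 492 hacks reinstated, so these are plain assignments.
    tree = ast.parse("async = 12\nawait = 13\n", feature_version=6)
    print(type(tree.body[0]).__name__)   # Assign

    # 3.7+ grammar: "async" is a keyword, so the same source is rejected.
    try:
        ast.parse("async = 12\n", feature_version=7)
    except SyntaxError as err:
        print("rejected:", err)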
@@ -126,7 +126,7 @@ The abstract grammar is currently defined as follows:
 Apart from the node classes, the :mod:`ast` module defines these utility functions
 and classes for traversing abstract syntax trees:
 
-.. function:: parse(source, filename='<unknown>', mode='exec', *, type_comments=False)
+.. function:: parse(source, filename='<unknown>', mode='exec', *, type_comments=False, feature_version=-1)
 
    Parse the source into an AST node.  Equivalent to ``compile(source,
    filename, mode, ast.PyCF_ONLY_AST)``.
@@ -145,13 +145,19 @@ and classes for traversing abstract syntax trees:
    modified to correspond to :pep:`484` "signature type comments",
    e.g. ``(str, int) -> List[str]``.
 
+   Also, setting ``feature_version`` to the minor version of an
+   earlier Python 3 version will attempt to parse using that version's
+   grammar.  For example, setting ``feature_version=4`` will allow
+   the use of ``async`` and ``await`` as variable names.  The lowest
+   supported value is 4; the highest is ``sys.version_info[1]``.
+
    .. warning::
       It is possible to crash the Python interpreter with a
      sufficiently large/complex string due to stack depth limitations
      in Python's AST compiler.
 
    .. versionchanged:: 3.8
-      Added ``type_comments=True`` and ``mode='func_type'``.
+      Added ``type_comments``, ``mode='func_type'`` and ``feature_version``.
 
 
 .. function:: literal_eval(node_or_string)
@@ -203,6 +203,10 @@
 
 .. data:: OP
 
+.. data:: AWAIT
+
+.. data:: ASYNC
+
 .. data:: TYPE_IGNORE
 
 .. data:: TYPE_COMMENT
@@ -88,3 +88,6 @@ the :mod:`tokenize` module.
 
 .. versionchanged:: 3.8
    Added :data:`TYPE_COMMENT`.
+   Added :data:`AWAIT` and :data:`ASYNC` tokens back (they're needed
+   to support parsing older Python versions for :func:`ast.parse` with
+   ``feature_version`` set to 6 or lower).
@@ -18,7 +18,7 @@ decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
 decorated: decorators (classdef | funcdef | async_funcdef)
 
-async_funcdef: 'async' funcdef
+async_funcdef: ASYNC funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' [TYPE_COMMENT] func_body_suite
 
 parameters: '(' [typedargslist] ')'
@@ -70,7 +70,7 @@ nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
 assert_stmt: 'assert' test [',' test]
 
 compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: 'async' (funcdef | with_stmt | for_stmt)
+async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
 if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite]
 while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite]
 for_stmt: 'for' exprlist 'in' testlist ':' [TYPE_COMMENT] suite ['else' ':' suite]
@@ -106,7 +106,7 @@ arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'@'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
 power: atom_expr ['**' factor]
-atom_expr: ['await'] atom trailer*
+atom_expr: [AWAIT] atom trailer*
 atom: ('(' [yield_expr|testlist_comp] ')' |
        '[' [testlist_comp] ']' |
        '{' [dictorsetmaker] '}' |
@@ -144,7 +144,7 @@ argument: ( test [comp_for] |
 
 comp_iter: comp_for | comp_if
 sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
-comp_for: ['async'] sync_comp_for
+comp_for: [ASYNC] sync_comp_for
 comp_if: 'if' test_nocond [comp_iter]
 
 # not used in grammar, but may appear in "node" passed from Parser to Compiler
@@ -55,6 +55,8 @@ ELLIPSIS '...'
 COLONEQUAL ':='
 
 OP
+AWAIT
+ASYNC
 TYPE_IGNORE
 TYPE_COMMENT
 ERRORTOKEN
@@ -703,6 +703,7 @@ type_ignore_ty _Py_TypeIgnore(int lineno, PyArena *arena);
 
 PyObject* PyAST_mod2obj(mod_ty t);
 mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);
+mod_ty PyAST_obj2mod_ex(PyObject* ast, PyArena* arena, int mode, int feature_version);
 int PyAST_Check(PyObject* obj);
 
 #ifdef __cplusplus
@@ -27,6 +27,7 @@ PyAPI_FUNC(PyCodeObject *) PyNode_Compile(struct _node *, const char *);
 #ifndef Py_LIMITED_API
 typedef struct {
     int cf_flags;  /* bitmask of CO_xxx flags relevant to future */
+    int cf_feature_version;  /* minor Python version (PyCF_ONLY_AST) */
 } PyCompilerFlags;
 #endif
 
@@ -35,6 +35,7 @@ typedef struct {
 #define PyPARSE_IGNORE_COOKIE 0x0010
 #define PyPARSE_BARRY_AS_BDFL 0x0020
 #define PyPARSE_TYPE_COMMENTS 0x0040
+#define PyPARSE_ASYNC_HACKS 0x0080
 
 PyAPI_FUNC(node *) PyParser_ParseString(const char *, grammar *, int,
                                         perrdetail *);
@@ -65,10 +65,12 @@ extern "C" {
 #define ELLIPSIS 52
 #define COLONEQUAL 53
 #define OP 54
-#define TYPE_IGNORE 55
-#define TYPE_COMMENT 56
-#define ERRORTOKEN 57
-#define N_TOKENS 61
+#define AWAIT 55
+#define ASYNC 56
+#define TYPE_IGNORE 57
+#define TYPE_COMMENT 58
+#define ERRORTOKEN 59
+#define N_TOKENS 63
 #define NT_OFFSET 256
 
 /* Special definitions for cooperation with parser */
@@ -27,7 +27,8 @@
 from _ast import *
 
 
-def parse(source, filename='<unknown>', mode='exec', *, type_comments=False):
+def parse(source, filename='<unknown>', mode='exec', *,
+          type_comments=False, feature_version=-1):
     """
     Parse the source into an AST node.
     Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
@@ -36,7 +37,8 @@ def parse(source, filename='<unknown>', mode='exec', *, type_comments=False):
     flags = PyCF_ONLY_AST
     if type_comments:
         flags |= PyCF_TYPE_COMMENTS
-    return compile(source, filename, mode, flags)
+    return compile(source, filename, mode, flags,
+                   feature_version=feature_version)
 
 
 def literal_eval(node_or_string):
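As the hunk above shows, ``ast.parse()`` simply forwards ``feature_version`` to ``compile()``; the builtin honours the (hidden) keyword only together with ``PyCF_ONLY_AST`` (see the ``builtin_compile`` hunks further down). A small sketch, not part of the patch, assuming the patched interpreter:

    import ast

    source = "f'{1 + 1}'\n"

    # Accepted on the default (current) grammar.
    compile(source, "<demo>", "exec", ast.PyCF_ONLY_AST, feature_version=7)

    # Rejected when asking for the 3.5 grammar: f-strings are a 3.6 feature.
    try:
        compile(source, "<demo>", "exec", ast.PyCF_ONLY_AST, feature_version=5)
    except SyntaxError as err:
        print(err)   # Format strings are only supported in Python 3.6 and greater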
@@ -20,8 +20,6 @@ kwlist = [
     'and',
     'as',
     'assert',
-    'async',
-    'await',
     'break',
     'class',
     'continue',
@@ -52,6 +50,10 @@ kwlist = [
 #--end keywords--
 ]
 
+kwlist.append('async')
+kwlist.append('await')
+kwlist.sort()
+
 iskeyword = frozenset(kwlist).__contains__
 
 def main():
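With ``async`` and ``await`` turned into dedicated ``ASYNC``/``AWAIT`` tokens, the generated part of ``kwlist`` no longer contains them, so the module appends them by hand and re-sorts. The public behaviour is unchanged; for example:

    import keyword

    print(keyword.iskeyword("async"), keyword.iskeyword("await"))  # True True
    print(keyword.kwlist == sorted(keyword.kwlist))                # True: list stays sorted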
@@ -916,7 +916,7 @@ class STObjectTestCase(unittest.TestCase):
                 return (n + 3) & ~3
             return 1 << (n - 1).bit_length()
 
-        basesize = support.calcobjsize('Pii')
+        basesize = support.calcobjsize('Piii')
        nodesize = struct.calcsize('hP3iP0h2i')
        def sizeofchildren(node):
            if node is None:
@@ -1,4 +1,5 @@
 import ast
+import sys
 import unittest
 
 
@@ -20,6 +21,29 @@ async def bar():  # type: () -> int
     return await bar()
 """
 
+asyncvar = """\
+async = 12
+await = 13
+"""
+
+asynccomp = """\
+async def foo(xs):
+    [x async for x in xs]
+"""
+
+matmul = """\
+a = b @ c
+"""
+
+fstring = """\
+a = 42
+f"{a}"
+"""
+
+underscorednumber = """\
+a = 42_42_42
+"""
+
 redundantdef = """\
 def foo():  # type: () -> int
     # type: () -> str
@@ -155,80 +179,117 @@ def favk(
 
 class TypeCommentTests(unittest.TestCase):
 
-    def parse(self, source):
-        return ast.parse(source, type_comments=True)
+    lowest = 4  # Lowest minor version supported
+    highest = sys.version_info[1]  # Highest minor version
+
+    def parse(self, source, feature_version=highest):
+        return ast.parse(source, type_comments=True,
+                         feature_version=feature_version)
+
+    def parse_all(self, source, minver=lowest, maxver=highest, expected_regex=""):
+        for feature_version in range(self.lowest, self.highest + 1):
+            if minver <= feature_version <= maxver:
+                try:
+                    yield self.parse(source, feature_version)
+                except SyntaxError as err:
+                    raise SyntaxError(str(err) + f" feature_version={feature_version}")
+            else:
+                with self.assertRaisesRegex(SyntaxError, expected_regex,
+                                            msg=f"feature_version={feature_version}"):
+                    self.parse(source, feature_version)
 
     def classic_parse(self, source):
         return ast.parse(source)
 
     def test_funcdef(self):
-        tree = self.parse(funcdef)
+        for tree in self.parse_all(funcdef):
             self.assertEqual(tree.body[0].type_comment, "() -> int")
             self.assertEqual(tree.body[1].type_comment, "() -> None")
         tree = self.classic_parse(funcdef)
         self.assertEqual(tree.body[0].type_comment, None)
         self.assertEqual(tree.body[1].type_comment, None)
 
     def test_asyncdef(self):
-        tree = self.parse(asyncdef)
+        for tree in self.parse_all(asyncdef, minver=5):
             self.assertEqual(tree.body[0].type_comment, "() -> int")
             self.assertEqual(tree.body[1].type_comment, "() -> int")
         tree = self.classic_parse(asyncdef)
         self.assertEqual(tree.body[0].type_comment, None)
         self.assertEqual(tree.body[1].type_comment, None)
 
+    def test_asyncvar(self):
+        for tree in self.parse_all(asyncvar, maxver=6):
+            pass
+
+    def test_asynccomp(self):
+        for tree in self.parse_all(asynccomp, minver=6):
+            pass
+
+    def test_matmul(self):
+        for tree in self.parse_all(matmul, minver=5):
+            pass
+
+    def test_fstring(self):
+        for tree in self.parse_all(fstring, minver=6):
+            pass
+
+    def test_underscorednumber(self):
+        for tree in self.parse_all(underscorednumber, minver=6):
+            pass
+
     def test_redundantdef(self):
-        with self.assertRaisesRegex(SyntaxError, "^Cannot have two type comments on def"):
-            tree = self.parse(redundantdef)
+        for tree in self.parse_all(redundantdef, maxver=0,
+                                   expected_regex="^Cannot have two type comments on def"):
+            pass
 
     def test_nonasciidef(self):
-        tree = self.parse(nonasciidef)
+        for tree in self.parse_all(nonasciidef):
             self.assertEqual(tree.body[0].type_comment, "() -> àçčéñt")
 
     def test_forstmt(self):
-        tree = self.parse(forstmt)
+        for tree in self.parse_all(forstmt):
             self.assertEqual(tree.body[0].type_comment, "int")
         tree = self.classic_parse(forstmt)
         self.assertEqual(tree.body[0].type_comment, None)
 
     def test_withstmt(self):
-        tree = self.parse(withstmt)
+        for tree in self.parse_all(withstmt):
             self.assertEqual(tree.body[0].type_comment, "int")
         tree = self.classic_parse(withstmt)
         self.assertEqual(tree.body[0].type_comment, None)
 
     def test_vardecl(self):
-        tree = self.parse(vardecl)
+        for tree in self.parse_all(vardecl):
             self.assertEqual(tree.body[0].type_comment, "int")
         tree = self.classic_parse(vardecl)
         self.assertEqual(tree.body[0].type_comment, None)
 
     def test_ignores(self):
-        tree = self.parse(ignores)
+        for tree in self.parse_all(ignores):
             self.assertEqual([ti.lineno for ti in tree.type_ignores], [2, 5])
         tree = self.classic_parse(ignores)
         self.assertEqual(tree.type_ignores, [])
 
     def test_longargs(self):
-        tree = self.parse(longargs)
+        for tree in self.parse_all(longargs):
             for t in tree.body:
                 # The expected args are encoded in the function name
                 todo = set(t.name[1:])
                 self.assertEqual(len(t.args.args),
                                  len(todo) - bool(t.args.vararg) - bool(t.args.kwarg))
                 self.assertTrue(t.name.startswith('f'), t.name)
                 for c in t.name[1:]:
                     todo.remove(c)
                     if c == 'v':
                         arg = t.args.vararg
                     elif c == 'k':
                         arg = t.args.kwarg
                     else:
                         assert 0 <= ord(c) - ord('a') < len(t.args.args)
                         arg = t.args.args[ord(c) - ord('a')]
                     self.assertEqual(arg.arg, c)  # That's the argument name
                     self.assertEqual(arg.type_comment, arg.arg.upper())
                 assert not todo
         tree = self.classic_parse(longargs)
         for t in tree.body:
             for arg in t.args.args + [t.args.vararg, t.args.kwarg]:
@@ -247,8 +308,8 @@ class TypeCommentTests(unittest.TestCase):
 
         def check_both_ways(source):
             ast.parse(source, type_comments=False)
-            with self.assertRaises(SyntaxError):
-                ast.parse(source, type_comments=True)
+            for tree in self.parse_all(source, maxver=0):
+                pass
 
         check_both_ways("pass # type: int\n")
         check_both_ways("foo() # type: int\n")
@@ -58,14 +58,16 @@ RARROW = 51
 ELLIPSIS = 52
 COLONEQUAL = 53
 OP = 54
-TYPE_IGNORE = 55
-TYPE_COMMENT = 56
+AWAIT = 55
+ASYNC = 56
+TYPE_IGNORE = 57
+TYPE_COMMENT = 58
 # These aren't used by the C tokenizer but are needed for tokenize.py
-ERRORTOKEN = 57
-COMMENT = 58
-NL = 59
-ENCODING = 60
-N_TOKENS = 61
+ERRORTOKEN = 59
+COMMENT = 60
+NL = 61
+ENCODING = 62
+N_TOKENS = 63
 # Special definitions for cooperation with parser
 NT_OFFSET = 256
 
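A quick check of the renumbering as of this commit (later releases may renumber again), using the module's ``tok_name`` mapping:

    import token

    print(token.AWAIT, token.tok_name[token.AWAIT])   # 55 AWAIT
    print(token.ASYNC, token.tok_name[token.ASYNC])   # 56 ASYNC
    print(token.ERRORTOKEN, token.N_TOKENS)           # 59 63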
@@ -0,0 +1,7 @@
+Add a ``feature_version`` flag to ``ast.parse()`` (documented) and
+``compile()`` (hidden) that allows tweaking the parser to support older
+versions of the grammar. In particular, if ``feature_version`` is 5 or 6,
+the hacks for the ``async`` and ``await`` keyword from PEP 492 are
+reinstated. (For 7 or higher, these are unconditionally treated as keywords,
+but they are still special tokens rather than ``NAME`` tokens that the
+parser driver recognizes.)
@@ -799,7 +799,7 @@ pymain_run_python(PyInterpreterState *interp, int *exitcode)
         Py_DECREF(path0);
     }
 
-    PyCompilerFlags cf = {.cf_flags = 0};
+    PyCompilerFlags cf = {.cf_flags = 0, .cf_feature_version = PY_MINOR_VERSION};
 
     pymain_header(config);
     pymain_import_readline(config);
@@ -341,6 +341,7 @@ parser_newstobject(node *st, int type)
         o->st_node = st;
         o->st_type = type;
         o->st_flags.cf_flags = 0;
+        o->st_flags.cf_feature_version = PY_MINOR_VERSION;
     }
     else {
         PyNode_Free(st);
@@ -584,8 +585,10 @@ parser_do_parse(PyObject *args, PyObject *kw, const char *argspec, int type)
 
     if (n) {
         res = parser_newstobject(n, type);
-        if (res)
+        if (res) {
             ((PyST_Object *)res)->st_flags.cf_flags = flags & PyCF_MASK;
+            ((PyST_Object *)res)->st_flags.cf_feature_version = PY_MINOR_VERSION;
+        }
     }
     else {
         PyParser_SetError(&err);
@@ -1188,6 +1188,11 @@ PyObject* PyAST_mod2obj(mod_ty t)
 
 /* mode is 0 for "exec", 1 for "eval" and 2 for "single" input */
 mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode)
+{
+    return PyAST_obj2mod_ex(ast, arena, mode, PY_MINOR_VERSION);
+}
+
+mod_ty PyAST_obj2mod_ex(PyObject* ast, PyArena* arena, int mode, int feature_version)
 {
     mod_ty res;
     PyObject *req_type[3];
@@ -1269,6 +1274,7 @@ def main(srcfile, dump_module=False):
     f.write("\n")
     f.write("PyObject* PyAST_mod2obj(mod_ty t);\n")
     f.write("mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n")
+    f.write("mod_ty PyAST_obj2mod_ex(PyObject* ast, PyArena* arena, int mode, int feature_version);\n")
     f.write("int PyAST_Check(PyObject* obj);\n")
     f.write('\n')
     f.write('#ifdef __cplusplus\n')
@@ -101,6 +101,8 @@ PyParser_ParseStringObject(const char *s, PyObject *filename,
 
     Py_INCREF(err_ret->filename);
     tok->filename = err_ret->filename;
+    if (*flags & PyPARSE_ASYNC_HACKS)
+        tok->async_hacks = 1;
     return parsetok(tok, g, start, err_ret, flags);
 }
 
@@ -61,6 +61,8 @@ const char * const _PyParser_TokenNames[] = {
     "ELLIPSIS",
     "COLONEQUAL",
     "OP",
+    "AWAIT",
+    "ASYNC",
     "TYPE_IGNORE",
     "TYPE_COMMENT",
     "<ERRORTOKEN>",
@@ -84,6 +84,11 @@ tok_new(void)
     tok->decoding_buffer = NULL;
     tok->type_comments = 0;
 
+    tok->async_hacks = 0;
+    tok->async_def = 0;
+    tok->async_def_indent = 0;
+    tok->async_def_nl = 0;
+
     return tok;
 }
 
@@ -1196,6 +1201,31 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
         }
     }
 
+    /* Peek ahead at the next character */
+    c = tok_nextc(tok);
+    tok_backup(tok, c);
+    /* Check if we are closing an async function */
+    if (tok->async_def
+        && !blankline
+        /* Due to some implementation artifacts of type comments,
+         * a TYPE_COMMENT at the start of a function won't set an
+         * indentation level and it will produce a NEWLINE after it.
+         * To avoid spuriously ending an async function due to this,
+         * wait until we have some non-newline char in front of us. */
+        && c != '\n'
+        && tok->level == 0
+        /* There was a NEWLINE after ASYNC DEF,
+           so we're past the signature. */
+        && tok->async_def_nl
+        /* Current indentation level is less than where
+           the async function was defined */
+        && tok->async_def_indent >= tok->indent)
+    {
+        tok->async_def = 0;
+        tok->async_def_indent = 0;
+        tok->async_def_nl = 0;
+    }
+
  again:
     tok->start = NULL;
     /* Skip spaces */
@@ -1310,6 +1340,50 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
         *p_start = tok->start;
         *p_end = tok->cur;
 
+        /* async/await parsing block. */
+        if (tok->cur - tok->start == 5 && tok->start[0] == 'a') {
+            /* May be an 'async' or 'await' token.  For Python 3.7 or
+               later we recognize them unconditionally.  For Python
+               3.5 or 3.6 we recognize 'async' in front of 'def', and
+               either one inside of 'async def'.  (Technically we
+               shouldn't recognize these at all for 3.4 or earlier,
+               but there's no *valid* Python 3.4 code that would be
+               rejected, and async functions will be rejected in a
+               later phase.) */
+            if (!tok->async_hacks || tok->async_def) {
+                /* Always recognize the keywords. */
+                if (memcmp(tok->start, "async", 5) == 0) {
+                    return ASYNC;
+                }
+                if (memcmp(tok->start, "await", 5) == 0) {
+                    return AWAIT;
+                }
+            }
+            else if (memcmp(tok->start, "async", 5) == 0) {
+                /* The current token is 'async'.
+                   Look ahead one token to see if that is 'def'. */
+
+                struct tok_state ahead_tok;
+                char *ahead_tok_start = NULL, *ahead_tok_end = NULL;
+                int ahead_tok_kind;
+
+                memcpy(&ahead_tok, tok, sizeof(ahead_tok));
+                ahead_tok_kind = tok_get(&ahead_tok, &ahead_tok_start,
+                                         &ahead_tok_end);
+
+                if (ahead_tok_kind == NAME
+                    && ahead_tok.cur - ahead_tok.start == 3
+                    && memcmp(ahead_tok.start, "def", 3) == 0)
+                {
+                    /* The next token is going to be 'def', so instead of
+                       returning a plain NAME token, return ASYNC. */
+                    tok->async_def_indent = tok->indent;
+                    tok->async_def = 1;
+                    return ASYNC;
+                }
+            }
+        }
+
         return NAME;
     }
 
@@ -1322,6 +1396,11 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
         *p_start = tok->start;
         *p_end = tok->cur - 1; /* Leave '\n' out of the string */
         tok->cont_line = 0;
+        if (tok->async_def) {
+            /* We're somewhere inside an 'async def' function, and
+               we've encountered a NEWLINE after its signature. */
+            tok->async_def_nl = 1;
+        }
         return NEWLINE;
     }
 
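The effect of the ``async_def`` bookkeeping above, sketched from Python (behaviour inferred from the hunks, assuming the patched interpreter): on the 3.5/3.6 grammars ``await`` stays a plain ``NAME`` in ordinary code but becomes the ``AWAIT`` keyword once the tokenizer has entered an ``async def`` body:

    import ast

    # Plain function, 3.6 grammar: "await" is just a name.
    ast.parse("def f():\n    await = 1\n", feature_version=6)

    # Inside "async def" it is tokenized as AWAIT, so the same assignment fails.
    try:
        ast.parse("async def f():\n    await = 1\n", feature_version=6)
    except SyntaxError:
        print("'await' is a keyword inside 'async def', even on the 3.6 grammar")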
@@ -64,6 +64,13 @@ struct tok_state {
     const char* input;    /* Tokenizer's newline translated copy of the string. */
 
     int type_comments;    /* Whether to look for type comments */
+
+    /* async/await related fields (still needed depending on feature_version) */
+    int async_hacks;      /* =1 if async/await aren't always keywords */
+    int async_def;        /* =1 if tokens are inside an 'async def' body. */
+    int async_def_indent; /* Indentation level of the outermost 'async def'. */
+    int async_def_nl;     /* =1 if the outermost 'async def' had at least one
+                             NEWLINE token after it. */
 };
 
 extern struct tok_state *PyTokenizer_FromString(const char *, int);
@@ -8899,6 +8899,11 @@ PyObject* PyAST_mod2obj(mod_ty t)
 
 /* mode is 0 for "exec", 1 for "eval" and 2 for "single" input */
 mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode)
+{
+    return PyAST_obj2mod_ex(ast, arena, mode, PY_MINOR_VERSION);
+}
+
+mod_ty PyAST_obj2mod_ex(PyObject* ast, PyArena* arena, int mode, int feature_version)
 {
     mod_ty res;
     PyObject *req_type[3];

Python/ast.c
@@ -564,6 +564,7 @@ struct compiling {
     PyArena *c_arena; /* Arena for allocating memory. */
     PyObject *c_filename; /* filename */
     PyObject *c_normalize; /* Normalization function from unicodedata. */
+    int c_feature_version; /* Latest minor version of Python for allowed features */
 };
 
 static asdl_seq *seq_for_testlist(struct compiling *, const node *);
@@ -783,6 +784,7 @@ PyAST_FromNodeObject(const node *n, PyCompilerFlags *flags,
     /* borrowed reference */
     c.c_filename = filename;
     c.c_normalize = NULL;
+    c.c_feature_version = flags->cf_feature_version;
 
     if (TYPE(n) == encoding_decl)
         n = CHILD(n, 0);
@@ -955,7 +957,7 @@ PyAST_FromNode(const node *n, PyCompilerFlags *flags, const char *filename_str,
 */
 
 static operator_ty
-get_operator(const node *n)
+get_operator(struct compiling *c, const node *n)
 {
     switch (TYPE(n)) {
         case VBAR:
@@ -975,6 +977,11 @@ get_operator(const node *n)
         case STAR:
             return Mult;
         case AT:
+            if (c->c_feature_version < 5) {
+                ast_error(c, n,
+                          "The '@' operator is only supported in Python 3.5 and greater");
+                return (operator_ty)0;
+            }
             return MatMult;
         case SLASH:
             return Div;
@@ -1209,6 +1216,11 @@ ast_for_augassign(struct compiling *c, const node *n)
             else
                 return Mult;
         case '@':
+            if (c->c_feature_version < 5) {
+                ast_error(c, n,
+                          "The '@' operator is only supported in Python 3.5 and greater");
+                return (operator_ty)0;
+            }
             return MatMult;
         default:
             PyErr_Format(PyExc_SystemError, "invalid augassign: %s", STR(n));
@@ -1518,7 +1530,7 @@ ast_for_arguments(struct compiling *c, const node *n)
                 }
                 else if (found_default) {
                     ast_error(c, n,
                               "non-default argument follows default argument");
                     return NULL;
                 }
                 arg = ast_for_arg(c, ch);
@@ -1719,6 +1731,12 @@ ast_for_funcdef_impl(struct compiling *c, const node *n0,
     node *tc;
     string type_comment = NULL;
 
+    if (is_async && c->c_feature_version < 5) {
+        ast_error(c, n,
+                  "Async functions are only supported in Python 3.5 and greater");
+        return NULL;
+    }
+
     REQ(n, funcdef);
 
     name = NEW_IDENTIFIER(CHILD(n, name_i));
@@ -1772,10 +1790,9 @@ ast_for_funcdef_impl(struct compiling *c, const node *n0,
 static stmt_ty
 ast_for_async_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
 {
-    /* async_funcdef: 'async' funcdef */
+    /* async_funcdef: ASYNC funcdef */
     REQ(n, async_funcdef);
-    REQ(CHILD(n, 0), NAME);
-    assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+    REQ(CHILD(n, 0), ASYNC);
     REQ(CHILD(n, 1), funcdef);
 
     return ast_for_funcdef_impl(c, n, decorator_seq,
@@ -1794,10 +1811,9 @@ ast_for_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq)
 static stmt_ty
 ast_for_async_stmt(struct compiling *c, const node *n)
 {
-    /* async_stmt: 'async' (funcdef | with_stmt | for_stmt) */
+    /* async_stmt: ASYNC (funcdef | with_stmt | for_stmt) */
     REQ(n, async_stmt);
-    REQ(CHILD(n, 0), NAME);
-    assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+    REQ(CHILD(n, 0), ASYNC);
 
     switch (TYPE(CHILD(n, 1))) {
         case funcdef:
@@ -1948,8 +1964,7 @@ count_comp_fors(struct compiling *c, const node *n)
         n_fors++;
         REQ(n, comp_for);
         if (NCH(n) == 2) {
-            REQ(CHILD(n, 0), NAME);
-            assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+            REQ(CHILD(n, 0), ASYNC);
             n = CHILD(n, 1);
         }
         else if (NCH(n) == 1) {
@@ -2034,8 +2049,7 @@ ast_for_comprehension(struct compiling *c, const node *n)
 
         if (NCH(n) == 2) {
             is_async = 1;
-            REQ(CHILD(n, 0), NAME);
-            assert(strcmp(STR(CHILD(n, 0)), "async") == 0);
+            REQ(CHILD(n, 0), ASYNC);
             sync_n = CHILD(n, 1);
         }
         else {
@@ -2043,6 +2057,13 @@ ast_for_comprehension(struct compiling *c, const node *n)
         }
         REQ(sync_n, sync_comp_for);
 
+        /* Async comprehensions only allowed in Python 3.6 and greater */
+        if (is_async && c->c_feature_version < 6) {
+            ast_error(c, n,
+                      "Async comprehensions are only supported in Python 3.6 and greater");
+            return NULL;
+        }
+
         for_ch = CHILD(sync_n, 1);
         t = ast_for_exprlist(c, for_ch, Store);
         if (!t)
@@ -2337,7 +2358,15 @@ ast_for_atom(struct compiling *c, const node *n)
         return str;
     }
     case NUMBER: {
-        PyObject *pynum = parsenumber(c, STR(ch));
+        PyObject *pynum;
+        /* Underscores in numeric literals are only allowed in Python 3.6 or greater */
+        /* Check for underscores here rather than in parse_number so we can report a line number on error */
+        if (c->c_feature_version < 6 && strchr(STR(ch), '_') != NULL) {
+            ast_error(c, ch,
+                      "Underscores in numeric literals are only supported in Python 3.6 and greater");
+            return NULL;
+        }
+        pynum = parsenumber(c, STR(ch));
         if (!pynum)
             return NULL;
 
@@ -2420,8 +2449,8 @@ ast_for_atom(struct compiling *c, const node *n)
             TYPE(CHILD(ch, 3 - is_dict)) == comp_for) {
             /* It's a dictionary comprehension. */
             if (is_dict) {
-                ast_error(c, n, "dict unpacking cannot be used in "
-                          "dict comprehension");
+                ast_error(c, n,
+                          "dict unpacking cannot be used in dict comprehension");
                 return NULL;
             }
             res = ast_for_dictcomp(c, ch);
@@ -2524,7 +2553,7 @@ ast_for_binop(struct compiling *c, const node *n)
     if (!expr2)
         return NULL;
 
-    newoperator = get_operator(CHILD(n, 1));
+    newoperator = get_operator(c, CHILD(n, 1));
     if (!newoperator)
         return NULL;
 
@@ -2539,7 +2568,7 @@ ast_for_binop(struct compiling *c, const node *n)
         expr_ty tmp_result, tmp;
         const node* next_oper = CHILD(n, i * 2 + 1);
 
-        newoperator = get_operator(next_oper);
+        newoperator = get_operator(c, next_oper);
         if (!newoperator)
             return NULL;
 
@@ -2678,7 +2707,12 @@ ast_for_atom_expr(struct compiling *c, const node *n)
     REQ(n, atom_expr);
     nch = NCH(n);
 
-    if (TYPE(CHILD(n, 0)) == NAME && strcmp(STR(CHILD(n, 0)), "await") == 0) {
+    if (TYPE(CHILD(n, 0)) == AWAIT) {
+        if (c->c_feature_version < 5) {
+            ast_error(c, n,
+                      "Await expressions are only supported in Python 3.5 and greater");
+            return NULL;
+        }
         start = 1;
         assert(nch > 1);
     }
@@ -2775,7 +2809,7 @@ ast_for_expr(struct compiling *c, const node *n)
        term: factor (('*'|'@'|'/'|'%'|'//') factor)*
        factor: ('+'|'-'|'~') factor | power
        power: atom_expr ['**' factor]
-      atom_expr: ['await'] atom trailer*
+      atom_expr: [AWAIT] atom trailer*
        yield_expr: 'yield' [yield_arg]
     */
 
@@ -3233,6 +3267,13 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
         node *deep, *ann = CHILD(n, 1);
         int simple = 1;
 
+        /* AnnAssigns are only allowed in Python 3.6 or greater */
+        if (c->c_feature_version < 6) {
+            ast_error(c, ch,
+                      "Variable annotation syntax is only supported in Python 3.6 and greater");
+            return NULL;
+        }
+
         /* we keep track of parens to qualify (x) as expression not name */
         deep = ch;
         while (NCH(deep) == 1) {
@@ -4050,6 +4091,13 @@ ast_for_for_stmt(struct compiling *c, const node *n0, bool is_async)
     int end_lineno, end_col_offset;
     int has_type_comment;
     string type_comment;
+
+    if (is_async && c->c_feature_version < 5) {
+        ast_error(c, n,
+                  "Async for loops are only supported in Python 3.5 and greater");
+        return NULL;
+    }
+
     /* for_stmt: 'for' exprlist 'in' testlist ':' [TYPE_COMMENT] suite ['else' ':' suite] */
     REQ(n, for_stmt);
 
@@ -4278,6 +4326,12 @@ ast_for_with_stmt(struct compiling *c, const node *n0, bool is_async)
     asdl_seq *items, *body;
     string type_comment;
 
+    if (is_async && c->c_feature_version < 5) {
+        ast_error(c, n,
+                  "Async with statements are only supported in Python 3.5 and greater");
+        return NULL;
+    }
+
     REQ(n, with_stmt);
 
     has_type_comment = TYPE(CHILD(n, NCH(n) - 2)) == TYPE_COMMENT;
@@ -4768,6 +4822,7 @@ fstring_compile_expr(const char *expr_start, const char *expr_end,
     str[len+2] = 0;
 
     cf.cf_flags = PyCF_ONLY_AST;
+    cf.cf_feature_version = PY_MINOR_VERSION;
     mod_n = PyParser_SimpleParseStringFlagsFilename(str, "<fstring>",
                                                     Py_eval_input, 0);
     if (!mod_n) {
@@ -5568,6 +5623,13 @@ parsestr(struct compiling *c, const node *n, int *bytesmode, int *rawmode,
             }
         }
     }
+
+    /* fstrings are only allowed in Python 3.6 and greater */
+    if (fmode && c->c_feature_version < 6) {
+        ast_error(c, n, "Format strings are only supported in Python 3.6 and greater");
+        return -1;
+    }
+
     if (fmode && *bytesmode) {
         PyErr_BadInternalCall();
         return -1;
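The checks added throughout this file gate individual constructs on ``c_feature_version``. A sketch, not part of the patch, exercising a few of them through ``ast.parse()`` on a patched interpreter (the error messages are the ones written in the hunks above):

    import ast

    cases = [
        ("a = b @ c\n", 5),             # matrix-multiplication operator: 3.5+
        ("async def f(): pass\n", 5),   # async functions: 3.5+
        ("x: int = 0\n", 6),            # variable annotations: 3.6+
        ("a = 42_42\n", 6),             # underscores in numeric literals: 3.6+
        ("f'{a}'\n", 6),                # f-strings: 3.6+
    ]

    for source, minver in cases:
        ast.parse(source, feature_version=minver)           # accepted
        try:
            ast.parse(source, feature_version=minver - 1)   # one version too old
        except SyntaxError as err:
            print(f"{source.strip()!r}: {err}")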
@@ -745,6 +745,7 @@ compile as builtin_compile
     flags: int = 0
     dont_inherit: bool(accept={int}) = False
     optimize: int = -1
+    feature_version: int = -1
 
 Compile source into a code object that can be executed by exec() or eval().
 
@@ -763,8 +764,8 @@ in addition to any features explicitly specified.
 static PyObject *
 builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename,
                      const char *mode, int flags, int dont_inherit,
-                     int optimize)
-/*[clinic end generated code: output=1fa176e33452bb63 input=0ff726f595eb9fcd]*/
+                     int optimize, int feature_version)
+/*[clinic end generated code: output=b0c09c84f116d3d7 input=5fcc30651a6acaa9]*/
 {
     PyObject *source_copy;
     const char *str;
@@ -775,6 +776,10 @@ builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename,
     PyObject *result;
 
     cf.cf_flags = flags | PyCF_SOURCE_IS_UTF8;
+    cf.cf_feature_version = PY_MINOR_VERSION;
+    if (feature_version >= 0 && (flags & PyCF_ONLY_AST)) {
+        cf.cf_feature_version = feature_version;
+    }
 
     if (flags &
         ~(PyCF_MASK | PyCF_MASK_OBSOLETE | PyCF_DONT_IMPLY_DEDENT | PyCF_ONLY_AST | PyCF_TYPE_COMMENTS))
@@ -981,6 +986,7 @@ builtin_eval_impl(PyObject *module, PyObject *source, PyObject *globals,
     }
 
     cf.cf_flags = PyCF_SOURCE_IS_UTF8;
+    cf.cf_feature_version = PY_MINOR_VERSION;
     str = source_as_string(source, "eval", "string, bytes or code", &cf, &source_copy);
     if (str == NULL)
         return NULL;
@@ -1068,6 +1074,7 @@ builtin_exec_impl(PyObject *module, PyObject *source, PyObject *globals,
     const char *str;
     PyCompilerFlags cf;
     cf.cf_flags = PyCF_SOURCE_IS_UTF8;
+    cf.cf_feature_version = PY_MINOR_VERSION;
     str = source_as_string(source, "exec",
                            "string, bytes or code", &cf,
                            &source_copy);
@@ -151,7 +151,7 @@ exit:
 
 PyDoc_STRVAR(builtin_compile__doc__,
 "compile($module, /, source, filename, mode, flags=0,\n"
-"        dont_inherit=False, optimize=-1)\n"
+"        dont_inherit=False, optimize=-1, feature_version=-1)\n"
 "--\n"
 "\n"
 "Compile source into a code object that can be executed by exec() or eval().\n"
@@ -173,26 +173,27 @@ PyDoc_STRVAR(builtin_compile__doc__,
 static PyObject *
 builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename,
                      const char *mode, int flags, int dont_inherit,
-                     int optimize);
+                     int optimize, int feature_version);
 
 static PyObject *
 builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
 {
     PyObject *return_value = NULL;
-    static const char * const _keywords[] = {"source", "filename", "mode", "flags", "dont_inherit", "optimize", NULL};
-    static _PyArg_Parser _parser = {"OO&s|iii:compile", _keywords, 0};
+    static const char * const _keywords[] = {"source", "filename", "mode", "flags", "dont_inherit", "optimize", "feature_version", NULL};
+    static _PyArg_Parser _parser = {"OO&s|iiii:compile", _keywords, 0};
     PyObject *source;
     PyObject *filename;
     const char *mode;
     int flags = 0;
     int dont_inherit = 0;
     int optimize = -1;
+    int feature_version = -1;
 
     if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
-        &source, PyUnicode_FSDecoder, &filename, &mode, &flags, &dont_inherit, &optimize)) {
+        &source, PyUnicode_FSDecoder, &filename, &mode, &flags, &dont_inherit, &optimize, &feature_version)) {
         goto exit;
     }
-    return_value = builtin_compile_impl(module, source, filename, mode, flags, dont_inherit, optimize);
+    return_value = builtin_compile_impl(module, source, filename, mode, flags, dont_inherit, optimize, feature_version);
 
 exit:
     return return_value;
@@ -754,4 +755,4 @@ builtin_issubclass(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
 exit:
     return return_value;
 }
-/*[clinic end generated code: output=54e5e33dcc2659e0 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=00b97a48ea49eaf2 input=a9049054013a1b77]*/
@@ -330,6 +330,7 @@ PyAST_CompileObject(mod_ty mod, PyObject *filename, PyCompilerFlags *flags,
         goto finally;
     if (!flags) {
         local_flags.cf_flags = 0;
+        local_flags.cf_feature_version = PY_MINOR_VERSION;
         flags = &local_flags;
     }
     merged = c.c_future->ff_features | flags->cf_flags;
@@ -106,7 +106,7 @@ static state states_5[3] = {
     {1, arcs_5_2},
 };
 static arc arcs_6_0[1] = {
-    {16, 1},
+    {38, 1},
 };
 static arc arcs_6_1[1] = {
     {56, 2},
@@ -120,7 +120,7 @@ static state states_6[3] = {
     {1, arcs_6_2},
 };
 static arc arcs_7_0[1] = {
-    {21, 1},
+    {19, 1},
 };
 static arc arcs_7_1[1] = {
     {40, 2},
@@ -583,7 +583,7 @@ static state states_19[2] = {
     {1, arcs_19_1},
 };
 static arc arcs_20_0[1] = {
-    {22, 1},
+    {20, 1},
 };
 static arc arcs_20_1[1] = {
     {98, 2},
@@ -597,7 +597,7 @@ static state states_20[3] = {
     {1, arcs_20_2},
 };
 static arc arcs_21_0[1] = {
-    {31, 1},
+    {29, 1},
 };
 static arc arcs_21_1[1] = {
     {0, 1},
@@ -621,7 +621,7 @@ static state states_22[2] = {
     {1, arcs_22_1},
 };
 static arc arcs_23_0[1] = {
-    {18, 1},
+    {16, 1},
 };
 static arc arcs_23_1[1] = {
     {0, 1},
@@ -631,7 +631,7 @@ static state states_23[2] = {
     {1, arcs_23_1},
 };
 static arc arcs_24_0[1] = {
-    {20, 1},
+    {18, 1},
 };
 static arc arcs_24_1[1] = {
     {0, 1},
@@ -641,7 +641,7 @@ static state states_24[2] = {
     {1, arcs_24_1},
 };
 static arc arcs_25_0[1] = {
-    {33, 1},
+    {31, 1},
 };
 static arc arcs_25_1[2] = {
     {80, 2},
@@ -666,14 +666,14 @@ static state states_26[2] = {
     {1, arcs_26_1},
 };
 static arc arcs_27_0[1] = {
-    {32, 1},
+    {30, 1},
 };
 static arc arcs_27_1[2] = {
     {60, 2},
     {0, 1},
 };
 static arc arcs_27_2[2] = {
-    {24, 3},
+    {22, 3},
     {0, 2},
 };
 static arc arcs_27_3[1] = {
@@ -701,7 +701,7 @@ static state states_28[2] = {
     {1, arcs_28_1},
 };
 static arc arcs_29_0[1] = {
-    {27, 1},
+    {25, 1},
 };
 static arc arcs_29_1[1] = {
     {106, 2},
@@ -715,7 +715,7 @@ static state states_29[3] = {
     {1, arcs_29_2},
 };
 static arc arcs_30_0[1] = {
-    {24, 1},
+    {22, 1},
 };
 static arc arcs_30_1[3] = {
     {107, 2},
@@ -725,11 +725,11 @@ static arc arcs_30_1[3] = {
 static arc arcs_30_2[4] = {
     {107, 2},
     {9, 2},
-    {27, 4},
+    {25, 4},
     {49, 3},
 };
 static arc arcs_30_3[1] = {
-    {27, 4},
+    {25, 4},
 };
 static arc arcs_30_4[3] = {
     {5, 5},
@@ -832,7 +832,7 @@ static state states_35[2] = {
     {2, arcs_35_1},
 };
 static arc arcs_36_0[1] = {
-    {25, 1},
+    {23, 1},
 };
 static arc arcs_36_1[1] = {
     {40, 2},
@@ -847,7 +847,7 @@ static state states_36[3] = {
     {2, arcs_36_2},
 };
 static arc arcs_37_0[1] = {
-    {29, 1},
+    {27, 1},
 };
 static arc arcs_37_1[1] = {
     {40, 2},
@@ -903,7 +903,7 @@ static state states_39[2] = {
     {1, arcs_39_1},
 };
 static arc arcs_40_0[1] = {
-    {16, 1},
+    {38, 1},
 };
 static arc arcs_40_1[3] = {
     {113, 2},
@@ -919,7 +919,7 @@ static state states_40[3] = {
     {1, arcs_40_2},
 };
 static arc arcs_41_0[1] = {
-    {26, 1},
+    {24, 1},
 };
 static arc arcs_41_1[1] = {
     {118, 2},
@@ -955,7 +955,7 @@ static state states_41[8] = {
     {1, arcs_41_7},
 };
 static arc arcs_42_0[1] = {
-    {35, 1},
+    {33, 1},
 };
 static arc arcs_42_1[1] = {
     {118, 2},
@@ -990,7 +990,7 @@ static state states_42[8] = {
     {1, arcs_42_7},
 };
 static arc arcs_43_0[1] = {
-    {23, 1},
+    {21, 1},
 };
 static arc arcs_43_1[1] = {
     {98, 2},
@@ -1038,7 +1038,7 @@ static state states_43[11] = {
     {1, arcs_43_10},
 };
 static arc arcs_44_0[1] = {
-    {34, 1},
+    {32, 1},
 };
 static arc arcs_44_1[1] = {
     {59, 2},
@@ -1097,7 +1097,7 @@ static state states_44[13] = {
     {2, arcs_44_12},
 };
 static arc arcs_45_0[1] = {
-    {36, 1},
+    {34, 1},
 };
 static arc arcs_45_1[1] = {
     {125, 2},
@@ -1218,7 +1218,7 @@ static arc arcs_50_1[1] = {
     {0, 1},
 };
 static arc arcs_50_2[2] = {
-    {26, 3},
+    {24, 3},
     {0, 2},
 };
 static arc arcs_50_3[1] = {
@@ -1250,7 +1250,7 @@ static state states_51[2] = {
     {1, arcs_51_1},
 };
 static arc arcs_52_0[1] = {
-    {28, 1},
+    {26, 1},
 };
 static arc arcs_52_1[2] = {
     {59, 2},
@@ -1273,7 +1273,7 @@ static state states_52[5] = {
     {1, arcs_52_4},
 };
 static arc arcs_53_0[1] = {
-    {28, 1},
+    {26, 1},
 };
 static arc arcs_53_1[2] = {
     {59, 2},
@@ -1318,7 +1318,7 @@ static state states_55[2] = {
     {2, arcs_55_1},
 };
 static arc arcs_56_0[2] = {
-    {30, 1},
+    {28, 1},
     {139, 2},
 };
 static arc arcs_56_1[1] = {
@@ -1353,13 +1353,13 @@ static arc arcs_58_0[10] = {
     {146, 1},
     {122, 1},
     {147, 2},
-    {30, 3},
+    {28, 3},
 };
 static arc arcs_58_1[1] = {
     {0, 1},
 };
 static arc arcs_58_2[2] = {
-    {30, 1},
+    {28, 1},
     {0, 2},
 };
 static arc arcs_58_3[1] = {
@@ -1460,7 +1460,7 @@ static state states_65[2] = {
 static arc arcs_66_0[4] = {
     {7, 1},
     {8, 1},
-    {39, 1},
+    {37, 1},
     {162, 2},
 };
 static arc arcs_66_1[1] = {
@@ -1494,7 +1494,7 @@ static state states_67[4] = {
     {1, arcs_67_3},
 };
 static arc arcs_68_0[2] = {
-    {17, 1},
+    {39, 1},
     {164, 2},
 };
 static arc arcs_68_1[1] = {
@@ -1516,7 +1516,7 @@ static arc arcs_69_0[10] = {
     {12, 2},
     {13, 2},
     {14, 3},
-    {38, 4},
+    {36, 4},
     {40, 2},
     {41, 2},
     {42, 5},
@@ -1788,7 +1788,7 @@ static state states_77[14] = {
     {1, arcs_77_13},
 };
 static arc arcs_78_0[1] = {
-    {19, 1},
+    {17, 1},
 };
 static arc arcs_78_1[1] = {
|
static arc arcs_78_1[1] = {
|
||||||
{40, 2},
|
{40, 2},
|
||||||
|
@ -1874,7 +1874,7 @@ static state states_81[2] = {
|
||||||
{1, arcs_81_1},
|
{1, arcs_81_1},
|
||||||
};
|
};
|
||||||
static arc arcs_82_0[1] = {
|
static arc arcs_82_0[1] = {
|
||||||
{23, 1},
|
{21, 1},
|
||||||
};
|
};
|
||||||
static arc arcs_82_1[1] = {
|
static arc arcs_82_1[1] = {
|
||||||
{98, 2},
|
{98, 2},
|
||||||
|
@ -1901,7 +1901,7 @@ static state states_82[6] = {
|
||||||
{1, arcs_82_5},
|
{1, arcs_82_5},
|
||||||
};
|
};
|
||||||
static arc arcs_83_0[2] = {
|
static arc arcs_83_0[2] = {
|
||||||
{16, 1},
|
{38, 1},
|
||||||
{177, 2},
|
{177, 2},
|
||||||
};
|
};
|
||||||
static arc arcs_83_1[1] = {
|
static arc arcs_83_1[1] = {
|
||||||
|
@ -1916,7 +1916,7 @@ static state states_83[3] = {
|
||||||
{1, arcs_83_2},
|
{1, arcs_83_2},
|
||||||
};
|
};
|
||||||
static arc arcs_84_0[1] = {
|
static arc arcs_84_0[1] = {
|
||||||
{26, 1},
|
{24, 1},
|
||||||
};
|
};
|
||||||
static arc arcs_84_1[1] = {
|
static arc arcs_84_1[1] = {
|
||||||
{133, 2},
|
{133, 2},
|
||||||
|
@ -1945,7 +1945,7 @@ static state states_85[2] = {
|
||||||
{1, arcs_85_1},
|
{1, arcs_85_1},
|
||||||
};
|
};
|
||||||
static arc arcs_86_0[1] = {
|
static arc arcs_86_0[1] = {
|
||||||
{37, 1},
|
{35, 1},
|
||||||
};
|
};
|
||||||
static arc arcs_86_1[2] = {
|
static arc arcs_86_1[2] = {
|
||||||
{179, 2},
|
{179, 2},
|
||||||
|
@ -1960,7 +1960,7 @@ static state states_86[3] = {
|
||||||
{1, arcs_86_2},
|
{1, arcs_86_2},
|
||||||
};
|
};
|
||||||
static arc arcs_87_0[2] = {
|
static arc arcs_87_0[2] = {
|
||||||
{24, 1},
|
{22, 1},
|
||||||
{80, 2},
|
{80, 2},
|
||||||
};
|
};
|
||||||
static arc arcs_87_1[1] = {
|
static arc arcs_87_1[1] = {
|
||||||
|
@ -2115,7 +2115,7 @@ static dfa dfas[92] = {
|
||||||
{257, "file_input", 0, 2, states_1,
|
{257, "file_input", 0, 2, states_1,
|
||||||
"\344\377\377\377\377\027\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\344\377\377\377\377\027\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{258, "eval_input", 0, 3, states_2,
|
{258, "eval_input", 0, 3, states_2,
|
||||||
"\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{259, "decorator", 0, 7, states_3,
|
{259, "decorator", 0, 7, states_3,
|
||||||
"\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{260, "decorators", 0, 2, states_4,
|
{260, "decorators", 0, 2, states_4,
|
||||||
|
@ -2123,9 +2123,9 @@ static dfa dfas[92] = {
|
||||||
{261, "decorated", 0, 3, states_5,
|
{261, "decorated", 0, 3, states_5,
|
||||||
"\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{262, "async_funcdef", 0, 3, states_6,
|
{262, "async_funcdef", 0, 3, states_6,
|
||||||
"\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{263, "funcdef", 0, 9, states_7,
|
{263, "funcdef", 0, 9, states_7,
|
||||||
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{264, "parameters", 0, 4, states_8,
|
{264, "parameters", 0, 4, states_8,
|
||||||
"\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{265, "typedargslist", 0, 22, states_9,
|
{265, "typedargslist", 0, 22, states_9,
|
||||||
|
@ -2139,39 +2139,39 @@ static dfa dfas[92] = {
|
||||||
{269, "stmt", 0, 2, states_13,
|
{269, "stmt", 0, 2, states_13,
|
||||||
"\340\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{270, "simple_stmt", 0, 4, states_14,
|
{270, "simple_stmt", 0, 4, states_14,
|
||||||
"\340\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{271, "small_stmt", 0, 2, states_15,
|
{271, "small_stmt", 0, 2, states_15,
|
||||||
"\340\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{272, "expr_stmt", 0, 6, states_16,
|
{272, "expr_stmt", 0, 6, states_16,
|
||||||
"\340\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{273, "annassign", 0, 5, states_17,
|
{273, "annassign", 0, 5, states_17,
|
||||||
"\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{274, "testlist_star_expr", 0, 3, states_18,
|
{274, "testlist_star_expr", 0, 3, states_18,
|
||||||
"\340\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{275, "augassign", 0, 2, states_19,
|
{275, "augassign", 0, 2, states_19,
|
||||||
"\000\000\000\000\000\000\000\000\000\000\340\377\003\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\000\000\000\000\000\340\377\003\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{276, "del_stmt", 0, 3, states_20,
|
{276, "del_stmt", 0, 3, states_20,
|
||||||
"\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
|
||||||
{277, "pass_stmt", 0, 2, states_21,
|
|
||||||
"\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
|
||||||
{278, "flow_stmt", 0, 2, states_22,
|
|
||||||
"\000\000\024\000\043\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
|
||||||
{279, "break_stmt", 0, 2, states_23,
|
|
||||||
"\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
|
||||||
{280, "continue_stmt", 0, 2, states_24,
|
|
||||||
"\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
|
{277, "pass_stmt", 0, 2, states_21,
|
||||||
|
"\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
|
{278, "flow_stmt", 0, 2, states_22,
|
||||||
|
"\000\000\005\300\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
|
{279, "break_stmt", 0, 2, states_23,
|
||||||
|
"\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
|
{280, "continue_stmt", 0, 2, states_24,
|
||||||
|
"\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{281, "return_stmt", 0, 3, states_25,
|
{281, "return_stmt", 0, 3, states_25,
|
||||||
"\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{282, "yield_stmt", 0, 2, states_26,
|
{282, "yield_stmt", 0, 2, states_26,
|
||||||
"\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{283, "raise_stmt", 0, 5, states_27,
|
{283, "raise_stmt", 0, 5, states_27,
|
||||||
"\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{284, "import_stmt", 0, 2, states_28,
|
{284, "import_stmt", 0, 2, states_28,
|
||||||
"\000\000\000\011\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\100\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{285, "import_name", 0, 3, states_29,
|
{285, "import_name", 0, 3, states_29,
|
||||||
"\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{286, "import_from", 0, 8, states_30,
|
{286, "import_from", 0, 8, states_30,
|
||||||
"\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{287, "import_as_name", 0, 4, states_31,
|
{287, "import_as_name", 0, 4, states_31,
|
||||||
"\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{288, "dotted_as_name", 0, 4, states_32,
|
{288, "dotted_as_name", 0, 4, states_32,
|
||||||
|
@ -2183,117 +2183,117 @@ static dfa dfas[92] = {
|
||||||
{291, "dotted_name", 0, 2, states_35,
|
{291, "dotted_name", 0, 2, states_35,
|
||||||
"\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{292, "global_stmt", 0, 3, states_36,
|
{292, "global_stmt", 0, 3, states_36,
|
||||||
"\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{293, "nonlocal_stmt", 0, 3, states_37,
|
{293, "nonlocal_stmt", 0, 3, states_37,
|
||||||
"\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{294, "assert_stmt", 0, 5, states_38,
|
{294, "assert_stmt", 0, 5, states_38,
|
||||||
"\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{295, "compound_stmt", 0, 2, states_39,
|
{295, "compound_stmt", 0, 2, states_39,
|
||||||
"\000\004\251\004\034\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\004\052\001\107\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{296, "async_stmt", 0, 3, states_40,
|
{296, "async_stmt", 0, 3, states_40,
|
||||||
"\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{297, "if_stmt", 0, 8, states_41,
|
{297, "if_stmt", 0, 8, states_41,
|
||||||
"\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{298, "while_stmt", 0, 8, states_42,
|
{298, "while_stmt", 0, 8, states_42,
|
||||||
"\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{299, "for_stmt", 0, 11, states_43,
|
{299, "for_stmt", 0, 11, states_43,
|
||||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{300, "try_stmt", 0, 13, states_44,
|
{300, "try_stmt", 0, 13, states_44,
|
||||||
"\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{301, "with_stmt", 0, 6, states_45,
|
{301, "with_stmt", 0, 6, states_45,
|
||||||
"\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{302, "with_item", 0, 4, states_46,
|
{302, "with_item", 0, 4, states_46,
|
||||||
"\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{303, "except_clause", 0, 5, states_47,
|
{303, "except_clause", 0, 5, states_47,
|
||||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000"},
|
||||||
{304, "suite", 0, 5, states_48,
|
{304, "suite", 0, 5, states_48,
|
||||||
"\344\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\344\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{305, "namedexpr_test", 0, 4, states_49,
|
{305, "namedexpr_test", 0, 4, states_49,
|
||||||
"\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{306, "test", 0, 6, states_50,
|
{306, "test", 0, 6, states_50,
|
||||||
"\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{307, "test_nocond", 0, 2, states_51,
|
{307, "test_nocond", 0, 2, states_51,
|
||||||
"\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{308, "lambdef", 0, 5, states_52,
|
{308, "lambdef", 0, 5, states_52,
|
||||||
"\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{309, "lambdef_nocond", 0, 5, states_53,
|
{309, "lambdef_nocond", 0, 5, states_53,
|
||||||
"\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{310, "or_test", 0, 2, states_54,
|
{310, "or_test", 0, 2, states_54,
|
||||||
"\240\173\002\100\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\020\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{311, "and_test", 0, 2, states_55,
|
{311, "and_test", 0, 2, states_55,
|
||||||
"\240\173\002\100\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\020\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{312, "not_test", 0, 3, states_56,
|
{312, "not_test", 0, 3, states_56,
|
||||||
"\240\173\002\100\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\020\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{313, "comparison", 0, 2, states_57,
|
{313, "comparison", 0, 2, states_57,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{314, "comp_op", 0, 4, states_58,
|
{314, "comp_op", 0, 4, states_58,
|
||||||
"\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\004\000\340\017\000\000\000\000"},
|
"\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\004\000\340\017\000\000\000\000"},
|
||||||
{315, "star_expr", 0, 3, states_59,
|
{315, "star_expr", 0, 3, states_59,
|
||||||
"\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{316, "expr", 0, 2, states_60,
|
{316, "expr", 0, 2, states_60,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{317, "xor_expr", 0, 2, states_61,
|
{317, "xor_expr", 0, 2, states_61,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{318, "and_expr", 0, 2, states_62,
|
{318, "and_expr", 0, 2, states_62,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{319, "shift_expr", 0, 2, states_63,
|
{319, "shift_expr", 0, 2, states_63,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{320, "arith_expr", 0, 2, states_64,
|
{320, "arith_expr", 0, 2, states_64,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{321, "term", 0, 2, states_65,
|
{321, "term", 0, 2, states_65,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{322, "factor", 0, 3, states_66,
|
{322, "factor", 0, 3, states_66,
|
||||||
"\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{323, "power", 0, 4, states_67,
|
{323, "power", 0, 4, states_67,
|
||||||
"\040\172\002\000\100\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\040\172\000\000\220\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{324, "atom_expr", 0, 3, states_68,
|
{324, "atom_expr", 0, 3, states_68,
|
||||||
"\040\172\002\000\100\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\040\172\000\000\220\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{325, "atom", 0, 9, states_69,
|
{325, "atom", 0, 9, states_69,
|
||||||
"\040\172\000\000\100\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\040\172\000\000\020\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{326, "testlist_comp", 0, 5, states_70,
|
{326, "testlist_comp", 0, 5, states_70,
|
||||||
"\340\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{327, "trailer", 0, 7, states_71,
|
{327, "trailer", 0, 7, states_71,
|
||||||
"\040\100\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"},
|
"\040\100\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"},
|
||||||
{328, "subscriptlist", 0, 3, states_72,
|
{328, "subscriptlist", 0, 3, states_72,
|
||||||
"\240\173\002\120\300\007\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{329, "subscript", 0, 5, states_73,
|
{329, "subscript", 0, 5, states_73,
|
||||||
"\240\173\002\120\300\007\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{330, "sliceop", 0, 3, states_74,
|
{330, "sliceop", 0, 3, states_74,
|
||||||
"\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{331, "exprlist", 0, 3, states_75,
|
{331, "exprlist", 0, 3, states_75,
|
||||||
"\340\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{332, "testlist", 0, 3, states_76,
|
{332, "testlist", 0, 3, states_76,
|
||||||
"\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{333, "dictorsetmaker", 0, 14, states_77,
|
{333, "dictorsetmaker", 0, 14, states_77,
|
||||||
"\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{334, "classdef", 0, 8, states_78,
|
{334, "classdef", 0, 8, states_78,
|
||||||
"\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{335, "arglist", 0, 3, states_79,
|
{335, "arglist", 0, 3, states_79,
|
||||||
"\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{336, "argument", 0, 4, states_80,
|
{336, "argument", 0, 4, states_80,
|
||||||
"\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{337, "comp_iter", 0, 2, states_81,
|
{337, "comp_iter", 0, 2, states_81,
|
||||||
"\000\000\201\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\040\001\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{338, "sync_comp_for", 0, 6, states_82,
|
{338, "sync_comp_for", 0, 6, states_82,
|
||||||
"\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{339, "comp_for", 0, 3, states_83,
|
{339, "comp_for", 0, 3, states_83,
|
||||||
"\000\000\201\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\040\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{340, "comp_if", 0, 4, states_84,
|
{340, "comp_if", 0, 4, states_84,
|
||||||
"\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{341, "encoding_decl", 0, 2, states_85,
|
{341, "encoding_decl", 0, 2, states_85,
|
||||||
"\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{342, "yield_expr", 0, 3, states_86,
|
{342, "yield_expr", 0, 3, states_86,
|
||||||
"\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{343, "yield_arg", 0, 3, states_87,
|
{343, "yield_arg", 0, 3, states_87,
|
||||||
"\340\173\002\121\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\100\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{344, "func_body_suite", 0, 7, states_88,
|
{344, "func_body_suite", 0, 7, states_88,
|
||||||
"\344\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\344\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{345, "func_type_input", 0, 3, states_89,
|
{345, "func_type_input", 0, 3, states_89,
|
||||||
"\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{346, "func_type", 0, 6, states_90,
|
{346, "func_type", 0, 6, states_90,
|
||||||
"\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
{347, "typelist", 0, 11, states_91,
|
{347, "typelist", 0, 11, states_91,
|
||||||
"\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
"\340\173\000\024\260\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||||
};
|
};
|
||||||
static label labels[183] = {
|
static label labels[183] = {
|
||||||
{0, "EMPTY"},
|
{0, "EMPTY"},
|
||||||
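The long octal strings in the dfa table above are first-set bitmaps: bit i of the string (byte i // 8, bit i % 8) marks label number i as a possible first token for that rule. Because this change removes two keyword labels and renumbers everything after them, the regenerated masks are the old masks with the affected bits moved down by two positions rather than any change to the grammar itself. A minimal sketch of the single-byte case, using the del_stmt entry above (bits that cross a byte boundary, as in flow_stmt, need more care, so this is illustrative only):

    # del_stmt's single first-set bit sits in byte 2 of the mask.
    old_byte = 0o100          # old mask byte: bit 6, i.e. label 2*8 + 6 == 22
    new_byte = old_byte >> 2  # two labels removed before it, so everything shifts down by two
    assert new_byte == 0o020  # new mask byte: bit 4, i.e. label 20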
@@ -2312,8 +2312,6 @@ static label labels[183] = {
     {1, "True"},
     {9, 0},
     {1, "assert"},
-    {1, "async"},
-    {1, "await"},
     {1, "break"},
     {1, "class"},
     {1, "continue"},
@@ -2336,6 +2334,8 @@ static label labels[183] = {
     {1, "yield"},
     {25, 0},
     {31, 0},
+    {56, 0},
+    {55, 0},
     {1, 0},
     {2, 0},
     {3, 0},
@@ -2357,7 +2357,7 @@ static label labels[183] = {
     {51, 0},
     {11, 0},
     {306, 0},
-    {56, 0},
+    {58, 0},
     {344, 0},
     {265, 0},
     {35, 0},
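The label-table hunks above are the core of the regenerated tables: the {1, "async"} and {1, "await"} keyword labels are dropped, two terminal labels {56, 0} and {55, 0} are added for the reinstated ASYNC and AWAIT tokens, and a later terminal label moves from 56 to 58 to make room for them. A quick way to inspect the resulting token numbering from Python, assuming an interpreter built with this change (the exact values below are read off the new labels, not guaranteed elsewhere):

    import token

    # AWAIT and ASYNC are dedicated token types again.
    print(token.AWAIT, token.ASYNC)      # expected: 55 56
    print(token.tok_name[token.AWAIT])   # 'AWAIT'
    print(token.tok_name[token.ASYNC])   # 'ASYNC'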
@@ -105,6 +105,7 @@ PyRun_InteractiveLoopFlags(FILE *fp, const char *filename_str, PyCompilerFlags *
     if (flags == NULL) {
         flags = &local_flags;
         local_flags.cf_flags = 0;
+        local_flags.cf_feature_version = PY_MINOR_VERSION;
     }
     v = _PySys_GetObjectId(&PyId_ps1);
     if (v == NULL) {
@@ -1165,6 +1166,7 @@ Py_SymtableStringObject(const char *str, PyObject *filename, int start)
         return NULL;

     flags.cf_flags = 0;
+    flags.cf_feature_version = PY_MINOR_VERSION;
     mod = PyParser_ASTFromStringObject(str, filename, start, &flags, arena);
     if (mod == NULL) {
         PyArena_Free(arena);
@@ -1198,12 +1200,15 @@ PyParser_ASTFromStringObject(const char *s, PyObject *filename, int start,
     PyCompilerFlags localflags;
     perrdetail err;
     int iflags = PARSER_FLAGS(flags);
+    if (flags && flags->cf_feature_version < 7)
+        iflags |= PyPARSE_ASYNC_HACKS;

     node *n = PyParser_ParseStringObject(s, filename,
                                          &_PyParser_Grammar, start, &err,
                                          &iflags);
     if (flags == NULL) {
         localflags.cf_flags = 0;
+        localflags.cf_feature_version = PY_MINOR_VERSION;
         flags = &localflags;
     }
     if (n) {
@@ -1249,6 +1254,7 @@ PyParser_ASTFromFileObject(FILE *fp, PyObject *filename, const char* enc,
                                              start, ps1, ps2, &err, &iflags);
     if (flags == NULL) {
         localflags.cf_flags = 0;
+        localflags.cf_feature_version = PY_MINOR_VERSION;
         flags = &localflags;
     }
     if (n) {
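These last hunks (in CPython's Python/pythonrun.c) are where the new cf_feature_version field gets its default, the running interpreter's PY_MINOR_VERSION, and where a requested feature version below 7 switches the tokenizer back into PyPARSE_ASYNC_HACKS mode. A minimal sketch of the Python-level effect, assuming the ast.parse(feature_version=...) behaviour this change introduces:

    import ast

    # With an older feature_version, "async"/"await" are ordinary identifiers again.
    tree = ast.parse("async = 1", feature_version=6)
    print(type(tree.body[0]).__name__)   # Assign

    # Under the current grammar (no feature_version given) they are hard keywords.
    try:
        ast.parse("async = 1")
    except SyntaxError:
        print("async is a keyword by default")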