bpo-40334: Support type comments (GH-19780)
This implements full support for `# type: <type>` comments, `# type: ignore <stuff>` comments, and the `func_type` parsing mode for `ast.parse()` and `compile()`. Closes https://github.com/we-like-parsers/cpython/issues/95. (For now you need to use the master branch of mypy, since another issue unique to 3.9 had to be fixed there, and there's no mypy release yet.) The only thing missing is `feature_version=N`, which is being tracked in https://github.com/we-like-parsers/cpython/issues/124.
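As a quick illustration of the surface behavior this enables (a minimal sketch, not part of the commit; it assumes a CPython build that includes this change), `ast.parse()` only records type comments when asked, and `mode="func_type"` parses a signature-style type comment on its own:

    import ast

    # Opt in to type comment processing; without the flag these comments are ignored.
    tree = ast.parse("x = 1  # type: int\n", type_comments=True)
    print(tree.body[0].type_comment)   # 'int'

    # Parse a signature type comment in isolation with the func_type mode.
    sig = ast.parse("(int, str) -> bool", mode="func_type")
    print(type(sig).__name__)          # 'FunctionType'
    print(ast.dump(sig))               # argtypes=[...], returns=Name(id='bool', ...)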
parent efb8dd5b3e
commit c001c09e90
@@ -17,6 +17,8 @@ _PyPegen_parse(Parser *p)
         result = interactive_rule(p);
     } else if (p->start_rule == Py_eval_input) {
         result = eval_rule(p);
+    } else if (p->start_rule == Py_func_type_input) {
+        result = func_type_rule(p);
     } else if (p->start_rule == Py_fstring_input) {
         result = fstring_rule(p);
     }
@@ -26,11 +28,20 @@ _PyPegen_parse(Parser *p)

 // The end
 '''
-file[mod_ty]: a=[statements] ENDMARKER { Module(a, NULL, p->arena) }
+file[mod_ty]: a=[statements] ENDMARKER { _PyPegen_make_module(p, a) }
 interactive[mod_ty]: a=statement_newline { Interactive(a, p->arena) }
 eval[mod_ty]: a=expressions NEWLINE* ENDMARKER { Expression(a, p->arena) }
+func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* ENDMARKER { FunctionType(a, b, p->arena) }
 fstring[expr_ty]: star_expressions

+# type_expressions allow */** but ignore them
+type_expressions[asdl_seq*]:
+    | a=','.expression+ ',' '*' b=expression ',' '**' c=expression {
+        _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_seq_append_to_end(p, a, b)), c) }
+    | a=','.expression+ ',' '*' b=expression { _PyPegen_seq_append_to_end(p, a, b) }
+    | a=','.expression+ ',' '**' b=expression { _PyPegen_seq_append_to_end(p, a, b) }
+    | ','.expression+
+
 statements[asdl_seq*]: a=statement+ { _PyPegen_seq_flatten(p, a) }
 statement[asdl_seq*]: a=compound_stmt { _PyPegen_singleton_seq(p, a) } | simple_stmt
 statement_newline[asdl_seq*]:
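The `func_type` rule above is the grammar behind `ast.parse(..., mode="func_type")`, and `type_expressions` is what lets a signature type comment spell out `*args`/`**kwargs` types. A hedged sketch (not part of the diff; the expected `argtypes` layout is inferred from the grammar actions above, which append the starred types to the sequence):

    import ast

    # A plain signature type comment.
    sig = ast.parse("(int, str) -> List[int]", mode="func_type")
    print([ast.dump(t) for t in sig.argtypes])

    # The */** markers are accepted; the annotated types are kept but the
    # markers themselves are dropped ("allow */** but ignore them").
    sig2 = ast.parse("(str, *int, **bool) -> None", mode="func_type")
    print(len(sig2.argtypes))   # expected 3: str, int, bool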
@@ -73,8 +84,8 @@ assignment:
     | a=('(' b=inside_paren_ann_assign_target ')' { b }
          | ann_assign_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] {
         _Py_AnnAssign(a, b, c, 0, EXTRA)}
-    | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) {
-        _Py_Assign(a, b, NULL, EXTRA) }
+    | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] {
+        _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
     | a=target b=augassign c=(yield_expr | star_expressions) {
         _Py_AugAssign(a, b->kind, c, EXTRA) }
     | invalid_assignment
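With the `tc=[TYPE_COMMENT]` slot added to the assignment rule above, a trailing `# type:` comment ends up on `Assign.type_comment`. A minimal sketch (not part of the diff), assuming `type_comments=True`:

    import ast

    tree = ast.parse("x = []  # type: List[int]\n", type_comments=True)
    print(tree.body[0].type_comment)   # 'List[int]'

    # Without the flag the comment is treated as an ordinary comment.
    print(ast.parse("x = []  # type: List[int]\n").body[0].type_comment)   # None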
@@ -145,14 +156,14 @@ while_stmt[stmt_ty]:
     | 'while' a=named_expression ':' b=block c=[else_block] { _Py_While(a, b, c, EXTRA) }

 for_stmt[stmt_ty]:
-    | is_async=[ASYNC] 'for' t=star_targets 'in' ex=star_expressions ':' b=block el=[else_block] {
-        (is_async ? _Py_AsyncFor : _Py_For)(t, ex, b, el, NULL, EXTRA) }
+    | is_async=[ASYNC] 'for' t=star_targets 'in' ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
+        (is_async ? _Py_AsyncFor : _Py_For)(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) }

 with_stmt[stmt_ty]:
     | is_async=[ASYNC] 'with' '(' a=','.with_item+ ')' ':' b=block {
         (is_async ? _Py_AsyncWith : _Py_With)(a, b, NULL, EXTRA) }
-    | is_async=[ASYNC] 'with' a=','.with_item+ ':' b=block {
-        (is_async ? _Py_AsyncWith : _Py_With)(a, b, NULL, EXTRA) }
+    | is_async=[ASYNC] 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
+        (is_async ? _Py_AsyncWith : _Py_With)(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
 with_item[withitem_ty]:
     | e=expression o=['as' t=target { t }] { _Py_withitem(e, o, p->arena) }

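The `for_stmt`/`with_stmt` changes above thread the same optional TYPE_COMMENT token through to `For.type_comment` and `With.type_comment`. A minimal sketch (not part of the diff; `data` and `p` are made-up names and the source is only parsed, never executed):

    import ast

    src = (
        "for x in data:  # type: int\n"
        "    pass\n"
        "with open(p) as f:  # type: IO[str]\n"
        "    pass\n"
    )
    tree = ast.parse(src, type_comments=True)
    print(tree.body[0].type_comment)   # 'int'
    print(tree.body[1].type_comment)   # 'IO[str]'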
@@ -177,43 +188,74 @@ function_def[stmt_ty]:
     | function_def_raw

 function_def_raw[stmt_ty]:
-    | is_async=[ASYNC] 'def' n=NAME '(' params=[params] ')' a=['->' z=annotation { z }] ':' b=block {
+    | is_async=[ASYNC] 'def' n=NAME '(' params=[params] ')' a=['->' z=expression { z }] ':' tc=[func_type_comment] b=block {
         (is_async ? _Py_AsyncFunctionDef : _Py_FunctionDef)(n->v.Name.id,
             (params) ? params : CHECK(_PyPegen_empty_arguments(p)),
-            b, NULL, a, NULL, EXTRA) }
+            b, NULL, a, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+func_type_comment[PyObject*]:
+    | NEWLINE t=TYPE_COMMENT &(NEWLINE INDENT) { t } # Must be followed by indented block
+    | invalid_double_type_comments
+    | TYPE_COMMENT

 params[arguments_ty]:
     | invalid_parameters
     | parameters

 parameters[arguments_ty]:
-    | a=slash_without_default b=[',' x=plain_names { x }] c=[',' y=names_with_default { y }] d=[',' z=[star_etc] { z }] {
+    | a=slash_no_default b=param_no_default* c=param_with_default* d=[star_etc] {
         _PyPegen_make_arguments(p, a, NULL, b, c, d) }
-    | a=slash_with_default b=[',' y=names_with_default { y }] c=[',' z=[star_etc] { z }] {
+    | a=slash_with_default b=param_with_default* c=[star_etc] {
         _PyPegen_make_arguments(p, NULL, a, NULL, b, c) }
-    | a=plain_names b=[',' y=names_with_default { y }] c=[',' z=[star_etc] { z }] {
+    | a=param_no_default+ b=param_with_default* c=[star_etc] {
         _PyPegen_make_arguments(p, NULL, NULL, a, b, c) }
-    | a=names_with_default b=[',' z=[star_etc] { z }] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)}
+    | a=param_with_default+ b=[star_etc] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)}
     | a=star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) }
-slash_without_default[asdl_seq*]: a=plain_names ',' '/' { a }
-slash_with_default[SlashWithDefault*]: a=[n=plain_names ',' { n }] b=names_with_default ',' '/' {
-    _PyPegen_slash_with_default(p, a, b) }
+
+# Some duplication here because we can't write (',' | &')'),
+# which is because we don't support empty alternatives (yet).
+#
+slash_no_default[asdl_seq*]:
+    | a=param_no_default+ '/' ',' { a }
+    | a=param_no_default+ '/' &')' { a }
+slash_with_default[SlashWithDefault*]:
+    | a=param_no_default* b=param_with_default+ '/' ',' { _PyPegen_slash_with_default(p, a, b) }
+    | a=param_no_default* b=param_with_default+ '/' &')' { _PyPegen_slash_with_default(p, a, b) }
+
 star_etc[StarEtc*]:
-    | '*' a=plain_name b=name_with_optional_default* c=[',' d=kwds { d }] [','] {
+    | '*' a=param_no_default b=param_maybe_default* c=[kwds] {
         _PyPegen_star_etc(p, a, b, c) }
-    | '*' b=name_with_optional_default+ c=[',' d=kwds { d }] [','] {
+    | '*' ',' b=param_maybe_default+ c=[kwds] {
         _PyPegen_star_etc(p, NULL, b, c) }
-    | a=kwds [','] { _PyPegen_star_etc(p, NULL, NULL, a) }
-name_with_optional_default[NameDefaultPair*]:
-    | ',' a=plain_name b=['=' e=expression { e }] { _PyPegen_name_default_pair(p, a, b) }
-names_with_default[asdl_seq*]: a=','.name_with_default+ { a }
-name_with_default[NameDefaultPair*]:
-    | n=plain_name '=' e=expression { _PyPegen_name_default_pair(p, n, e) }
-plain_names[asdl_seq*] (memo): a=','.(plain_name !'=')+ { a }
-plain_name[arg_ty]:
-    | a=NAME b=[':' z=annotation { z }] { _Py_arg(a->v.Name.id, b, NULL, EXTRA) }
+    | a=kwds { _PyPegen_star_etc(p, NULL, NULL, a) }
+
 kwds[arg_ty]:
-    | '**' a=plain_name { a }
-annotation[expr_ty]: expression
+    | '**' a=param_no_default { a }
+
+# One parameter. This *includes* a following comma and type comment.
+#
+# There are three styles:
+# - No default
+# - With default
+# - Maybe with default
+#
+# There are two alternative forms of each, to deal with type comments:
+# - Ends in a comma followed by an optional type comment
+# - No comma, optional type comment, must be followed by close paren
+# The latter form is for a final parameter without trailing comma.
+#
+param_no_default[arg_ty]:
+    | a=param ',' tc=TYPE_COMMENT? { _PyPegen_add_type_comment_to_arg(p, a, tc) }
+    | a=param tc=TYPE_COMMENT? &')' { _PyPegen_add_type_comment_to_arg(p, a, tc) }
+param_with_default[NameDefaultPair*]:
+    | a=param c=default ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, a, c, tc) }
+    | a=param c=default tc=TYPE_COMMENT? &')' { _PyPegen_name_default_pair(p, a, c, tc) }
+param_maybe_default[NameDefaultPair*]:
+    | a=param c=default? ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, a, c, tc) }
+    | a=param c=default? tc=TYPE_COMMENT? &')' { _PyPegen_name_default_pair(p, a, c, tc) }
+param[arg_ty]: a=NAME b=annotation? { _Py_arg(a->v.Name.id, b, NULL, EXTRA) }
+
+annotation[expr_ty]: ':' a=expression { a }
+default[expr_ty]: '=' a=expression { a }

 decorators[asdl_seq*]: a=('@' f=named_expression NEWLINE { f })+ { a }

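Taken together, `func_type_comment` and the new `param_no_default`/`param_with_default` rules above attach a signature type comment to `FunctionDef.type_comment` and per-parameter comments to each `arg.type_comment`. A minimal sketch (not part of the diff; `send` is a made-up name and the source is only parsed):

    import ast

    src = (
        "def send(data,     # type: bytes\n"
        "         timeout,  # type: float\n"
        "         ):\n"
        "    # type: (...) -> bool\n"
        "    return True\n"
    )
    fn = ast.parse(src, type_comments=True).body[0]
    print(fn.type_comment)                          # '(...) -> bool'
    print([a.type_comment for a in fn.args.args])   # ['bytes', 'float']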
@@ -284,10 +326,10 @@ lambda_star_etc[StarEtc*]:
         _PyPegen_star_etc(p, NULL, b, c) }
     | a=lambda_kwds [','] { _PyPegen_star_etc(p, NULL, NULL, a) }
 lambda_name_with_optional_default[NameDefaultPair*]:
-    | ',' a=lambda_plain_name b=['=' e=expression { e }] { _PyPegen_name_default_pair(p, a, b) }
+    | ',' a=lambda_plain_name b=['=' e=expression { e }] { _PyPegen_name_default_pair(p, a, b, NULL) }
 lambda_names_with_default[asdl_seq*]: a=','.lambda_name_with_default+ { a }
 lambda_name_with_default[NameDefaultPair*]:
-    | n=lambda_plain_name '=' e=expression { _PyPegen_name_default_pair(p, n, e) }
+    | n=lambda_plain_name '=' e=expression { _PyPegen_name_default_pair(p, n, e, NULL) }
 lambda_plain_names[asdl_seq*]: a=','.(lambda_plain_name !'=')+ { a }
 lambda_plain_name[arg_ty]: a=NAME { _Py_arg(a->v.Name.id, NULL, NULL, EXTRA) }
 lambda_kwds[arg_ty]: '**' a=lambda_plain_name { a }
@@ -552,5 +594,8 @@ invalid_comprehension:
     | ('[' | '(' | '{') '*' expression for_if_clauses {
         RAISE_SYNTAX_ERROR("iterable unpacking cannot be used in comprehension") }
 invalid_parameters:
-    | [plain_names ','] (slash_with_default | names_with_default) ',' plain_names {
+    | param_no_default* (slash_with_default | param_with_default+) param_no_default {
         RAISE_SYNTAX_ERROR("non-default argument follows default argument") }
+invalid_double_type_comments:
+    | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT {
+        RAISE_SYNTAX_ERROR("Cannot have two type comments on def") }
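The `invalid_double_type_comments` rule above exists purely to give a good error: two signature type comments on one `def` are rejected. A hedged sketch (not part of the diff; the expected message is taken from the rule above):

    import ast

    src = (
        "def f(a):\n"
        "    # type: (int) -> int\n"
        "    # type: (str) -> str\n"
        "    return a\n"
    )
    try:
        ast.parse(src, type_comments=True)
    except SyntaxError as e:
        print(e.msg)   # expected: "Cannot have two type comments on def"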
@@ -219,7 +219,6 @@ def favk(
 """


-@support.skip_if_new_parser("Pegen does not support type comments yet")
 class TypeCommentTests(unittest.TestCase):

     lowest = 4 # Lowest minor version supported
@@ -253,6 +252,7 @@ class TypeCommentTests(unittest.TestCase):
         self.assertEqual(tree.body[0].type_comment, None)
         self.assertEqual(tree.body[1].type_comment, None)

+    @support.skip_if_new_parser("Pegen does not support feature_version yet")
     def test_asyncdef(self):
         for tree in self.parse_all(asyncdef, minver=5):
             self.assertEqual(tree.body[0].type_comment, "() -> int")
@@ -261,22 +261,27 @@ class TypeCommentTests(unittest.TestCase):
         self.assertEqual(tree.body[0].type_comment, None)
         self.assertEqual(tree.body[1].type_comment, None)

+    @support.skip_if_new_parser("Pegen does not support feature_version yet")
     def test_asyncvar(self):
         for tree in self.parse_all(asyncvar, maxver=6):
             pass

+    @support.skip_if_new_parser("Pegen does not support feature_version yet")
     def test_asynccomp(self):
         for tree in self.parse_all(asynccomp, minver=6):
             pass

+    @support.skip_if_new_parser("Pegen does not support feature_version yet")
     def test_matmul(self):
         for tree in self.parse_all(matmul, minver=5):
             pass

+    @support.skip_if_new_parser("Pegen does not support feature_version yet")
     def test_fstring(self):
         for tree in self.parse_all(fstring, minver=6):
             pass

+    @support.skip_if_new_parser("Pegen does not support feature_version yet")
     def test_underscorednumber(self):
         for tree in self.parse_all(underscorednumber, minver=6):
             pass
Parser/pegen/parse.c (5466 changed lines): file diff suppressed because it is too large.
@@ -5,6 +5,39 @@
 #include "pegen.h"
 #include "parse_string.h"

+PyObject *
+_PyPegen_new_type_comment(Parser *p, char *s)
+{
+    PyObject *res = PyUnicode_DecodeUTF8(s, strlen(s), NULL);
+    if (res == NULL) {
+        return NULL;
+    }
+    if (PyArena_AddPyObject(p->arena, res) < 0) {
+        Py_DECREF(res);
+        return NULL;
+    }
+    return res;
+}
+
+arg_ty
+_PyPegen_add_type_comment_to_arg(Parser *p, arg_ty a, Token *tc)
+{
+    if (tc == NULL) {
+        return a;
+    }
+    char *bytes = PyBytes_AsString(tc->bytes);
+    if (bytes == NULL) {
+        return NULL;
+    }
+    PyObject *tco = _PyPegen_new_type_comment(p, bytes);
+    if (tco == NULL) {
+        return NULL;
+    }
+    return arg(a->arg, a->annotation, tco,
+               a->lineno, a->col_offset, a->end_lineno, a->end_col_offset,
+               p->arena);
+}
+
 static int
 init_normalization(Parser *p)
 {
@@ -539,11 +572,66 @@ _get_keyword_or_name_type(Parser *p, const char *name, int name_len)
     return NAME;
 }

+static int
+growable_comment_array_init(growable_comment_array *arr, size_t initial_size) {
+    assert(initial_size > 0);
+    arr->items = PyMem_Malloc(initial_size * sizeof(*arr->items));
+    arr->size = initial_size;
+    arr->num_items = 0;
+
+    return arr->items != NULL;
+}
+
+static int
+growable_comment_array_add(growable_comment_array *arr, int lineno, char *comment) {
+    if (arr->num_items >= arr->size) {
+        size_t new_size = arr->size * 2;
+        void *new_items_array = PyMem_Realloc(arr->items, new_size * sizeof(*arr->items));
+        if (!new_items_array) {
+            return 0;
+        }
+        arr->items = new_items_array;
+        arr->size = new_size;
+    }
+
+    arr->items[arr->num_items].lineno = lineno;
+    arr->items[arr->num_items].comment = comment; // Take ownership
+    arr->num_items++;
+    return 1;
+}
+
+static void
+growable_comment_array_deallocate(growable_comment_array *arr) {
+    for (unsigned i = 0; i < arr->num_items; i++) {
+        PyMem_Free(arr->items[i].comment);
+    }
+    PyMem_Free(arr->items);
+}
+
 int
 _PyPegen_fill_token(Parser *p)
 {
     const char *start, *end;
     int type = PyTokenizer_Get(p->tok, &start, &end);
+
+    // Record and skip '# type: ignore' comments
+    while (type == TYPE_IGNORE) {
+        Py_ssize_t len = end - start;
+        char *tag = PyMem_Malloc(len + 1);
+        if (tag == NULL) {
+            PyErr_NoMemory();
+            return -1;
+        }
+        strncpy(tag, start, len);
+        tag[len] = '\0';
+        // Ownership of tag passes to the growable array
+        if (!growable_comment_array_add(&p->type_ignore_comments, p->tok->lineno, tag)) {
+            PyErr_NoMemory();
+            return -1;
+        }
+        type = PyTokenizer_Get(p->tok, &start, &end);
+    }
+
     if (type == ERRORTOKEN) {
         if (p->tok->done == E_DECODE) {
             return raise_decode_error(p);
@@ -919,6 +1007,7 @@ _PyPegen_Parser_Free(Parser *p)
         PyMem_Free(p->tokens[i]);
     }
     PyMem_Free(p->tokens);
+    growable_comment_array_deallocate(&p->type_ignore_comments);
     PyMem_Free(p);
 }

@@ -961,13 +1050,19 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags,
         PyMem_Free(p);
         return (Parser *) PyErr_NoMemory();
     }
-    p->tokens[0] = PyMem_Malloc(sizeof(Token));
+    p->tokens[0] = PyMem_Calloc(1, sizeof(Token));
     if (!p->tokens) {
         PyMem_Free(p->tokens);
         PyMem_Free(p);
         return (Parser *) PyErr_NoMemory();
     }
-    memset(p->tokens[0], '\0', sizeof(Token));
+    if (!growable_comment_array_init(&p->type_ignore_comments, 10)) {
+        PyMem_Free(p->tokens[0]);
+        PyMem_Free(p->tokens);
+        PyMem_Free(p);
+        return (Parser *) PyErr_NoMemory();
+    }

     p->mark = 0;
     p->fill = 0;
     p->size = 1;
@@ -1099,6 +1194,8 @@ _PyPegen_run_parser_from_string(const char *str, int start_rule, PyObject *filen
     mod_ty result = NULL;

     int parser_flags = compute_parser_flags(flags);
+    tok->type_comments = (parser_flags & PyPARSE_TYPE_COMMENTS) > 0;
+
     Parser *p = _PyPegen_Parser_New(tok, start_rule, parser_flags, NULL, arena);
     if (p == NULL) {
         goto error;
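The `tok->type_comments` line above is what makes the whole feature opt-in: the tokenizer only emits TYPE_COMMENT/TYPE_IGNORE tokens when the caller passed the type-comments flag. A minimal sketch (not part of the diff):

    import ast

    src = "x = 1  # type: int\n"
    print(ast.parse(src).body[0].type_comment)                       # None (flag off)
    print(ast.parse(src, type_comments=True).body[0].type_comment)   # 'int'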
@@ -1155,6 +1252,27 @@ _PyPegen_seq_insert_in_front(Parser *p, void *a, asdl_seq *seq)
     return new_seq;
 }

+/* Creates a copy of seq and appends a to it */
+asdl_seq *
+_PyPegen_seq_append_to_end(Parser *p, asdl_seq *seq, void *a)
+{
+    assert(a != NULL);
+    if (!seq) {
+        return _PyPegen_singleton_seq(p, a);
+    }
+
+    asdl_seq *new_seq = _Py_asdl_seq_new(asdl_seq_LEN(seq) + 1, p->arena);
+    if (!new_seq) {
+        return NULL;
+    }
+
+    for (Py_ssize_t i = 0, l = asdl_seq_LEN(new_seq); i + 1 < l; i++) {
+        asdl_seq_SET(new_seq, i, asdl_seq_GET(seq, i));
+    }
+    asdl_seq_SET(new_seq, asdl_seq_LEN(new_seq) - 1, a);
+    return new_seq;
+}
+
 static Py_ssize_t
 _get_flattened_seq_size(asdl_seq *seqs)
 {
@@ -1483,13 +1601,13 @@ _PyPegen_get_values(Parser *p, asdl_seq *seq)

 /* Constructs a NameDefaultPair */
 NameDefaultPair *
-_PyPegen_name_default_pair(Parser *p, arg_ty arg, expr_ty value)
+_PyPegen_name_default_pair(Parser *p, arg_ty arg, expr_ty value, Token *tc)
 {
     NameDefaultPair *a = PyArena_Malloc(p->arena, sizeof(NameDefaultPair));
     if (!a) {
         return NULL;
     }
-    a->arg = arg;
+    a->arg = _PyPegen_add_type_comment_to_arg(p, arg, tc);
     a->value = value;
     return a;
 }
@@ -1946,3 +2064,28 @@ error:
     }
     return NULL;
 }
+
+mod_ty
+_PyPegen_make_module(Parser *p, asdl_seq *a) {
+    asdl_seq *type_ignores = NULL;
+    Py_ssize_t num = p->type_ignore_comments.num_items;
+    if (num > 0) {
+        // Turn the raw (comment, lineno) pairs into TypeIgnore objects in the arena
+        type_ignores = _Py_asdl_seq_new(num, p->arena);
+        if (type_ignores == NULL) {
+            return NULL;
+        }
+        for (int i = 0; i < num; i++) {
+            PyObject *tag = _PyPegen_new_type_comment(p, p->type_ignore_comments.items[i].comment);
+            if (tag == NULL) {
+                return NULL;
+            }
+            type_ignore_ty ti = TypeIgnore(p->type_ignore_comments.items[i].lineno, tag, p->arena);
+            if (ti == NULL) {
+                return NULL;
+            }
+            asdl_seq_SET(type_ignores, i, ti);
+        }
+    }
+    return Module(a, type_ignores, p->arena);
+}

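`_PyPegen_make_module()` above is where the `# type: ignore` comments collected during tokenization become `TypeIgnore` nodes on `Module.type_ignores`. A minimal sketch (not part of the diff; module and function names are made up, the source is only parsed, and the exact `tag` text follows the "# type: ignore <tag>" convention noted in the struct comment):

    import ast

    src = (
        "import missing_mod  # type: ignore\n"
        "x = compute()  # type: ignore[misc]\n"
    )
    tree = ast.parse(src, type_comments=True)
    for ti in tree.type_ignores:
        print(ti.lineno, repr(ti.tag))   # tag is whatever followed 'ignore', e.g. '[misc]'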
@@ -43,6 +43,16 @@ typedef struct {
     int type;
 } KeywordToken;


+typedef struct {
+    struct {
+        int lineno;
+        char *comment; // The " <tag>" in "# type: ignore <tag>"
+    } *items;
+    size_t size;
+    size_t num_items;
+} growable_comment_array;
+
 typedef struct {
     struct tok_state *tok;
     Token **tokens;
@@ -59,6 +69,7 @@ typedef struct {
     int starting_col_offset;
     int error_indicator;
     int flags;
+    growable_comment_array type_ignore_comments;
 } Parser;

 typedef struct {
@@ -110,13 +121,7 @@ int _PyPegen_lookahead(int, void *(func)(Parser *), Parser *);
 Token *_PyPegen_expect_token(Parser *p, int type);
 Token *_PyPegen_get_last_nonnwhitespace_token(Parser *);
 int _PyPegen_fill_token(Parser *p);
-void *_PyPegen_async_token(Parser *p);
-void *_PyPegen_await_token(Parser *p);
-void *_PyPegen_endmarker_token(Parser *p);
 expr_ty _PyPegen_name_token(Parser *p);
-void *_PyPegen_newline_token(Parser *p);
-void *_PyPegen_indent_token(Parser *p);
-void *_PyPegen_dedent_token(Parser *p);
 expr_ty _PyPegen_number_token(Parser *p);
 void *_PyPegen_string_token(Parser *p);
 const char *_PyPegen_get_expr_name(expr_ty);
@@ -153,6 +158,29 @@ CHECK_CALL_NULL_ALLOWED(Parser *p, void *result)
 #define CHECK(result) CHECK_CALL(p, result)
 #define CHECK_NULL_ALLOWED(result) CHECK_CALL_NULL_ALLOWED(p, result)

+PyObject *_PyPegen_new_type_comment(Parser *, char *);
+
+Py_LOCAL_INLINE(PyObject *)
+NEW_TYPE_COMMENT(Parser *p, Token *tc)
+{
+    if (tc == NULL) {
+        return NULL;
+    }
+    char *bytes = PyBytes_AsString(tc->bytes);
+    if (bytes == NULL) {
+        goto error;
+    }
+    PyObject *tco = _PyPegen_new_type_comment(p, bytes);
+    if (tco == NULL) {
+        goto error;
+    }
+    return tco;
+error:
+    p->error_indicator = 1; // Inline CHECK_CALL
+    return NULL;
+}
+
+arg_ty _PyPegen_add_type_comment_to_arg(Parser *, arg_ty, Token *);
 PyObject *_PyPegen_new_identifier(Parser *, char *);
 Parser *_PyPegen_Parser_New(struct tok_state *, int, int, int *, PyArena *);
 void _PyPegen_Parser_Free(Parser *);
@@ -164,6 +192,7 @@ mod_ty _PyPegen_run_parser_from_string(const char *, int, PyObject *, PyCompiler
 void *_PyPegen_interactive_exit(Parser *);
 asdl_seq *_PyPegen_singleton_seq(Parser *, void *);
 asdl_seq *_PyPegen_seq_insert_in_front(Parser *, void *, asdl_seq *);
+asdl_seq *_PyPegen_seq_append_to_end(Parser *, asdl_seq *, void *);
 asdl_seq *_PyPegen_seq_flatten(Parser *, asdl_seq *);
 expr_ty _PyPegen_join_names_with_dot(Parser *, expr_ty, expr_ty);
 int _PyPegen_seq_count_dots(asdl_seq *);
@@ -176,7 +205,7 @@ expr_ty _PyPegen_set_expr_context(Parser *, expr_ty, expr_context_ty);
 KeyValuePair *_PyPegen_key_value_pair(Parser *, expr_ty, expr_ty);
 asdl_seq *_PyPegen_get_keys(Parser *, asdl_seq *);
 asdl_seq *_PyPegen_get_values(Parser *, asdl_seq *);
-NameDefaultPair *_PyPegen_name_default_pair(Parser *, arg_ty, expr_ty);
+NameDefaultPair *_PyPegen_name_default_pair(Parser *, arg_ty, expr_ty, Token *);
 SlashWithDefault *_PyPegen_slash_with_default(Parser *, asdl_seq *, asdl_seq *);
 StarEtc *_PyPegen_star_etc(Parser *, arg_ty, asdl_seq *, arg_ty);
 arguments_ty _PyPegen_make_arguments(Parser *, asdl_seq *, SlashWithDefault *,
@@ -192,6 +221,7 @@ expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_seq *);
 asdl_seq *_PyPegen_join_sequences(Parser *, asdl_seq *, asdl_seq *);
 void *_PyPegen_arguments_parsing_error(Parser *, expr_ty);
 int _PyPegen_check_barry_as_flufl(Parser *);
+mod_ty _PyPegen_make_module(Parser *, asdl_seq *);

 void *_PyPegen_parse(Parser *);

@@ -816,12 +816,8 @@ builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename,
     if (str == NULL)
         goto error;

-    int current_use_peg = PyInterpreterState_Get()->config._use_peg_parser;
-    if (flags & PyCF_TYPE_COMMENTS || feature_version >= 0 || compile_mode == 3) {
-        PyInterpreterState_Get()->config._use_peg_parser = 0;
-    }
     result = Py_CompileStringObject(str, filename, start[compile_mode], &cf, optimize);
-    PyInterpreterState_Get()->config._use_peg_parser = current_use_peg;

     Py_XDECREF(source_copy);
     goto finally;

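Because the `compile()` fallback to the old parser above is no longer needed for type comments or `func_type` mode, those now go through the PEG parser as well. A minimal sketch (not part of the diff) using the documented `ast.PyCF_TYPE_COMMENTS` and `ast.PyCF_ONLY_AST` flags:

    import ast

    src = "x = 1  # type: int\n"
    tree = compile(src, "<test>", "exec",
                   flags=ast.PyCF_ONLY_AST | ast.PyCF_TYPE_COMMENTS)
    print(tree.body[0].type_comment)   # 'int'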