bpo-40939: Remove PEG parser easter egg (__new_parser__) (#20802)

It no longer serves a purpose (there's only one parser) and having "new" in any name will eventually look odd. Also, it impinges on a potential sub-namespace, `__new_...__`.
Lysandros Nikolaou 2020-06-11 19:09:21 +03:00 committed by GitHub
parent 10e6506aa8
commit bcd7deed91
4 changed files with 10 additions and 47 deletions
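As a quick sanity check of the user-visible effect, the sketch below uses only standard-library calls; the commented results assume an interpreter built with this change (the removed keyword lives only in pre-commit 3.9 alphas):

    import keyword
    import pydoc

    # '__new_parser__' is gone from the generated keyword list ...
    print(keyword.iskeyword("__new_parser__"))        # False
    print("__new_parser__" in keyword.kwlist)         # False

    # ... and from pydoc's keyword help table (see the Lib/pydoc.py hunk below).
    print("__new_parser__" in pydoc.Helper.keywords)  # False

    # With the grammar alternative removed, the name is an ordinary identifier
    # instead of triggering the old SyntaxError("You found it!") easter egg.
    __new_parser__ = "just a name now"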

Grammar/python.gram

@@ -477,7 +477,6 @@ atom[expr_ty]:
     | 'True' { _Py_Constant(Py_True, NULL, EXTRA) }
     | 'False' { _Py_Constant(Py_False, NULL, EXTRA) }
     | 'None' { _Py_Constant(Py_None, NULL, EXTRA) }
-    | '__new_parser__' { RAISE_SYNTAX_ERROR("You found it!") }
     | &STRING strings
     | NUMBER
     | &'(' (tuple | group | genexp)

Lib/keyword.py

@@ -19,7 +19,6 @@ kwlist = [
     'False',
     'None',
     'True',
-    '__new_parser__',
     'and',
     'as',
     'assert',

Lib/pydoc.py

@@ -1817,7 +1817,6 @@ class Helper:
         'False': '',
         'None': '',
         'True': '',
-        '__new_parser__': '',
         'and': 'BOOLEAN',
         'as': 'with',
         'assert': ('assert', ''),

Parser/pegen/parse.c

@@ -7,7 +7,7 @@ extern int Py_DebugFlag;
 #else
 #define D(x)
 #endif
-static const int n_keyword_lists = 15;
+static const int n_keyword_lists = 9;
 static KeywordToken *reserved_keywords[] = {
     NULL,
     NULL,
@@ -15,8 +15,8 @@ static KeywordToken *reserved_keywords[] = {
         {"if", 510},
         {"in", 518},
         {"is", 526},
-        {"as", 531},
-        {"or", 532},
+        {"as", 530},
+        {"or", 531},
         {NULL, -1},
     },
     (KeywordToken[]) {
@@ -25,7 +25,7 @@ static KeywordToken *reserved_keywords[] = {
         {"for", 517},
         {"def", 522},
         {"not", 525},
-        {"and", 533},
+        {"and", 532},
         {NULL, -1},
     },
     (KeywordToken[]) {
@ -65,15 +65,6 @@ static KeywordToken *reserved_keywords[] = {
{"nonlocal", 509}, {"nonlocal", 509},
{NULL, -1}, {NULL, -1},
}, },
NULL,
NULL,
NULL,
NULL,
NULL,
(KeywordToken[]) {
{"__new_parser__", 530},
{NULL, -1},
},
}; };
#define file_type 1000 #define file_type 1000
#define interactive_type 1001 #define interactive_type 1001
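The change from n_keyword_lists = 15 to 9 falls out of how the generated table is laid out: reserved_keywords has one slot per keyword length, and with the 14-character '__new_parser__' gone the longest remaining keyword is the 8-character 'nonlocal'. A hypothetical Python sketch of that layout (names such as KEYWORD_TOKENS and by_length are illustrative, not pegen's own):

    # Illustrative sketch only -- not the actual parser generator code.
    # Keyword -> token-type pairs taken from the post-commit hunks above;
    # the rest of the keywords are omitted for brevity.
    KEYWORD_TOKENS = {
        "if": 510, "in": 518, "is": 526, "as": 530, "or": 531,
        "for": 517, "def": 522, "not": 525, "and": 532,
        "nonlocal": 509,
    }

    # One slot per possible keyword length: (longest length) + 1 slots.
    # With '__new_parser__' (length 14) removed, 'nonlocal' (length 8) is the
    # longest keyword, so 9 slots suffice instead of 15.
    n_keyword_lists = max(len(kw) for kw in KEYWORD_TOKENS) + 1
    assert n_keyword_lists == 9

    by_length = [[] for _ in range(n_keyword_lists)]
    for kw, token_type in KEYWORD_TOKENS.items():
        by_length[len(kw)].append((kw, token_type))

    # In the generated C table, lengths with no keyword hold NULL; the slots
    # for lengths 9-14, kept alive only by '__new_parser__', are now gone.
    assert by_length[2] == [("if", 510), ("in", 518), ("is", 526),
                            ("as", 530), ("or", 531)]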
@@ -10567,7 +10558,6 @@ slice_rule(Parser *p)
 //     | 'True'
 //     | 'False'
 //     | 'None'
-//     | '__new_parser__'
 //     | &STRING strings
 //     | NUMBER
 //     | &'(' (tuple | group | genexp)
@@ -10711,30 +10701,6 @@ atom_rule(Parser *p)
         D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
             p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
     }
-    { // '__new_parser__'
-        if (p->error_indicator) {
-            D(p->level--);
-            return NULL;
-        }
-        D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'__new_parser__'"));
-        Token * _keyword;
-        if (
-            (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__'
-        )
-        {
-            D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'__new_parser__'"));
-            _res = RAISE_SYNTAX_ERROR ( "You found it!" );
-            if (_res == NULL && PyErr_Occurred()) {
-                p->error_indicator = 1;
-                D(p->level--);
-                return NULL;
-            }
-            goto done;
-        }
-        p->mark = _mark;
-        D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
-            p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'__new_parser__'"));
-    }
     { // &STRING strings
         if (p->error_indicator) {
             D(p->level--);
@@ -17313,7 +17279,7 @@ _tmp_34_rule(Parser *p)
         Token * _keyword;
         expr_ty z;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531)) // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530)) // token='as'
             &&
             (z = _PyPegen_name_token(p)) // NAME
         )
@@ -17471,7 +17437,7 @@ _tmp_37_rule(Parser *p)
         Token * _keyword;
         expr_ty z;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531)) // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530)) // token='as'
             &&
             (z = _PyPegen_name_token(p)) // NAME
         )
@@ -17971,7 +17937,7 @@ _tmp_46_rule(Parser *p)
         Token * _keyword;
         expr_ty t;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531)) // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530)) // token='as'
             &&
             (t = target_rule(p)) // target
         )
@@ -18086,7 +18052,7 @@ _tmp_48_rule(Parser *p)
         Token * _keyword;
         expr_ty z;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531)) // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530)) // token='as'
             &&
             (z = _PyPegen_name_token(p)) // NAME
         )
@@ -23892,7 +23858,7 @@ _tmp_144_rule(Parser *p)
         Token * _keyword;
         expr_ty c;
         if (
-            (_keyword = _PyPegen_expect_token(p, 532)) // token='or'
+            (_keyword = _PyPegen_expect_token(p, 531)) // token='or'
             &&
             (c = conjunction_rule(p)) // conjunction
         )
@@ -23936,7 +23902,7 @@ _tmp_145_rule(Parser *p)
         Token * _keyword;
         expr_ty c;
         if (
-            (_keyword = _PyPegen_expect_token(p, 533)) // token='and'
+            (_keyword = _PyPegen_expect_token(p, 532)) // token='and'
             &&
             (c = inversion_rule(p)) // inversion
         )
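The remaining hunks are pure renumbering: keyword token types appear to be handed out sequentially by the parser generator, so dropping '__new_parser__' (which held 530) moves 'as', 'or', and 'and' down by one, and every regenerated _PyPegen_expect_token call follows suit. A hypothetical sketch of the effect (FIRST_KEYWORD_TYPE and the keyword ordering are assumptions for illustration only):

    # Hypothetical sketch -- not the real pegen generator, just the numbering idea.
    FIRST_KEYWORD_TYPE = 500  # assumed base value, for illustration only

    def assign_keyword_types(keywords):
        """Give each keyword the next free token type, in first-seen order."""
        return {kw: FIRST_KEYWORD_TYPE + i for i, kw in enumerate(keywords)}

    # Before: '__new_parser__' sits somewhere in the sequence ...
    old = assign_keyword_types(["pass", "break", "__new_parser__", "as", "or", "and"])
    # ... after: it is gone, so every keyword that followed it shifts down by one.
    new = assign_keyword_types(["pass", "break", "as", "or", "and"])

    assert old["as"] - new["as"] == 1    # mirrors 531 -> 530 in the hunks above
    assert old["or"] - new["or"] == 1    # mirrors 532 -> 531
    assert old["and"] - new["and"] == 1  # mirrors 533 -> 532
    assert old["pass"] == new["pass"]    # earlier keywords keep their token type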