From bcd7deed9118e365c1225de2a2e1a81bf988c6ab Mon Sep 17 00:00:00 2001
From: Lysandros Nikolaou
Date: Thu, 11 Jun 2020 19:09:21 +0300
Subject: [PATCH] bpo-40939: Remove PEG parser easter egg (__new_parser__)
 (#20802)

It no longer serves a purpose (there's only one parser) and having "new"
in any name will eventually look odd. Also, it impinges on a potential
sub-namespace, `__new_...__`.
---
 Grammar/python.gram  |  1 -
 Lib/keyword.py       |  1 -
 Lib/pydoc.py         |  1 -
 Parser/pegen/parse.c | 54 ++++++++------------------------------------
 4 files changed, 10 insertions(+), 47 deletions(-)

diff --git a/Grammar/python.gram b/Grammar/python.gram
index 2c350ef68a2..745c14ebb98 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -477,7 +477,6 @@ atom[expr_ty]:
     | 'True' { _Py_Constant(Py_True, NULL, EXTRA) }
     | 'False' { _Py_Constant(Py_False, NULL, EXTRA) }
     | 'None' { _Py_Constant(Py_None, NULL, EXTRA) }
-    | '__new_parser__' { RAISE_SYNTAX_ERROR("You found it!") }
     | &STRING strings
     | NUMBER
     | &'(' (tuple | group | genexp)
diff --git a/Lib/keyword.py b/Lib/keyword.py
index afc3db3942c..b6a99825702 100644
--- a/Lib/keyword.py
+++ b/Lib/keyword.py
@@ -19,7 +19,6 @@ kwlist = [
     'False',
     'None',
     'True',
-    '__new_parser__',
     'and',
     'as',
     'assert',
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index a5368bf8bfe..628f9fc7d1d 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1817,7 +1817,6 @@ class Helper:
         'False': '',
         'None': '',
         'True': '',
-        '__new_parser__': '',
         'and': 'BOOLEAN',
         'as': 'with',
         'assert': ('assert', ''),
diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c
index 4f13bf772f2..d28e6c83aad 100644
--- a/Parser/pegen/parse.c
+++ b/Parser/pegen/parse.c
@@ -7,7 +7,7 @@ extern int Py_DebugFlag;
 #else
 #define D(x)
 #endif
-static const int n_keyword_lists = 15;
+static const int n_keyword_lists = 9;
 static KeywordToken *reserved_keywords[] = {
     NULL,
     NULL,
@@ -15,8 +15,8 @@ static KeywordToken *reserved_keywords[] = {
         {"if", 510},
         {"in", 518},
         {"is", 526},
-        {"as", 531},
-        {"or", 532},
+        {"as", 530},
+        {"or", 531},
         {NULL, -1},
     },
     (KeywordToken[]) {
@@ -25,7 +25,7 @@ static KeywordToken *reserved_keywords[] = {
         {"for", 517},
         {"def", 522},
         {"not", 525},
-        {"and", 533},
+        {"and", 532},
         {NULL, -1},
     },
     (KeywordToken[]) {
@@ -65,15 +65,6 @@ static KeywordToken *reserved_keywords[] = {
         {"nonlocal", 509},
         {NULL, -1},
     },
-    NULL,
-    NULL,
-    NULL,
-    NULL,
-    NULL,
-    (KeywordToken[]) {
-        {"__new_parser__", 530},
-        {NULL, -1},
-    },
 };
 #define file_type 1000
 #define interactive_type 1001
@@ -10567,7 +10558,6 @@ slice_rule(Parser *p)
 //     | 'True'
 //     | 'False'
 //     | 'None'
-//     | '__new_parser__'
 //     | &STRING strings
 //     | NUMBER
 //     | &'(' (tuple | group | genexp)
@@ -10711,30 +10701,6 @@ atom_rule(Parser *p)
         D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
                   p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
     }
-    { // '__new_parser__'
-        if (p->error_indicator) {
-            D(p->level--);
-            return NULL;
-        }
-        D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'__new_parser__'"));
-        Token * _keyword;
-        if (
-            (_keyword = _PyPegen_expect_token(p, 530))  // token='__new_parser__'
-        )
-        {
-            D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'__new_parser__'"));
-            _res = RAISE_SYNTAX_ERROR ( "You found it!" );
-            if (_res == NULL && PyErr_Occurred()) {
-                p->error_indicator = 1;
-                D(p->level--);
-                return NULL;
-            }
-            goto done;
-        }
-        p->mark = _mark;
-        D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
-                  p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'__new_parser__'"));
-    }
     { // &STRING strings
         if (p->error_indicator) {
             D(p->level--);
@@ -17313,7 +17279,7 @@ _tmp_34_rule(Parser *p)
         Token * _keyword;
         expr_ty z;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531))  // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530))  // token='as'
             &&
             (z = _PyPegen_name_token(p))  // NAME
         )
@@ -17471,7 +17437,7 @@ _tmp_37_rule(Parser *p)
         Token * _keyword;
         expr_ty z;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531))  // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530))  // token='as'
             &&
             (z = _PyPegen_name_token(p))  // NAME
         )
@@ -17971,7 +17937,7 @@ _tmp_46_rule(Parser *p)
         Token * _keyword;
         expr_ty t;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531))  // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530))  // token='as'
             &&
             (t = target_rule(p))  // target
         )
@@ -18086,7 +18052,7 @@ _tmp_48_rule(Parser *p)
         Token * _keyword;
         expr_ty z;
         if (
-            (_keyword = _PyPegen_expect_token(p, 531))  // token='as'
+            (_keyword = _PyPegen_expect_token(p, 530))  // token='as'
             &&
             (z = _PyPegen_name_token(p))  // NAME
         )
@@ -23892,7 +23858,7 @@ _tmp_144_rule(Parser *p)
         Token * _keyword;
         expr_ty c;
         if (
-            (_keyword = _PyPegen_expect_token(p, 532))  // token='or'
+            (_keyword = _PyPegen_expect_token(p, 531))  // token='or'
             &&
             (c = conjunction_rule(p))  // conjunction
         )
@@ -23936,7 +23902,7 @@ _tmp_145_rule(Parser *p)
         Token * _keyword;
         expr_ty c;
         if (
-            (_keyword = _PyPegen_expect_token(p, 533))  // token='and'
+            (_keyword = _PyPegen_expect_token(p, 532))  // token='and'
             &&
             (c = inversion_rule(p))  // inversion
         )