gh-104016: Fixed off-by-one error in the f-string tokenizer (#104047)

Co-authored-by: sunmy2019 <59365878+sunmy2019@users.noreply.github.com>
Co-authored-by: Ken Jin <kenjin@python.org>
Co-authored-by: Pablo Galindo <pablogsal@gmail.com>
Author: jx124
Date: 2023-05-02 03:15:47 +08:00 (committed by GitHub)
Commit: 5078eedc5b
Parent: 2d526cd32f
3 changed files with 25 additions and 5 deletions
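
For context, a minimal sketch (assuming a CPython build that includes this fix) of the user-visible effect: an f-string nested deeper than the new limit now fails with a clean SyntaxError instead of the tokenizer running past the end of its mode stack.

    # Build an f-string nested deeper than the new MAXFSTRINGLEVEL limit (150).
    src = "1+1"
    for _ in range(160):
        src = f'f"{{{src}}}"'

    try:
        compile(src, "<repro>", "eval")
    except SyntaxError as exc:
        print(exc)   # too many nested f-strings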

Lib/test/test_fstring.py

@@ -566,6 +566,22 @@ x = (
                             "f-string: expressions nested too deeply",
                             ['f"{1+2:{1+2:{1+1:{1}}}}"'])
 
+        def create_nested_fstring(n):
+            if n == 0:
+                return "1+1"
+            prev = create_nested_fstring(n-1)
+            return f'f"{{{prev}}}"'
+
+        self.assertAllRaise(SyntaxError,
+                            "too many nested f-strings",
+                            [create_nested_fstring(160)])
+
+    def test_syntax_error_in_nested_fstring(self):
+        # See gh-104016 for more information on this crash
+        self.assertAllRaise(SyntaxError,
+                            "invalid syntax",
+                            ['f"{1 1:' + ('{f"1:' * 199)])
+
     def test_double_braces(self):
         self.assertEqual(f'{{', '{')
         self.assertEqual(f'a{{', 'a{')
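
For reference, a quick sketch of what the create_nested_fstring helper above expands to at small depths; reusing the same double quotes inside the outer f-string (depth 2 and beyond) is only tokenizable under PEP 701, which is exactly the code path the new limit protects.

    def create_nested_fstring(n):
        if n == 0:
            return "1+1"
        prev = create_nested_fstring(n - 1)
        return f'f"{{{prev}}}"'

    print(create_nested_fstring(1))   # f"{1+1}"
    print(create_nested_fstring(2))   # f"{f"{1+1}"}"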

Parser/tokenizer.c

@@ -43,12 +43,12 @@
 #ifdef Py_DEBUG
 static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) {
     assert(tok->tok_mode_stack_index >= 0);
-    assert(tok->tok_mode_stack_index < MAXLEVEL);
+    assert(tok->tok_mode_stack_index < MAXFSTRINGLEVEL);
     return &(tok->tok_mode_stack[tok->tok_mode_stack_index]);
 }
 static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) {
     assert(tok->tok_mode_stack_index >= 0);
-    assert(tok->tok_mode_stack_index < MAXLEVEL);
+    assert(tok->tok_mode_stack_index + 1 < MAXFSTRINGLEVEL);
     return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]);
 }
 #else
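
The second assert is where the off-by-one lived: TOK_NEXT_MODE pre-increments the index before handing out a slot, so the element actually written is tok_mode_stack[index + 1]. A toy model in Python (illustrative names, not the CPython API) of why the bound has to be checked against index + 1:

    MAXFSTRINGLEVEL = 150   # mirrors the new limit in the tokenizer header

    class ModeStack:
        def __init__(self):
            self.modes = [None] * MAXFSTRINGLEVEL
            self.index = 0                    # slot 0 holds the base mode

        def next_mode(self):
            # The push targets modes[self.index + 1]. Checking only
            # "self.index < MAXFSTRINGLEVEL" would still allow index to reach
            # MAXFSTRINGLEVEL - 1 here; in the C code the pre-incremented
            # access would then land one element past the end of the array.
            assert self.index + 1 < MAXFSTRINGLEVEL, "too many nested f-strings"
            self.index += 1
            return self.modes[self.index]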
@@ -2235,6 +2235,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
         p_start = tok->start;
         p_end = tok->cur;
 
+        if (tok->tok_mode_stack_index + 1 >= MAXFSTRINGLEVEL) {
+            return MAKE_TOKEN(syntaxerror(tok, "too many nested f-strings"));
+        }
         tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok);
         the_current_tok->kind = TOK_FSTRING_MODE;
         the_current_tok->f_string_quote = quote;
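
The explicit check matters outside debug builds, where the asserts compile away: without it, TOK_NEXT_MODE would still walk off the stack. The crash input from gh-104016 (mirrored by test_syntax_error_in_nested_fstring above) now surfaces as an ordinary SyntaxError; a sketch, again assuming a build with this fix:

    # Each repeated '{f"1:' opens another f-string mode without ever closing it,
    # which previously overflowed tok_mode_stack before any error was reported.
    src = 'f"{1 1:' + ('{f"1:' * 199)
    try:
        compile(src, "<repro>", "eval")
    except SyntaxError as exc:
        print(exc)   # e.g. "invalid syntax" (exact message may vary)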

Parser/tokenizer.h

@@ -10,8 +10,9 @@ extern "C" {
 #include "pycore_token.h" /* For token types */
 
 #define MAXINDENT 100 /* Max indentation level */
 #define MAXLEVEL 200 /* Max parentheses level */
+#define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */
 
 enum decoding_state {
     STATE_INIT,
@@ -123,7 +124,7 @@ struct tok_state {
     enum interactive_underflow_t interactive_underflow;
     int report_warnings;
     // TODO: Factor this into its own thing
-    tokenizer_mode tok_mode_stack[MAXLEVEL];
+    tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL];
     int tok_mode_stack_index;
     int tok_report_warnings;
 #ifdef Py_DEBUG