Ensure the str member of the tokenizer is always initialised (GH-29681)

Pablo Galindo Salgado, 2021-11-21 02:06:39 +00:00 (committed by GitHub)
parent 4cf65240ae
commit 4f006a789a
3 changed files with 3 additions and 3 deletions

@@ -245,7 +245,7 @@ get_error_line_from_tokenizer_buffers(Parser *p, Py_ssize_t lineno)
      * (multi-line) statement are stored in p->tok->interactive_src_start.
      * If not, we're parsing from a string, which means that the whole source
      * is stored in p->tok->str. */
-    assert(p->tok->fp == NULL || p->tok->fp == stdin);
+    assert((p->tok->fp == NULL && p->tok->str != NULL) || p->tok->fp == stdin);
     char *cur_line = p->tok->fp_interactive ? p->tok->interactive_src_start : p->tok->str;
     assert(cur_line != NULL);
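
For context, a minimal, self-contained sketch of the logic this hunk touches. It is not CPython's code: struct toy_tok_state and pick_error_line are hypothetical stand-ins for tok_state and get_error_line_from_tokenizer_buffers, kept just detailed enough to show why the assert now also requires str to be non-NULL when no file is attached.

/* Minimal sketch, not CPython's code: all names here are hypothetical. */
#include <assert.h>
#include <stdio.h>

struct toy_tok_state {
    FILE *fp;                    /* NULL when tokenizing from a string   */
    int   fp_interactive;        /* non-zero for the interactive prompt  */
    char *interactive_src_start; /* source of the current REPL statement */
    char *str;                   /* whole source when parsing a string   */
};

/* Mirrors the selection logic above: interactive input comes from
 * interactive_src_start, string input from str.  If str were left
 * uninitialised (the bug fixed by GH-29681), the second branch could
 * hand back garbage or NULL and the caller would crash. */
static char *pick_error_line(struct toy_tok_state *tok)
{
    assert((tok->fp == NULL && tok->str != NULL) || tok->fp == stdin);
    char *cur_line = tok->fp_interactive ? tok->interactive_src_start : tok->str;
    assert(cur_line != NULL);
    return cur_line;
}

int main(void)
{
    struct toy_tok_state tok = {NULL, 0, NULL, "x = (1 +"};
    printf("%s\n", pick_error_line(&tok));
    return 0;
}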

@@ -87,7 +87,7 @@ tok_new(void)
     tok->async_def_indent = 0;
     tok->async_def_nl = 0;
     tok->interactive_underflow = IUNDERFLOW_NORMAL;
+    tok->str = NULL;
     return tok;
 }
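
tok_new() is a constructor-style allocator, and heap memory is not zeroed, so any pointer member it forgets to set holds garbage. Below is a minimal sketch of that pattern with hypothetical names (toy_tok_state, toy_tok_new) and plain malloc standing in for CPython's allocator; the point is that an explicit NULL makes later `str != NULL` checks meaningful.

/* Minimal sketch, assuming a plain malloc-based allocator; names are
 * hypothetical, not CPython's API. */
#include <stdio.h>
#include <stdlib.h>

struct toy_tok_state {
    char *str;   /* source string, or NULL when reading from a file */
    char *input; /* newline-translated copy, or NULL                */
};

static struct toy_tok_state *toy_tok_new(void)
{
    struct toy_tok_state *tok = malloc(sizeof(*tok));
    if (tok == NULL) {
        return NULL;
    }
    /* Explicitly NULL every pointer member (as GH-29681 does for str),
     * so "tok->str != NULL" reliably means "we have a source string". */
    tok->str = NULL;
    tok->input = NULL;
    return tok;
}

static void toy_tok_free(struct toy_tok_state *tok)
{
    if (tok != NULL) {
        free(tok->str);
        free(tok->input);
        free(tok);
    }
}

int main(void)
{
    struct toy_tok_state *tok = toy_tok_new();
    if (tok != NULL) {
        /* Without the NULL initialisation this check would read garbage. */
        printf("tokenizing from %s\n", tok->str != NULL ? "a string" : "a file");
        toy_tok_free(tok);
    }
    return 0;
}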

@@ -71,7 +71,7 @@ struct tok_state {
     PyObject *decoding_readline; /* open(...).readline */
     PyObject *decoding_buffer;
     const char* enc; /* Encoding for the current str. */
-    char* str;
+    char* str; /* Source string being tokenized (if tokenizing from a string) */
     char* input; /* Tokenizer's newline translated copy of the string. */
     int type_comments; /* Whether to look for type comments */
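
The new comment distinguishes str (the original source, when tokenizing from a string) from input (the newline-translated copy the tokenizer works on). The sketch below shows what such a translation step can look like, assuming the translation is "\r\n" and lone "\r" to "\n"; the helper name is hypothetical, not the tokenizer's real function.

/* Minimal sketch with a hypothetical helper name: build a heap copy of
 * `str` with newlines normalised to '\n', the role played by `input`. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *translate_newlines_copy(const char *str)
{
    /* The copy can only shrink ("\r\n" -> "\n"), so len + 1 is enough. */
    char *input = malloc(strlen(str) + 1);
    if (input == NULL) {
        return NULL;
    }
    char *out = input;
    for (const char *in = str; *in != '\0'; in++) {
        if (*in == '\r') {
            if (in[1] == '\n') {
                in++;          /* collapse "\r\n" into a single '\n' */
            }
            *out++ = '\n';
        }
        else {
            *out++ = *in;
        }
    }
    *out = '\0';
    return input;
}

int main(void)
{
    const char *str = "x = 1\r\ny = 2\r";
    char *input = translate_newlines_copy(str);
    if (input != NULL) {
        printf("%s", input);   /* prints "x = 1\ny = 2\n" */
        free(input);
    }
    return 0;
}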