mirror of https://github.com/python/cpython
gh-112387: Fix error positions for decoded strings with backwards tokenize errors (#112409)
Signed-off-by: Pablo Galindo <pablogsal@gmail.com>
This commit is contained in:
parent 2c8b191742
commit 45d648597b
@@ -2334,6 +2334,10 @@ func(
 """
         self._check_error(code, "parenthesis '\\)' does not match opening parenthesis '\\['")
 
+        # Examples with encodings
+        s = b'# coding=latin\n(aaaaaaaaaaaaaaaaa\naaaaaaaaaaa\xb5'
+        self._check_error(s, "'\(' was never closed")
+
     def test_error_string_literal(self):
 
         self._check_error("'blech", r"unterminated string literal \(.*\)$")
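For context, here is a minimal standalone sketch of the scenario the new test exercises; the compile() call, the variable name src, and the printed attributes are illustrative assumptions, not part of the commit. The bytes declare a latin-1 coding cookie, so the tokenizer has to decode the source before it can report the unclosed parenthesis.

# Standalone sketch (assumed usage, not taken from the commit): compiling
# bytes with a coding cookie forces the tokenizer to decode the source
# before the error for the unclosed '(' is reported.
src = b'# coding=latin\n(aaaaaaaaaaaaaaaaa\naaaaaaaaaaa\xb5'
try:
    compile(src, "<string>", "exec")
except SyntaxError as exc:
    # The message matches the new test; with this fix the reported
    # position should fall inside the decoded error line.
    print(exc.msg, exc.lineno, exc.offset)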
@@ -0,0 +1,2 @@
+Fix error positions for decoded strings with backwards tokenize errors.
+Patch by Pablo Galindo
@@ -282,6 +282,10 @@ get_error_line_from_tokenizer_buffers(Parser *p, Py_ssize_t lineno)
     Py_ssize_t relative_lineno = p->starting_lineno ? lineno - p->starting_lineno + 1 : lineno;
     const char* buf_end = p->tok->fp_interactive ? p->tok->interactive_src_end : p->tok->inp;
 
+    if (buf_end < cur_line) {
+        buf_end = cur_line + strlen(cur_line);
+    }
+
     for (int i = 0; i < relative_lineno - 1; i++) {
         char *new_line = strchr(cur_line, '\n');
         // The assert is here for debug builds but the conditional that
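As the hunk reads, buf_end comes from the tokenizer's input pointer (or the interactive buffer end), and once the source has been decoded it can compare lower than cur_line; the added clamp keeps the end pointer at or past the current line so the scan below never walks beyond it. The user-visible effect is a "backwards" error position: the report points at the line where the parenthesis was opened, not where tokenizing stopped. A small Python sketch of that behaviour follows, with the source string and the expected line number being my assumptions rather than anything stated in the commit.

# Sketch (assumed behaviour, not taken from the commit): the unclosed '('
# is reported at the line where it was opened, even though the tokenizer
# had already consumed the lines after it.
src = "x = 1\n(2,\n3,\n4,\n"
try:
    compile(src, "<repro>", "exec")
except SyntaxError as exc:
    # Expected to point back at line 2 (the opening parenthesis),
    # not at the last line the tokenizer reached.
    print(exc.lineno, exc.text)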