Format the Python-tokenize module and fix exit path (GH-27935)

Author: Pablo Galindo Salgado
Date:   2021-08-25 13:41:14 +01:00 (committed by GitHub)
parent 33d95c6fac
commit 214c2e5d91
1 changed file with 46 additions and 47 deletions

@@ -4,16 +4,15 @@
 static struct PyModuleDef _tokenizemodule;

 typedef struct {
-    PyTypeObject* TokenizerIter;
+    PyTypeObject *TokenizerIter;
 } tokenize_state;

-static tokenize_state*
-get_tokenize_state(PyObject* module)
-{
-    return (tokenize_state*)PyModule_GetState(module);
+static tokenize_state *
+get_tokenize_state(PyObject *module) {
+    return (tokenize_state *)PyModule_GetState(module);
 }

-#define _tokenize_get_state_by_type(type)                                     \
+#define _tokenize_get_state_by_type(type) \
     get_tokenize_state(_PyType_GetModuleByDef(type, &_tokenizemodule))

 #include "clinic/Python-tokenize.c.h"
@@ -24,9 +23,9 @@ class _tokenizer.tokenizeriter "tokenizeriterobject *" "_tokenize_get_state_by_t
 [clinic start generated code]*/
 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=96d98ee2fef7a8bc]*/

-typedef struct {
-    PyObject_HEAD
-    struct tok_state* tok;
+typedef struct
+{
+    PyObject_HEAD struct tok_state *tok;
 } tokenizeriterobject;

 /*[clinic input]
@@ -40,27 +39,28 @@ static PyObject *
 tokenizeriter_new_impl(PyTypeObject *type, const char *source)
 /*[clinic end generated code: output=7fd9f46cf9263cbb input=4384b368407375c6]*/
 {
-    tokenizeriterobject* self = (tokenizeriterobject*)type->tp_alloc(type, 0);
+    tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
     if (self == NULL) {
         return NULL;
     }
-    PyObject* filename = PyUnicode_FromString("<string>");
+    PyObject *filename = PyUnicode_FromString("<string>");
     if (filename == NULL) {
         return NULL;
     }
     self->tok = PyTokenizer_FromUTF8(source, 1);
     if (self->tok == NULL) {
+        Py_DECREF(filename);
         return NULL;
     }
     self->tok->filename = filename;
-    return (PyObject*)self;
+    return (PyObject *)self;
 }

-static PyObject*
-tokenizeriter_next(tokenizeriterobject* it)
+static PyObject *
+tokenizeriter_next(tokenizeriterobject *it)
 {
-    const char* start;
-    const char* end;
+    const char *start;
+    const char *end;
     int type = PyTokenizer_Get(it->tok, &start, &end);
     if (type == ERRORTOKEN && PyErr_Occurred()) {
         return NULL;
@@ -69,10 +69,11 @@ tokenizeriter_next(tokenizeriterobject* it)
         PyErr_SetString(PyExc_StopIteration, "EOF");
         return NULL;
     }
-    PyObject* str = NULL;
+    PyObject *str = NULL;
     if (start == NULL || end == NULL) {
         str = PyUnicode_FromString("");
-    } else {
+    }
+    else {
         str = PyUnicode_FromStringAndSize(start, end - start);
     }
     if (str == NULL) {
@@ -80,12 +81,12 @@ tokenizeriter_next(tokenizeriterobject* it)
     }
     Py_ssize_t size = it->tok->inp - it->tok->buf;
-    PyObject* line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
+    PyObject *line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
     if (line == NULL) {
         Py_DECREF(str);
         return NULL;
     }
-    const char* line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
+    const char *line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
     int lineno = type == STRING ? it->tok->first_lineno : it->tok->lineno;
     int end_lineno = it->tok->lineno;
     int col_offset = -1;
     int end_col_offset = -1;
@@ -101,41 +102,39 @@ tokenizeriter_next(tokenizeriterobject* it)
 }

 static void
-tokenizeriter_dealloc(tokenizeriterobject* it)
+tokenizeriter_dealloc(tokenizeriterobject *it)
 {
-    PyTypeObject* tp = Py_TYPE(it);
+    PyTypeObject *tp = Py_TYPE(it);
     PyTokenizer_Free(it->tok);
     tp->tp_free(it);
     Py_DECREF(tp);
 }

 static PyType_Slot tokenizeriter_slots[] = {
-        {Py_tp_new, tokenizeriter_new},
-        {Py_tp_dealloc, tokenizeriter_dealloc},
-        {Py_tp_getattro, PyObject_GenericGetAttr},
-        {Py_tp_iter, PyObject_SelfIter},
-        {Py_tp_iternext, tokenizeriter_next},
-        {0, NULL},
+    {Py_tp_new, tokenizeriter_new},
+    {Py_tp_dealloc, tokenizeriter_dealloc},
+    {Py_tp_getattro, PyObject_GenericGetAttr},
+    {Py_tp_iter, PyObject_SelfIter},
+    {Py_tp_iternext, tokenizeriter_next},
+    {0, NULL},
 };

 static PyType_Spec tokenizeriter_spec = {
-        .name = "_tokenize.TokenizerIter",
-        .basicsize = sizeof(tokenizeriterobject),
-        .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
-        .slots = tokenizeriter_slots,
+    .name = "_tokenize.TokenizerIter",
+    .basicsize = sizeof(tokenizeriterobject),
+    .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
+    .slots = tokenizeriter_slots,
 };

 static int
-tokenizemodule_exec(PyObject* m)
+tokenizemodule_exec(PyObject *m)
 {
-    tokenize_state* state = get_tokenize_state(m);
+    tokenize_state *state = get_tokenize_state(m);
     if (state == NULL) {
         return -1;
     }
-    state->TokenizerIter = (PyTypeObject *)PyType_FromModuleAndSpec(
-        m, &tokenizeriter_spec, NULL);
+    state->TokenizerIter = (PyTypeObject *)PyType_FromModuleAndSpec(m, &tokenizeriter_spec, NULL);
     if (state->TokenizerIter == NULL) {
         return -1;
     }
@@ -147,11 +146,11 @@ tokenizemodule_exec(PyObject* m)
 }

 static PyMethodDef tokenize_methods[] = {
-        {NULL, NULL, 0, NULL} /* Sentinel */
+    {NULL, NULL, 0, NULL} /* Sentinel */
 };

 static PyModuleDef_Slot tokenizemodule_slots[] = {
-        {Py_mod_exec, tokenizemodule_exec},
+    {Py_mod_exec, tokenizemodule_exec},
     {0, NULL}
 };
@@ -178,14 +177,14 @@ tokenizemodule_free(void *m)
 }

 static struct PyModuleDef _tokenizemodule = {
-        PyModuleDef_HEAD_INIT,
-        .m_name = "_tokenize",
-        .m_size = sizeof(tokenize_state),
-        .m_slots = tokenizemodule_slots,
-        .m_methods = tokenize_methods,
-        .m_traverse = tokenizemodule_traverse,
-        .m_clear = tokenizemodule_clear,
-        .m_free = tokenizemodule_free,
+    PyModuleDef_HEAD_INIT,
+    .m_name = "_tokenize",
+    .m_size = sizeof(tokenize_state),
+    .m_slots = tokenizemodule_slots,
+    .m_methods = tokenize_methods,
+    .m_traverse = tokenizemodule_traverse,
+    .m_clear = tokenizemodule_clear,
+    .m_free = tokenizemodule_free,
 };

 PyMODINIT_FUNC
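
The "fix exit path" in the title refers to the new Py_DECREF(filename) in
tokenizeriter_new_impl(): before this commit, the early return taken when
PyTokenizer_FromUTF8() failed leaked the "<string>" filename object. Below is
a minimal, self-contained sketch of the ownership rule being enforced, not the
module's actual code; Tokenizer and make_tokenizer() are hypothetical
stand-ins for the private struct tok_state and PyTokenizer_FromUTF8().

#include <Python.h>
#include <stdlib.h>

typedef struct {
    PyObject *filename;   /* owned once assigned */
} Tokenizer;

/* Hypothetical constructor that can fail and return NULL. */
static Tokenizer *
make_tokenizer(const char *source)
{
    return source != NULL ? (Tokenizer *)calloc(1, sizeof(Tokenizer)) : NULL;
}

static Tokenizer *
tokenizer_from_source(const char *source)
{
    PyObject *filename = PyUnicode_FromString("<string>");
    if (filename == NULL) {
        return NULL;              /* nothing owned yet: plain return is fine */
    }
    Tokenizer *tok = make_tokenizer(source);
    if (tok == NULL) {
        Py_DECREF(filename);      /* the fix: release the reference on this path */
        return NULL;
    }
    tok->filename = filename;     /* ownership transferred; no decref after this */
    return tok;
}

int
main(void)
{
    Py_Initialize();
    Tokenizer *tok = tokenizer_from_source("x = 1");
    if (tok != NULL) {
        Py_DECREF(tok->filename);
        free(tok);
    }
    return Py_FinalizeEx() < 0 ? 1 : 0;
}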