mirror of https://github.com/python/cpython
gh-92651: Remove the Include/token.h header file (#92652)
Remove the token.h header file. There was never any public tokenizer C API.
The token.h header file was only designed to be used by Python internals.

Move Include/token.h to Include/internal/pycore_token.h. Including this header
file now requires that the Py_BUILD_CORE macro is defined. It no longer checks
for the Py_LIMITED_API macro.

Rename functions:

* PyToken_OneChar() => _PyToken_OneChar()
* PyToken_TwoChars() => _PyToken_TwoChars()
* PyToken_ThreeChars() => _PyToken_ThreeChars()
parent b69297ea23
commit da5727a120
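A rough sketch of what the change means for code inside CPython (not part of this commit; the helper name classify_percent() is invented for illustration):

    /* Sketch only; assumes compilation as part of CPython's own build,
     * where Py_BUILD_CORE is defined and Include/internal is on the
     * include path.  Otherwise pycore_token.h refuses to compile. */
    #define Py_BUILD_CORE 1          /* normally set by the build system */
    #include "Python.h"
    #include "pycore_token.h"        /* was: #include "token.h" */

    static int
    classify_percent(void)
    {
        /* Same helper as before, now spelled with a leading underscore. */
        return _PyToken_OneChar('%');   /* PERCENT */
    }
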
@@ -139,3 +139,8 @@ Deprecated
 Removed
 -------
 
+* Remove the ``token.h`` header file. There was never any public tokenizer C
+  API. The ``token.h`` header file was only designed to be used by Python
+  internals.
+  (Contributed by Victor Stinner in :gh:`92651`.)
+

@@ -1,13 +1,16 @@
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 /* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
 #ifdef __cplusplus
 extern "C" {
 #endif
 
+#ifndef Py_BUILD_CORE
+#  error "this header requires Py_BUILD_CORE define"
+#endif
+
 #undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
 
 #define ENDMARKER 0

@@ -85,13 +88,13 @@ extern "C" {
                                 (x) == DEDENT)
 
 
+// Symbols exported for test_peg_generator
 PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
 
 #ifdef __cplusplus
 }
 #endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif // !Py_INTERNAL_TOKEN_H

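The PyAPI_DATA/PyAPI_FUNC markers above keep _PyParser_TokenNames and the renamed helpers visible to the C extension built by test_peg_generator. A hedged sketch of that kind of consumer (print_arrow_token() is an invented name; it assumes a core-style build where Py_BUILD_CORE is defined):

    /* Sketch only; assumes a core-style build (Py_BUILD_CORE defined). */
    #define Py_BUILD_CORE 1
    #include "Python.h"
    #include "pycore_token.h"
    #include <stdio.h>

    static void
    print_arrow_token(void)
    {
        int tok = _PyToken_TwoChars('-', '>');   /* "->" is the RARROW token */
        printf("%d %s\n", tok, _PyParser_TokenNames[tok]);
    }
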
@@ -1325,11 +1325,11 @@ regen-token:
         $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \
                 $(srcdir)/Grammar/Tokens \
                 $(srcdir)/Doc/library/token-list.inc
-        # Regenerate Include/token.h from Grammar/Tokens
+        # Regenerate Include/internal/pycore_token.h from Grammar/Tokens
         # using Tools/scripts/generate_token.py
         $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \
                 $(srcdir)/Grammar/Tokens \
-                $(srcdir)/Include/token.h
+                $(srcdir)/Include/internal/pycore_token.h
         # Regenerate Parser/token.c from Grammar/Tokens
         # using Tools/scripts/generate_token.py
         $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \

@@ -1521,7 +1521,6 @@ PYTHON_HEADERS= \
         $(srcdir)/Include/structmember.h \
         $(srcdir)/Include/structseq.h \
         $(srcdir)/Include/sysmodule.h \
-        $(srcdir)/Include/token.h \
         $(srcdir)/Include/traceback.h \
         $(srcdir)/Include/tracemalloc.h \
         $(srcdir)/Include/tupleobject.h \

@@ -1632,6 +1631,7 @@ PYTHON_HEADERS= \
         $(srcdir)/Include/internal/pycore_structseq.h \
         $(srcdir)/Include/internal/pycore_symtable.h \
         $(srcdir)/Include/internal/pycore_sysmodule.h \
+        $(srcdir)/Include/internal/pycore_token.h \
         $(srcdir)/Include/internal/pycore_traceback.h \
         $(srcdir)/Include/internal/pycore_tuple.h \
         $(srcdir)/Include/internal/pycore_typeobject.h \

@@ -0,0 +1,3 @@
+Remove the ``token.h`` header file. There was never any public tokenizer C
+API. The ``token.h`` header file was only designed to be used by Python
+internals. Patch by Victor Stinner.

@@ -244,6 +244,7 @@
     <ClInclude Include="..\Include\internal\pycore_structseq.h" />
     <ClInclude Include="..\Include\internal\pycore_sysmodule.h" />
     <ClInclude Include="..\Include\internal\pycore_symtable.h" />
+    <ClInclude Include="..\Include\internal\pycore_token.h" />
     <ClInclude Include="..\Include\internal\pycore_traceback.h" />
     <ClInclude Include="..\Include\internal\pycore_tuple.h" />
     <ClInclude Include="..\Include\internal\pycore_typeobject.h" />

@@ -291,7 +292,6 @@
     <ClInclude Include="..\Include\structseq.h" />
     <ClInclude Include="..\Include\symtable.h" />
     <ClInclude Include="..\Include\sysmodule.h" />
-    <ClInclude Include="..\Include\token.h" />
     <ClInclude Include="..\Include\traceback.h" />
     <ClInclude Include="..\Include\tracemalloc.h" />
     <ClInclude Include="..\Include\tupleobject.h" />

@@ -213,9 +213,6 @@
     <ClInclude Include="..\Include\sysmodule.h">
       <Filter>Include</Filter>
     </ClInclude>
-    <ClInclude Include="..\Include\token.h">
-      <Filter>Include</Filter>
-    </ClInclude>
     <ClInclude Include="..\Include\traceback.h">
       <Filter>Include</Filter>
     </ClInclude>

@@ -633,6 +630,9 @@
     <ClInclude Include="..\Include\internal\pycore_symtable.h">
       <Filter>Include\internal</Filter>
     </ClInclude>
+    <ClInclude Include="..\Include\internal\pycore_token.h">
+      <Filter>Include\internal</Filter>
+    </ClInclude>
     <ClInclude Include="..\Include\internal\pycore_traceback.h">
       <Filter>Include\internal</Filter>
     </ClInclude>

@@ -19,7 +19,7 @@
     <_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc">
       <Format>rst</Format>
     </_TokenOutputs>
-    <_TokenOutputs Include="$(PySourcePath)Include\token.h">
+    <_TokenOutputs Include="$(PySourcePath)Include\internal\pycore_token.h">
       <Format>h</Format>
     </_TokenOutputs>
     <_TokenOutputs Include="$(PySourcePath)Parser\token.c">

@@ -3,8 +3,8 @@
 
 #define PY_SSIZE_T_CLEAN
 #include <Python.h>
-#include <token.h>
 #include <pycore_ast.h>
+#include <pycore_token.h>
 
 #if 0
 #define PyPARSE_YIELD_IS_KEYWORD 0x0001

@@ -1,7 +1,7 @@
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 #include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
 
 /* Token names */
 

@@ -76,7 +76,7 @@ const char * const _PyParser_TokenNames[] = {
 /* Return the token corresponding to a single character */
 
 int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
 {
     switch (c1) {
     case '%': return PERCENT;

@@ -107,7 +107,7 @@ PyToken_OneChar(int c1)
 }
 
 int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
 {
     switch (c1) {
     case '!':

@@ -191,7 +191,7 @@ PyToken_TwoChars(int c1, int c2)
 }
 
 int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
 {
     switch (c1) {
     case '*':

@@ -1992,10 +1992,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
     /* Check for two-character token */
     {
         int c2 = tok_nextc(tok);
-        int token = PyToken_TwoChars(c, c2);
+        int token = _PyToken_TwoChars(c, c2);
         if (token != OP) {
             int c3 = tok_nextc(tok);
-            int token3 = PyToken_ThreeChars(c, c2, c3);
+            int token3 = _PyToken_ThreeChars(c, c2, c3);
             if (token3 != OP) {
                 token = token3;
             }

@@ -2059,7 +2059,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
     /* Punctuation character */
     *p_start = tok->start;
     *p_end = tok->cur;
-    return PyToken_OneChar(c);
+    return _PyToken_OneChar(c);
 }
 
 int

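For context on the lookahead in the two tok_get() hunks above: the tokenizer first classifies a single character, then tries to upgrade the match with the two- and three-character helpers, falling back when they return OP. A small sketch, not part of the patch (lookahead_example() is an invented name):

    /* Sketch only; assumes a core-style build (Py_BUILD_CORE defined). */
    #define Py_BUILD_CORE 1
    #include "Python.h"
    #include "pycore_token.h"
    #include <assert.h>

    static void
    lookahead_example(void)
    {
        /* For the source text "**=", tok_get() reads '*' and then peeks
         * ahead twice, upgrading the token at each step. */
        assert(_PyToken_OneChar('*') == STAR);
        assert(_PyToken_TwoChars('*', '*') == DOUBLESTAR);
        assert(_PyToken_ThreeChars('*', '*', '=') == DOUBLESTAREQUAL);
        /* A pair that forms no multi-character token yields OP, so the
         * tokenizer keeps the shorter match. */
        assert(_PyToken_TwoChars('+', ')') == OP);
    }
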
@@ -8,7 +8,7 @@ extern "C" {
 
 /* Tokenizer interface */
 
-#include "token.h" /* For token types */
+#include "pycore_token.h" /* For token types */
 
 #define MAXINDENT 100 /* Max indentation level */
 #define MAXLEVEL 200 /* Max parentheses level */

@@ -24,7 +24,6 @@
 #include "pycore_sysmodule.h" // _PySys_Audit()
 #include "pycore_traceback.h" // _PyTraceBack_Print_Indented()
 
-#include "token.h" // INDENT
 #include "errcode.h" // E_EOF
 #include "marshal.h" // PyMarshal_ReadLongFromFile()
 

@@ -51,13 +51,16 @@ token_h_template = """\
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 /* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
 #ifdef __cplusplus
 extern "C" {
 #endif
 
+#ifndef Py_BUILD_CORE
+#  error "this header requires Py_BUILD_CORE define"
+#endif
+
 #undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
 
 %s\

@@ -75,19 +78,19 @@ extern "C" {
                                 (x) == DEDENT)
 
 
+// Symbols exported for test_peg_generator
 PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
 
 #ifdef __cplusplus
 }
 #endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif // !Py_INTERNAL_TOKEN_H
 """
 
-def make_h(infile, outfile='Include/token.h'):
+def make_h(infile, outfile='Include/internal/pycore_token.h'):
     tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
 
     defines = []

@@ -106,7 +109,7 @@ token_c_template = """\
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 #include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
 
 /* Token names */
 

@@ -117,21 +120,21 @@ const char * const _PyParser_TokenNames[] = {
 /* Return the token corresponding to a single character */
 
 int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
 {
 %s\
     return OP;
 }
 
 int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
 {
 %s\
     return OP;
 }
 
 int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
 {
 %s\
     return OP;