diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index 461d9db7934..bc354c36cda 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -139,3 +139,8 @@ Deprecated
 
 Removed
 -------
+
+* Remove the ``token.h`` header file. There was never any public tokenizer C
+  API. The ``token.h`` header file was only designed to be used by Python
+  internals.
+  (Contributed by Victor Stinner in :gh:`92651`.)
diff --git a/Include/token.h b/Include/internal/pycore_token.h
similarity index 87%
rename from Include/token.h
rename to Include/internal/pycore_token.h
index eb1b9ea47b4..f9b8240e216 100644
--- a/Include/token.h
+++ b/Include/internal/pycore_token.h
@@ -1,13 +1,16 @@
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 /* Token types */
 
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
 #ifdef __cplusplus
 extern "C" {
 #endif
 
+#ifndef Py_BUILD_CORE
+#  error "this header requires Py_BUILD_CORE define"
+#endif
+
 #undef TILDE   /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
 
 #define ENDMARKER       0
@@ -85,13 +88,13 @@ extern "C" {
        (x) == DEDENT)
 
 
+// Symbols exported for test_peg_generator
 PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
 
 #ifdef __cplusplus
 }
 #endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif  // !Py_INTERNAL_TOKEN_H
diff --git a/Makefile.pre.in b/Makefile.pre.in
index e45d4fe3ecb..869c78ee0d3 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1325,11 +1325,11 @@ regen-token:
 	$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \
 		$(srcdir)/Grammar/Tokens \
 		$(srcdir)/Doc/library/token-list.inc
-	# Regenerate Include/token.h from Grammar/Tokens
+	# Regenerate Include/internal/pycore_token.h from Grammar/Tokens
 	# using Tools/scripts/generate_token.py
 	$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \
 		$(srcdir)/Grammar/Tokens \
-		$(srcdir)/Include/token.h
+		$(srcdir)/Include/internal/pycore_token.h
 	# Regenerate Parser/token.c from Grammar/Tokens
 	# using Tools/scripts/generate_token.py
 	$(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \
@@ -1521,7 +1521,6 @@ PYTHON_HEADERS= \
 		$(srcdir)/Include/structmember.h \
 		$(srcdir)/Include/structseq.h \
 		$(srcdir)/Include/sysmodule.h \
-		$(srcdir)/Include/token.h \
 		$(srcdir)/Include/traceback.h \
 		$(srcdir)/Include/tracemalloc.h \
 		$(srcdir)/Include/tupleobject.h \
@@ -1632,6 +1631,7 @@ PYTHON_HEADERS= \
 		$(srcdir)/Include/internal/pycore_structseq.h \
 		$(srcdir)/Include/internal/pycore_symtable.h \
 		$(srcdir)/Include/internal/pycore_sysmodule.h \
+		$(srcdir)/Include/internal/pycore_token.h \
 		$(srcdir)/Include/internal/pycore_traceback.h \
 		$(srcdir)/Include/internal/pycore_tuple.h \
 		$(srcdir)/Include/internal/pycore_typeobject.h \
diff --git a/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst b/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst
new file mode 100644
index 00000000000..60a8818e46b
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst
@@ -0,0 +1,3 @@
+Remove the ``token.h`` header file. There was never any public tokenizer C
+API. The ``token.h`` header file was only designed to be used by Python
+internals. Patch by Victor Stinner.
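(Annotation, not part of the patch: the Py_BUILD_CORE guard added above means
the generated token constants are now reachable only from code compiled as
part of CPython itself. A minimal sketch of the post-patch contract, assuming
a core build where the build system already defines Py_BUILD_CORE; the
classify() helper is hypothetical, for illustration only.)

    /* Sketch: pycore_token.h now refuses to compile unless Py_BUILD_CORE is
     * defined, which Makefile.pre.in and pythoncore.vcxproj do for core
     * sources; third-party extensions are not expected to define it. */
    #include "Python.h"
    #include "pycore_token.h"   /* hits the #error without Py_BUILD_CORE */

    /* Hypothetical helper: the generated constants and macros behave as
     * before, just privately. ISTERMINAL() and ISEOF() come from the same
     * generated header. */
    static int
    classify(int type)
    {
        return ISTERMINAL(type) && !ISEOF(type);
    }

(Third-party code that previously included token.h can get the same constants
from the public pure-Python token module instead.)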
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 3ce116d2bab..a35884b3c35 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -244,6 +244,7 @@
     <ClInclude Include="..\Include\internal\pycore_structseq.h" />
     <ClInclude Include="..\Include\internal\pycore_symtable.h" />
     <ClInclude Include="..\Include\internal\pycore_sysmodule.h" />
+    <ClInclude Include="..\Include\internal\pycore_token.h" />
     <ClInclude Include="..\Include\internal\pycore_traceback.h" />
     <ClInclude Include="..\Include\internal\pycore_tuple.h" />
     <ClInclude Include="..\Include\internal\pycore_typeobject.h" />
@@ -291,7 +292,6 @@
     <ClInclude Include="..\Include\structmember.h" />
     <ClInclude Include="..\Include\structseq.h" />
     <ClInclude Include="..\Include\sysmodule.h" />
-    <ClInclude Include="..\Include\token.h" />
     <ClInclude Include="..\Include\traceback.h" />
     <ClInclude Include="..\Include\tracemalloc.h" />
     <ClInclude Include="..\Include\tupleobject.h" />
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index 542d5510456..ff42cc92c4b 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -213,9 +213,6 @@
     <ClInclude Include="..\Include\sysmodule.h">
       <Filter>Include</Filter>
     </ClInclude>
-    <ClInclude Include="..\Include\token.h">
-      <Filter>Include</Filter>
-    </ClInclude>
     <ClInclude Include="..\Include\traceback.h">
       <Filter>Include</Filter>
     </ClInclude>
@@ -633,6 +630,9 @@
     <ClInclude Include="..\Include\internal\pycore_sysmodule.h">
       <Filter>Include\internal</Filter>
     </ClInclude>
+    <ClInclude Include="..\Include\internal\pycore_token.h">
+      <Filter>Include\internal</Filter>
+    </ClInclude>
     <ClInclude Include="..\Include\internal\pycore_traceback.h">
       <Filter>Include\internal</Filter>
     </ClInclude>
diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets
index 24b5ced1de0..9073bb6ab2b 100644
--- a/PCbuild/regen.targets
+++ b/PCbuild/regen.targets
@@ -19,7 +19,7 @@
     <_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc">
      <Format>rst</Format>
    </_TokenOutputs>
-    <_TokenOutputs Include="$(PySourcePath)Include\token.h">
+    <_TokenOutputs Include="$(PySourcePath)Include\internal\pycore_token.h">
      <Format>h</Format>
    </_TokenOutputs>
     <_TokenOutputs Include="$(PySourcePath)Parser\token.c">
diff --git a/Parser/pegen.h b/Parser/pegen.h
index fe0c327b875..d6a6e4e1eeb 100644
--- a/Parser/pegen.h
+++ b/Parser/pegen.h
@@ -3,8 +3,8 @@
 
 #define PY_SSIZE_T_CLEAN
 #include <Python.h>
-#include <token.h>
 #include <pycore_ast.h>
+#include <pycore_token.h>
 
 #if 0
 #define PyPARSE_YIELD_IS_KEYWORD 0x0001
diff --git a/Parser/token.c b/Parser/token.c
index 74bca0eff65..fa03fbc450b 100644
--- a/Parser/token.c
+++ b/Parser/token.c
@@ -1,7 +1,7 @@
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 #include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
 
 /* Token names */
 
@@ -76,7 +76,7 @@ const char * const _PyParser_TokenNames[] = {
 /* Return the token corresponding to a single character */
 
 int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
 {
     switch (c1) {
     case '%': return PERCENT;
@@ -107,7 +107,7 @@ PyToken_OneChar(int c1)
 }
 
 int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
 {
     switch (c1) {
     case '!':
@@ -191,7 +191,7 @@ PyToken_TwoChars(int c1, int c2)
 }
 
 int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
 {
     switch (c1) {
     case '*':
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index c450aa8e463..7c797180956 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -1992,10 +1992,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
     /* Check for two-character token */
     {
         int c2 = tok_nextc(tok);
-        int token = PyToken_TwoChars(c, c2);
+        int token = _PyToken_TwoChars(c, c2);
         if (token != OP) {
             int c3 = tok_nextc(tok);
-            int token3 = PyToken_ThreeChars(c, c2, c3);
+            int token3 = _PyToken_ThreeChars(c, c2, c3);
             if (token3 != OP) {
                 token = token3;
             }
@@ -2059,7 +2059,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
     /* Punctuation character */
     *p_start = tok->start;
     *p_end = tok->cur;
-    return PyToken_OneChar(c);
+    return _PyToken_OneChar(c);
 }
 
 int
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 0cb665104b2..dba71bd60fe 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -8,7 +8,7 @@ extern "C" {
 
 /* Tokenizer interface */
 
-#include "token.h"        /* For token types */
+#include "pycore_token.h" /* For token types */
 
 #define MAXINDENT 100   /* Max indentation level */
 #define MAXLEVEL 200    /* Max parentheses level */
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index f12b9f6e953..202df585f31 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -24,7 +24,6 @@
 #include "pycore_sysmodule.h"     // _PySys_Audit()
 #include "pycore_traceback.h"     // _PyTraceBack_Print_Indented()
 
-#include "token.h"                // INDENT
 #include "errcode.h"              // E_EOF
 #include "marshal.h"              // PyMarshal_ReadLongFromFile()
 
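(Annotation, not part of the patch: the tokenizer.c hunk above shows the
contract of the renamed helpers: each returns a concrete token type, or the
generic OP when no operator of that length matches, and tok_get() uses that
OP result to fall back from longer to shorter matches. A self-contained
sketch of the pattern, with mock token numbers rather than the generated
values from Grammar/Tokens:)

    #include <stdio.h>

    /* Mock token numbers for illustration -- the real values live in the
     * generated pycore_token.h. */
    enum { OP = 54, EQUAL = 22, EQEQUAL = 27 };

    /* Same shape as _PyToken_TwoChars(): a specific token, or OP. */
    static int two_chars(int c1, int c2)
    {
        return (c1 == '=' && c2 == '=') ? EQEQUAL : OP;
    }

    /* Same shape as _PyToken_OneChar(). */
    static int one_char(int c1)
    {
        return (c1 == '=') ? EQUAL : OP;
    }

    int main(void)
    {
        /* Longest match first, as in tok_get(): probe two characters,
         * fall back to one when the probe reports the generic OP. */
        int token = two_chars('=', '+');    /* "=+" is not an operator */
        if (token == OP) {
            token = one_char('=');          /* plain "=" is EQUAL */
        }
        printf("token=%d\n", token);        /* prints token=22 */
        return 0;
    }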
#include "errcode.h" // E_EOF #include "marshal.h" // PyMarshal_ReadLongFromFile() diff --git a/Tools/scripts/generate_token.py b/Tools/scripts/generate_token.py index 77bb5bd5eca..d8be8b93de1 100755 --- a/Tools/scripts/generate_token.py +++ b/Tools/scripts/generate_token.py @@ -51,13 +51,16 @@ def update_file(file, content): /* Auto-generated by Tools/scripts/generate_token.py */ /* Token types */ -#ifndef Py_LIMITED_API -#ifndef Py_TOKEN_H -#define Py_TOKEN_H +#ifndef Py_INTERNAL_TOKEN_H +#define Py_INTERNAL_TOKEN_H #ifdef __cplusplus extern "C" { #endif +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + #undef TILDE /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */ %s\ @@ -75,19 +78,19 @@ def update_file(file, content): (x) == DEDENT) +// Symbols exported for test_peg_generator PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */ -PyAPI_FUNC(int) PyToken_OneChar(int); -PyAPI_FUNC(int) PyToken_TwoChars(int, int); -PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int); +PyAPI_FUNC(int) _PyToken_OneChar(int); +PyAPI_FUNC(int) _PyToken_TwoChars(int, int); +PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int); #ifdef __cplusplus } #endif -#endif /* !Py_TOKEN_H */ -#endif /* Py_LIMITED_API */ +#endif // !Py_INTERNAL_TOKEN_H """ -def make_h(infile, outfile='Include/token.h'): +def make_h(infile, outfile='Include/internal/pycore_token.h'): tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile) defines = [] @@ -106,7 +109,7 @@ def make_h(infile, outfile='Include/token.h'): /* Auto-generated by Tools/scripts/generate_token.py */ #include "Python.h" -#include "token.h" +#include "pycore_token.h" /* Token names */ @@ -117,21 +120,21 @@ def make_h(infile, outfile='Include/token.h'): /* Return the token corresponding to a single character */ int -PyToken_OneChar(int c1) +_PyToken_OneChar(int c1) { %s\ return OP; } int -PyToken_TwoChars(int c1, int c2) +_PyToken_TwoChars(int c1, int c2) { %s\ return OP; } int -PyToken_ThreeChars(int c1, int c2, int c3) +_PyToken_ThreeChars(int c1, int c2, int c3) { %s\ return OP;