Issue #10386: Added __all__ to token module; this simplifies importing
in tokenize module and prevents leaking of private names through
import *.
Alexander Belopolsky 2010-11-11 14:07:41 +00:00
parent bb27c128a5
commit b9d10d08c4
3 changed files with 14 additions and 13 deletions
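
For context, a minimal sketch of what the change buys (not part of the commit; it assumes a Python where this patch is applied). With __all__ defined in token, wildcard imports expose only the listed names, and tokenize can build its public name list directly from token.__all__ instead of filtering dir(token); the rename of main() to _main() keeps the command-line helper out of that public surface.

    # Sketch only: expected behaviour once this patch is applied.
    import token
    import tokenize

    print('ISTERMINAL' in token.__all__)   # True: listed explicitly
    print('NAME' in token.__all__)         # True: added via __all__.extend(tok_name.values())
    print('_main' in token.__all__)        # False: the renamed helper stays private

    # tokenize.__all__ is now token.__all__ plus tokenize's own additions,
    # so every public token name is re-exported by "from tokenize import *".
    print(set(token.__all__) <= set(tokenize.__all__))   # True
    print('detect_encoding' in tokenize.__all__)          # True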

Lib/token.py

@@ -1,7 +1,7 @@
 #! /usr/bin/env python3
 """Token constants (from "token.h")."""
+__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
 # This file is automatically generated; please don't muck it up!
 #
 # To update the symbols in this file, 'cd' to the top directory of
@@ -68,12 +68,10 @@ N_TOKENS = 55
 NT_OFFSET = 256
 #--end constants--
-tok_name = {}
-for _name, _value in list(globals().items()):
-    if type(_value) is type(0):
-        tok_name[_value] = _name
-del _name, _value
+tok_name = {value: name
+            for name, value in globals().items()
+            if isinstance(value, int)}
+__all__.extend(tok_name.values())
 def ISTERMINAL(x):
     return x < NT_OFFSET
@@ -85,7 +83,7 @@ def ISEOF(x):
     return x == ENDMARKER
-def main():
+def _main():
     import re
     import sys
     args = sys.argv[1:]
@@ -139,4 +137,4 @@ def main():
 if __name__ == "__main__":
-    main()
+    _main()
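
As a small usage sketch of the rewritten dictionary comprehension above (standard token module APIs; nothing here is specific to this commit): tok_name maps each integer token value back to its name, and those names are what __all__.extend() publishes.

    import token

    print(token.tok_name[token.NAME])        # 'NAME'
    print(token.tok_name[token.ENDMARKER])   # 'ENDMARKER'
    print(token.ISTERMINAL(token.NUMBER))    # True: terminal token values sit below NT_OFFSET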

Lib/tokenize.py

@@ -33,9 +33,8 @@
 cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
 import token
-__all__ = [x for x in dir(token) if not x.startswith("_")]
-__all__.extend(["COMMENT", "tokenize", "detect_encoding", "NL", "untokenize",
-                "ENCODING", "TokenInfo"])
+__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
+                           "NL", "untokenize", "ENCODING", "TokenInfo"]
 del token
 COMMENT = N_TOKENS

Misc/NEWS

@@ -63,6 +63,10 @@ Core and Builtins
 Library
 -------
+- Issue #10386: Add __all__ to token module; this simplifies importing
+  in tokenize module and prevents leaking of private names through
+  import *.
 - Issue #4471: Properly shutdown socket in IMAP.shutdown(). Patch by
   Lorenzo M. Catucci.