[Python-checkins] r86410 - in python/branches/py3k: Lib/token.py Lib/tokenize.py Misc/NEWS

alexander.belopolsky python-checkins at python.org
Thu Nov 11 15:07:47 CET 2010


Author: alexander.belopolsky
Date: Thu Nov 11 15:07:41 2010
New Revision: 86410

Log:
Issue #10386: Added __all__ to the token module; this simplifies importing
in the tokenize module and prevents private names from leaking through
import *.
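
For reference, a minimal sketch of the behaviour this change relies on (the
script name below is hypothetical and the snippet is not part of the patch):
once __all__ is defined, "from token import *" exports only the listed names,
and tokenize can build its own __all__ directly from token.__all__ instead of
filtering dir(token).

    # star_import_sketch.py -- hypothetical demo, not part of r86410.
    # Emulate "from token import *" and check which names get exported.
    import token      # stdlib module patched in this revision
    import tokenize

    ns = {}
    exec("from token import *", ns)

    print('NAME' in ns)       # True  -- token constants are exported
    print('tok_name' in ns)   # True  -- listed explicitly in __all__
    print('_main' in ns)      # False -- the private helper stays hidden

    # tokenize.__all__ is now built directly on top of token.__all__:
    print(set(token.__all__) <= set(tokenize.__all__))   # True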


Modified:
   python/branches/py3k/Lib/token.py
   python/branches/py3k/Lib/tokenize.py
   python/branches/py3k/Misc/NEWS

Modified: python/branches/py3k/Lib/token.py
==============================================================================
--- python/branches/py3k/Lib/token.py	(original)
+++ python/branches/py3k/Lib/token.py	Thu Nov 11 15:07:41 2010
@@ -1,7 +1,7 @@
-#! /usr/bin/env python3
-
 """Token constants (from "token.h")."""
 
+__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
+
 #  This file is automatically generated; please don't muck it up!
 #
 #  To update the symbols in this file, 'cd' to the top directory of
@@ -68,12 +68,10 @@
 NT_OFFSET = 256
 #--end constants--
 
-tok_name = {}
-for _name, _value in list(globals().items()):
-    if type(_value) is type(0):
-        tok_name[_value] = _name
-del _name, _value
-
+tok_name = {value: name
+            for name, value in globals().items()
+            if isinstance(value, int)}
+__all__.extend(tok_name.values())
 
 def ISTERMINAL(x):
     return x < NT_OFFSET
@@ -85,7 +83,7 @@
     return x == ENDMARKER
 
 
-def main():
+def _main():
     import re
     import sys
     args = sys.argv[1:]
@@ -139,4 +137,4 @@
 
 
 if __name__ == "__main__":
-    main()
+    _main()

Modified: python/branches/py3k/Lib/tokenize.py
==============================================================================
--- python/branches/py3k/Lib/tokenize.py	(original)
+++ python/branches/py3k/Lib/tokenize.py	Thu Nov 11 15:07:41 2010
@@ -33,9 +33,8 @@
 cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
 
 import token
-__all__ = [x for x in dir(token) if not x.startswith("_")]
-__all__.extend(["COMMENT", "tokenize", "detect_encoding", "NL", "untokenize",
-                "ENCODING", "TokenInfo"])
+__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
+                           "NL", "untokenize", "ENCODING", "TokenInfo"]
 del token
 
 COMMENT = N_TOKENS

Modified: python/branches/py3k/Misc/NEWS
==============================================================================
--- python/branches/py3k/Misc/NEWS	(original)
+++ python/branches/py3k/Misc/NEWS	Thu Nov 11 15:07:41 2010
@@ -63,6 +63,10 @@
 Library
 -------
 
+- Issue #10386: Add __all__ to the token module; this simplifies importing
+  in the tokenize module and prevents private names from leaking through
+  import *.
+
 - Issue #4471: Properly shutdown socket in IMAP.shutdown().  Patch by
   Lorenzo M. Catucci.
 

