[Python-checkins] r80934 - in python/trunk/Lib/lib2to3: fixes/fix_import.py fixes/fix_operator.py fixes/fix_reduce.py fixes/fix_sys_exc.py fixes/fix_tuple_params.py fixes/fix_xrange.py main.py patcomp.py pgen2/tokenize.py pytree.py refactor.py tests/test_fixers.py tests/test_parser.py tests/test_pytree.py tests/test_refactor.py

benjamin.peterson python-checkins at python.org
Fri May 7 20:58:24 CEST 2010


Author: benjamin.peterson
Date: Fri May  7 20:58:23 2010
New Revision: 80934

Log:
Merged revisions 79911,79916-79917,80018,80418,80572-80573,80635-80639,80668,80922 via svnmerge from 
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
  r79911 | benjamin.peterson | 2010-04-09 15:38:53 -0500 (Fri, 09 Apr 2010) | 1 line
  
  use absolute import
........
  r79916 | benjamin.peterson | 2010-04-09 16:05:21 -0500 (Fri, 09 Apr 2010) | 1 line
  
  generalize detection of __future__ imports and attach them to the tree
........
  r79917 | benjamin.peterson | 2010-04-09 16:11:44 -0500 (Fri, 09 Apr 2010) | 1 line
  
  don't try to 'fix' relative imports when absolute_import is enabled #8858
........
  r80018 | benjamin.peterson | 2010-04-12 16:12:12 -0500 (Mon, 12 Apr 2010) | 4 lines
  
  prevent diffs from being mangled in multiprocess mode #6409
  
  Patch by George Boutsioukis.
........
  r80418 | benjamin.peterson | 2010-04-23 16:00:03 -0500 (Fri, 23 Apr 2010) | 1 line
  
  remove unhelpful description
........
  r80572 | benjamin.peterson | 2010-04-27 20:33:54 -0500 (Tue, 27 Apr 2010) | 1 line
  
  use unicode literals
........
  r80573 | jeffrey.yasskin | 2010-04-27 23:08:27 -0500 (Tue, 27 Apr 2010) | 6 lines
  
  Don't transform imports that are already relative.  2to3 turned
    from . import refactor
  into
    from .. import refactor
  which broke the transformation of 2to3 itself.
........
  r80635 | benjamin.peterson | 2010-04-29 16:02:23 -0500 (Thu, 29 Apr 2010) | 1 line
  
  remove imports
........
  r80636 | benjamin.peterson | 2010-04-29 16:02:41 -0500 (Thu, 29 Apr 2010) | 1 line
  
  unicode literal
........
  r80637 | benjamin.peterson | 2010-04-29 16:03:42 -0500 (Thu, 29 Apr 2010) | 1 line
  
  must pass a string to Number
........
  r80638 | benjamin.peterson | 2010-04-29 16:05:34 -0500 (Thu, 29 Apr 2010) | 1 line
  
  unicode literals
........
  r80639 | benjamin.peterson | 2010-04-29 16:06:09 -0500 (Thu, 29 Apr 2010) | 1 line
  
  pass string to Number
........
  r80668 | jeffrey.yasskin | 2010-04-30 18:02:47 -0500 (Fri, 30 Apr 2010) | 4 lines
  
  Make 2to3 run under Python 2.5 so that the benchmark suite at
  http://hg.python.org/benchmarks/ can use it and still run on implementations
  that haven't gotten to 2.6 yet.  Fixes issue 8566.
........
  r80922 | benjamin.peterson | 2010-05-07 11:06:25 -0500 (Fri, 07 May 2010) | 1 line
  
  prevent xrange transformation from wrapping range calls it produces in list
........


Modified:
   python/trunk/Lib/lib2to3/   (props changed)
   python/trunk/Lib/lib2to3/fixes/fix_import.py
   python/trunk/Lib/lib2to3/fixes/fix_operator.py
   python/trunk/Lib/lib2to3/fixes/fix_reduce.py
   python/trunk/Lib/lib2to3/fixes/fix_sys_exc.py
   python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py
   python/trunk/Lib/lib2to3/fixes/fix_xrange.py
   python/trunk/Lib/lib2to3/main.py
   python/trunk/Lib/lib2to3/patcomp.py
   python/trunk/Lib/lib2to3/pgen2/tokenize.py
   python/trunk/Lib/lib2to3/pytree.py
   python/trunk/Lib/lib2to3/refactor.py
   python/trunk/Lib/lib2to3/tests/test_fixers.py
   python/trunk/Lib/lib2to3/tests/test_parser.py
   python/trunk/Lib/lib2to3/tests/test_pytree.py
   python/trunk/Lib/lib2to3/tests/test_refactor.py

Modified: python/trunk/Lib/lib2to3/fixes/fix_import.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_import.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_import.py	Fri May  7 20:58:23 2010
@@ -43,7 +43,13 @@
     import_name< 'import' imp=any >
     """
 
+    def start_tree(self, tree, name):
+        super(FixImport, self).start_tree(tree, name)
+        self.skip = "absolute_import" in tree.future_features
+
     def transform(self, node, results):
+        if self.skip:
+            return
         imp = results['imp']
 
         if node.type == syms.import_from:
@@ -71,19 +77,22 @@
                     self.warning(node, "absolute and local imports together")
                 return
 
-            new = FromImport('.', [imp])
+            new = FromImport(u".", [imp])
             new.prefix = node.prefix
             return new
 
     def probably_a_local_import(self, imp_name):
-        imp_name = imp_name.split('.', 1)[0]
+        if imp_name.startswith(u"."):
+            # Relative imports are certainly not local imports.
+            return False
+        imp_name = imp_name.split(u".", 1)[0]
         base_path = dirname(self.filename)
         base_path = join(base_path, imp_name)
         # If there is no __init__.py next to the file its not in a package
         # so can't be a relative import.
-        if not exists(join(dirname(base_path), '__init__.py')):
+        if not exists(join(dirname(base_path), "__init__.py")):
             return False
-        for ext in ['.py', sep, '.pyc', '.so', '.sl', '.pyd']:
+        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
             if exists(base_path + ext):
                 return True
         return False

Modified: python/trunk/Lib/lib2to3/fixes/fix_operator.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_operator.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_operator.py	Fri May  7 20:58:23 2010
@@ -14,10 +14,10 @@
     func = "'(' func=any ')'"
     PATTERN = """
               power< module='operator'
-                trailer< '.' {methods} > trailer< {func} > >
+                trailer< '.' %(methods)s > trailer< %(func)s > >
               |
-              power< {methods} trailer< {func} > >
-              """.format(methods=methods, func=func)
+              power< %(methods)s trailer< %(func)s > >
+              """ % dict(methods=methods, func=func)
 
     def transform(self, node, results):
         method = results["method"][0]

Modified: python/trunk/Lib/lib2to3/fixes/fix_reduce.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_reduce.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_reduce.py	Fri May  7 20:58:23 2010
@@ -7,9 +7,8 @@
 used in that module.
 """
 
-from .. import pytree
-from .. import fixer_base
-from ..fixer_util import Name, Attr, touch_import
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import touch_import
 
 
 

Modified: python/trunk/Lib/lib2to3/fixes/fix_sys_exc.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_sys_exc.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_sys_exc.py	Fri May  7 20:58:23 2010
@@ -13,14 +13,14 @@
 
 class FixSysExc(fixer_base.BaseFix):
     # This order matches the ordering of sys.exc_info().
-    exc_info = ["exc_type", "exc_value", "exc_traceback"]
+    exc_info = [u"exc_type", u"exc_value", u"exc_traceback"]
     PATTERN = """
               power< 'sys' trailer< dot='.' attribute=(%s) > >
               """ % '|'.join("'%s'" % e for e in exc_info)
 
     def transform(self, node, results):
         sys_attr = results["attribute"][0]
-        index = Number(self.exc_info.index(sys_attr.value))
+        index = Number(unicode(self.exc_info.index(sys_attr.value)))
 
         call = Call(Name(u"exc_info"), prefix=sys_attr.prefix)
         attr = Attr(Name(u"sys"), call)

Modified: python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py	Fri May  7 20:58:23 2010
@@ -54,7 +54,7 @@
             end = Newline()
         else:
             start = 0
-            indent = "; "
+            indent = u"; "
             end = pytree.Leaf(token.INDENT, u"")
 
         # We need access to self for new_name(), and making this a method
@@ -154,7 +154,7 @@
     if d is None:
         d = {}
     for i, obj in enumerate(param_list):
-        trailer = [Subscript(Number(i))]
+        trailer = [Subscript(Number(unicode(i)))]
         if isinstance(obj, list):
             map_to_index(obj, trailer, d=d)
         else:

Modified: python/trunk/Lib/lib2to3/fixes/fix_xrange.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_xrange.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_xrange.py	Fri May  7 20:58:23 2010
@@ -17,6 +17,13 @@
               rest=any* >
               """
 
+    def start_tree(self, tree, filename):
+        super(FixXrange, self).start_tree(tree, filename)
+        self.transformed_xranges = set()
+
+    def finish_tree(self, tree, filename):
+        self.transformed_xranges = None
+
     def transform(self, node, results):
         name = results["name"]
         if name.value == u"xrange":
@@ -29,9 +36,12 @@
     def transform_xrange(self, node, results):
         name = results["name"]
         name.replace(Name(u"range", prefix=name.prefix))
+        # This prevents the new range call from being wrapped in a list later.
+        self.transformed_xranges.add(id(node))
 
     def transform_range(self, node, results):
-        if not self.in_special_context(node):
+        if (id(node) not in self.transformed_xranges and
+            not self.in_special_context(node)):
             range_call = Call(Name(u"range"), [results["args"].clone()])
             # Encase the range call in list().
             list_call = Call(Name(u"list"), [range_call],

Modified: python/trunk/Lib/lib2to3/main.py
==============================================================================
--- python/trunk/Lib/lib2to3/main.py	(original)
+++ python/trunk/Lib/lib2to3/main.py	Fri May  7 20:58:23 2010
@@ -2,6 +2,8 @@
 Main program for 2to3.
 """
 
+from __future__ import with_statement
+
 import sys
 import os
 import difflib
@@ -62,8 +64,14 @@
             if self.show_diffs:
                 diff_lines = diff_texts(old, new, filename)
                 try:
-                    for line in diff_lines:
-                        print line
+                    if self.output_lock is not None:
+                        with self.output_lock:
+                            for line in diff_lines:
+                                print line
+                            sys.stdout.flush()
+                    else:
+                        for line in diff_lines:
+                            print line
                 except UnicodeEncodeError:
                     warn("couldn't encode %s's diff for your terminal" %
                          (filename,))
@@ -95,7 +103,7 @@
     parser.add_option("-x", "--nofix", action="append", default=[],
                       help="Prevent a fixer from being run.")
     parser.add_option("-l", "--list-fixes", action="store_true",
-                      help="List available transformations (fixes/fix_*.py)")
+                      help="List available transformations")
     parser.add_option("-p", "--print-function", action="store_true",
                       help="Modify the grammar so that print() is a function")
     parser.add_option("-v", "--verbose", action="store_true",

Modified: python/trunk/Lib/lib2to3/patcomp.py
==============================================================================
--- python/trunk/Lib/lib2to3/patcomp.py	(original)
+++ python/trunk/Lib/lib2to3/patcomp.py	Fri May  7 20:58:23 2010
@@ -57,7 +57,7 @@
         tokens = tokenize_wrapper(input)
         try:
             root = self.driver.parse_tokens(tokens, debug=debug)
-        except parse.ParseError as e:
+        except parse.ParseError, e:
             raise PatternSyntaxError(str(e))
         return self.compile_node(root)
 

Modified: python/trunk/Lib/lib2to3/pgen2/tokenize.py
==============================================================================
--- python/trunk/Lib/lib2to3/pgen2/tokenize.py	(original)
+++ python/trunk/Lib/lib2to3/pgen2/tokenize.py	Fri May  7 20:58:23 2010
@@ -38,6 +38,13 @@
            "generate_tokens", "untokenize"]
 del token
 
+try:
+    bytes
+except NameError:
+    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
+    # valid Python 3 code.
+    bytes = str
+
 def group(*choices): return '(' + '|'.join(choices) + ')'
 def any(*choices): return group(*choices) + '*'
 def maybe(*choices): return group(*choices) + '?'
@@ -267,7 +274,7 @@
         try:
             return readline()
         except StopIteration:
-            return b''
+            return bytes()
 
     def find_cookie(line):
         try:

Modified: python/trunk/Lib/lib2to3/pytree.py
==============================================================================
--- python/trunk/Lib/lib2to3/pytree.py	(original)
+++ python/trunk/Lib/lib2to3/pytree.py	Fri May  7 20:58:23 2010
@@ -289,8 +289,7 @@
             for node in child.post_order():
                 yield node
 
-    @property
-    def prefix(self):
+    def _prefix_getter(self):
         """
         The whitespace and comments preceding this node in the input.
         """
@@ -298,11 +297,12 @@
             return ""
         return self.children[0].prefix
 
-    @prefix.setter
-    def prefix(self, prefix):
+    def _prefix_setter(self, prefix):
         if self.children:
             self.children[0].prefix = prefix
 
+    prefix = property(_prefix_getter, _prefix_setter)
+
     def set_child(self, i, child):
         """
         Equivalent to 'node.children[i] = child'. This method also sets the
@@ -390,18 +390,17 @@
         """Return a pre-order iterator for the tree."""
         yield self
 
-    @property
-    def prefix(self):
+    def _prefix_getter(self):
         """
         The whitespace and comments preceding this token in the input.
         """
         return self._prefix
 
-    @prefix.setter
-    def prefix(self, prefix):
+    def _prefix_setter(self, prefix):
         self.changed()
         self._prefix = prefix
 
+    prefix = property(_prefix_getter, _prefix_setter)
 
 def convert(gr, raw_node):
     """

Modified: python/trunk/Lib/lib2to3/refactor.py
==============================================================================
--- python/trunk/Lib/lib2to3/refactor.py	(original)
+++ python/trunk/Lib/lib2to3/refactor.py	Fri May  7 20:58:23 2010
@@ -8,6 +8,8 @@
 provides infrastructure to write your own refactoring tool.
 """
 
+from __future__ import with_statement
+
 __author__ = "Guido van Rossum <guido at python.org>"
 
 
@@ -122,13 +124,14 @@
     _to_system_newlines = _identity
 
 
-def _detect_future_print(source):
+def _detect_future_features(source):
     have_docstring = False
     gen = tokenize.generate_tokens(StringIO.StringIO(source).readline)
     def advance():
-        tok = next(gen)
+        tok = gen.next()
         return tok[0], tok[1]
     ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
+    features = set()
     try:
         while True:
             tp, value = advance()
@@ -140,26 +143,25 @@
                 have_docstring = True
             elif tp == token.NAME and value == u"from":
                 tp, value = advance()
-                if tp != token.NAME and value != u"__future__":
+                if tp != token.NAME or value != u"__future__":
                     break
                 tp, value = advance()
-                if tp != token.NAME and value != u"import":
+                if tp != token.NAME or value != u"import":
                     break
                 tp, value = advance()
                 if tp == token.OP and value == u"(":
                     tp, value = advance()
                 while tp == token.NAME:
-                    if value == u"print_function":
-                        return True
+                    features.add(value)
                     tp, value = advance()
-                    if tp != token.OP and value != u",":
+                    if tp != token.OP or value != u",":
                         break
                     tp, value = advance()
             else:
                 break
     except StopIteration:
         pass
-    return False
+    return frozenset(features)
 
 
 class FixerError(Exception):
@@ -341,7 +343,8 @@
             An AST corresponding to the refactored input stream; None if
             there were errors during the parse.
         """
-        if _detect_future_print(data):
+        features = _detect_future_features(data)
+        if "print_function" in features:
             self.driver.grammar = pygram.python_grammar_no_print_statement
         try:
             tree = self.driver.parse_string(data)
@@ -351,6 +354,7 @@
             return
         finally:
             self.driver.grammar = self.grammar
+        tree.future_features = features
         self.log_debug("Refactoring %s", name)
         self.refactor_tree(tree, name)
         return tree
@@ -605,6 +609,7 @@
     def __init__(self, *args, **kwargs):
         super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
         self.queue = None
+        self.output_lock = None
 
     def refactor(self, items, write=False, doctests_only=False,
                  num_processes=1):
@@ -618,6 +623,7 @@
         if self.queue is not None:
             raise RuntimeError("already doing multiple processes")
         self.queue = multiprocessing.JoinableQueue()
+        self.output_lock = multiprocessing.Lock()
         processes = [multiprocessing.Process(target=self._child)
                      for i in xrange(num_processes)]
         try:

Modified: python/trunk/Lib/lib2to3/tests/test_fixers.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_fixers.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_fixers.py	Fri May  7 20:58:23 2010
@@ -1497,6 +1497,17 @@
         for call in fixer_util.consuming_calls:
             self.unchanged("a = %s(range(10))" % call)
 
+class Test_xrange_with_reduce(FixerTestCase):
+
+    def setUp(self):
+        super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"])
+
+    def test_double_transform(self):
+        b = """reduce(x, xrange(5))"""
+        a = """from functools import reduce
+reduce(x, range(5))"""
+        self.check(b, a)
+
 class Test_raw_input(FixerTestCase):
     fixer = "raw_input"
 
@@ -3679,7 +3690,7 @@
             self.files_checked.append(name)
             return self.always_exists or (name in self.present_files)
 
-        from ..fixes import fix_import
+        from lib2to3.fixes import fix_import
         fix_import.exists = fake_exists
 
     def tearDown(self):
@@ -3722,6 +3733,12 @@
         self.present_files = set(["bar.py"])
         self.unchanged(s)
 
+    def test_with_absolute_import_enabled(self):
+        s = "from __future__ import absolute_import\nimport bar"
+        self.always_exists = False
+        self.present_files = set(["__init__.py", "bar.py"])
+        self.unchanged(s)
+
     def test_in_package(self):
         b = "import bar"
         a = "from . import bar"
@@ -3736,6 +3753,10 @@
         self.present_files = set(["__init__.py", "bar" + os.path.sep])
         self.check(b, a)
 
+    def test_already_relative_import(self):
+        s = "from . import bar"
+        self.unchanged(s)
+
     def test_comments_and_indent(self):
         b = "import bar # Foo"
         a = "from . import bar # Foo"

Modified: python/trunk/Lib/lib2to3/tests/test_parser.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_parser.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_parser.py	Fri May  7 20:58:23 2010
@@ -6,13 +6,14 @@
 test_grammar.py files from both Python 2 and Python 3.
 """
 
+from __future__ import with_statement
+
 # Testing imports
 from . import support
 from .support import driver, test_dir
 
 # Python imports
 import os
-import io
 import sys
 
 # Local imports
@@ -156,8 +157,9 @@
                 encoding = tokenize.detect_encoding(fp.readline)[0]
             self.assertTrue(encoding is not None,
                             "can't detect encoding for %s" % filepath)
-            with io.open(filepath, "r", encoding=encoding) as fp:
+            with open(filepath, "r") as fp:
                 source = fp.read()
+                source = source.decode(encoding)
             tree = driver.parse_string(source)
             new = unicode(tree)
             if diff(filepath, new, encoding):
@@ -203,9 +205,9 @@
 
 
 def diff(fn, result, encoding):
-    f = io.open("@", "w", encoding=encoding)
+    f = open("@", "w")
     try:
-        f.write(result)
+        f.write(result.encode(encoding))
     finally:
         f.close()
     try:

Modified: python/trunk/Lib/lib2to3/tests/test_pytree.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_pytree.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_pytree.py	Fri May  7 20:58:23 2010
@@ -9,6 +9,9 @@
 especially when debugging a test.
 """
 
+from __future__ import with_statement
+
+import sys
 import warnings
 
 # Testing imports
@@ -28,20 +31,22 @@
 
     """Unit tests for nodes (Base, Leaf, Node)."""
 
-    def test_deprecated_prefix_methods(self):
-        l = pytree.Leaf(100, "foo")
-        with warnings.catch_warnings(record=True) as w:
-            warnings.simplefilter("always", DeprecationWarning)
-            self.assertEqual(l.get_prefix(), "")
-            l.set_prefix("hi")
-        self.assertEqual(l.prefix, "hi")
-        self.assertEqual(len(w), 2)
-        for warning in w:
-            self.assertTrue(warning.category is DeprecationWarning)
-        self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
-                             "use the prefix property")
-        self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
-                             "use the prefix property")
+    if sys.version_info >= (2,6):
+        # warnings.catch_warnings is new in 2.6.
+        def test_deprecated_prefix_methods(self):
+            l = pytree.Leaf(100, "foo")
+            with warnings.catch_warnings(record=True) as w:
+                warnings.simplefilter("always", DeprecationWarning)
+                self.assertEqual(l.get_prefix(), "")
+                l.set_prefix("hi")
+            self.assertEqual(l.prefix, "hi")
+            self.assertEqual(len(w), 2)
+            for warning in w:
+                self.assertTrue(warning.category is DeprecationWarning)
+            self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
+                                 "use the prefix property")
+            self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
+                                 "use the prefix property")
 
     def test_instantiate_base(self):
         if __debug__:

Modified: python/trunk/Lib/lib2to3/tests/test_refactor.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_refactor.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_refactor.py	Fri May  7 20:58:23 2010
@@ -2,6 +2,8 @@
 Unit tests for refactor.py.
 """
 
+from __future__ import with_statement
+
 import sys
 import os
 import codecs
@@ -61,42 +63,50 @@
         self.assertEqual(full_names,
                          ["myfixes.fix_" + name for name in contents])
 
-    def test_detect_future_print(self):
-        run = refactor._detect_future_print
-        self.assertFalse(run(""))
-        self.assertTrue(run("from __future__ import print_function"))
-        self.assertFalse(run("from __future__ import generators"))
-        self.assertFalse(run("from __future__ import generators, feature"))
-        input = "from __future__ import generators, print_function"
-        self.assertTrue(run(input))
-        input ="from __future__ import print_function, generators"
-        self.assertTrue(run(input))
-        input = "from __future__ import (print_function,)"
-        self.assertTrue(run(input))
-        input = "from __future__ import (generators, print_function)"
-        self.assertTrue(run(input))
-        input = "from __future__ import (generators, nested_scopes)"
-        self.assertFalse(run(input))
-        input = """from __future__ import generators
+    def test_detect_future_features(self):
+        run = refactor._detect_future_features
+        fs = frozenset
+        empty = fs()
+        self.assertEqual(run(""), empty)
+        self.assertEqual(run("from __future__ import print_function"),
+                         fs(("print_function",)))
+        self.assertEqual(run("from __future__ import generators"),
+                         fs(("generators",)))
+        self.assertEqual(run("from __future__ import generators, feature"),
+                         fs(("generators", "feature")))
+        inp = "from __future__ import generators, print_function"
+        self.assertEqual(run(inp), fs(("generators", "print_function")))
+        inp ="from __future__ import print_function, generators"
+        self.assertEqual(run(inp), fs(("print_function", "generators")))
+        inp = "from __future__ import (print_function,)"
+        self.assertEqual(run(inp), fs(("print_function",)))
+        inp = "from __future__ import (generators, print_function)"
+        self.assertEqual(run(inp), fs(("generators", "print_function")))
+        inp = "from __future__ import (generators, nested_scopes)"
+        self.assertEqual(run(inp), fs(("generators", "nested_scopes")))
+        inp = """from __future__ import generators
 from __future__ import print_function"""
-        self.assertTrue(run(input))
-        self.assertFalse(run("from"))
-        self.assertFalse(run("from 4"))
-        self.assertFalse(run("from x"))
-        self.assertFalse(run("from x 5"))
-        self.assertFalse(run("from x im"))
-        self.assertFalse(run("from x import"))
-        self.assertFalse(run("from x import 4"))
-        input = "'docstring'\nfrom __future__ import print_function"
-        self.assertTrue(run(input))
-        input = "'docstring'\n'somng'\nfrom __future__ import print_function"
-        self.assertFalse(run(input))
-        input = "# comment\nfrom __future__ import print_function"
-        self.assertTrue(run(input))
-        input = "# comment\n'doc'\nfrom __future__ import print_function"
-        self.assertTrue(run(input))
-        input = "class x: pass\nfrom __future__ import print_function"
-        self.assertFalse(run(input))
+        self.assertEqual(run(inp), fs(("generators", "print_function")))
+        invalid = ("from",
+                   "from 4",
+                   "from x",
+                   "from x 5",
+                   "from x im",
+                   "from x import",
+                   "from x import 4",
+                   )
+        for inp in invalid:
+            self.assertEqual(run(inp), empty)
+        inp = "'docstring'\nfrom __future__ import print_function"
+        self.assertEqual(run(inp), fs(("print_function",)))
+        inp = "'docstring'\n'somng'\nfrom __future__ import print_function"
+        self.assertEqual(run(inp), empty)
+        inp = "# comment\nfrom __future__ import print_function"
+        self.assertEqual(run(inp), fs(("print_function",)))
+        inp = "# comment\n'doc'\nfrom __future__ import print_function"
+        self.assertEqual(run(inp), fs(("print_function",)))
+        inp = "class x: pass\nfrom __future__ import print_function"
+        self.assertEqual(run(inp), empty)
 
     def test_get_headnode_dict(self):
         class NoneFix(fixer_base.BaseFix):


More information about the Python-checkins mailing list