[Python-checkins] r76062 - in python/trunk/Lib/lib2to3: Grammar.txt fixes/fix_idioms.py fixes/fix_map.py fixes/fix_tuple_params.py pgen2/pgen.py pgen2/tokenize.py pytree.py tests/test_all_fixers.py tests/test_fixers.py tests/test_parser.py

benjamin.peterson python-checkins at python.org
Mon Nov 2 19:12:12 CET 2009


Author: benjamin.peterson
Date: Mon Nov  2 19:12:12 2009
New Revision: 76062

Log:
Merged revisions 74359,75081,75088,75213,75278,75303,75427-75428,75734-75736,75865,76059-76061 via svnmerge from 
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
  r74359 | benjamin.peterson | 2009-08-12 17:23:13 -0500 (Wed, 12 Aug 2009) | 1 line
  
  don't pass the deprecated print_function option
........
  r75081 | benjamin.peterson | 2009-09-26 22:02:57 -0500 (Sat, 26 Sep 2009) | 1 line
  
  let 2to3 work with extended iterable unpacking
........
  r75088 | benjamin.peterson | 2009-09-27 11:25:21 -0500 (Sun, 27 Sep 2009) | 1 line
  
  look on the type only for __call__
........
  r75213 | benjamin.peterson | 2009-10-03 10:09:46 -0500 (Sat, 03 Oct 2009) | 5 lines
  
  revert 75212; it's not correct
  
  People can use isinstance(x, collections.Callable) if they expect objects with
  __call__ in their instance dictionaries.
........
  r75278 | benjamin.peterson | 2009-10-07 16:25:56 -0500 (Wed, 07 Oct 2009) | 4 lines
  
  fix whitespace problems with fix_idioms #3563
  
  Patch by Joe Amenta.
........
  r75303 | benjamin.peterson | 2009-10-09 16:59:11 -0500 (Fri, 09 Oct 2009) | 1 line
  
  port latin-1 and utf-8 cookie improvements
........
  r75427 | benjamin.peterson | 2009-10-14 20:35:57 -0500 (Wed, 14 Oct 2009) | 1 line
  
  force floor division
........
  r75428 | benjamin.peterson | 2009-10-14 20:39:21 -0500 (Wed, 14 Oct 2009) | 1 line
  
  silence -3 warnings about __hash__
........
  r75734 | benjamin.peterson | 2009-10-26 16:25:53 -0500 (Mon, 26 Oct 2009) | 2 lines
  
  warn on map(None, ...) with more than 2 arguments #7203
........
  r75735 | benjamin.peterson | 2009-10-26 16:28:25 -0500 (Mon, 26 Oct 2009) | 1 line
  
  remove unused result
........
  r75736 | benjamin.peterson | 2009-10-26 16:29:02 -0500 (Mon, 26 Oct 2009) | 1 line
  
  using get() here is a bit pointless
........
  r75865 | benjamin.peterson | 2009-10-27 15:49:00 -0500 (Tue, 27 Oct 2009) | 1 line
  
  explain reason for warning
........
  r76059 | benjamin.peterson | 2009-11-02 11:43:47 -0600 (Mon, 02 Nov 2009) | 1 line
  
  tuples are no longer used for children
........
  r76060 | benjamin.peterson | 2009-11-02 11:55:40 -0600 (Mon, 02 Nov 2009) | 1 line
  
  revert r76059; apparently some fixers rely on Leaf using () for children
........
  r76061 | benjamin.peterson | 2009-11-02 12:06:17 -0600 (Mon, 02 Nov 2009) | 1 line
  
  make fix_tuple_params keep the tree valid #7253
........


Modified:
   python/trunk/Lib/lib2to3/   (props changed)
   python/trunk/Lib/lib2to3/Grammar.txt
   python/trunk/Lib/lib2to3/fixes/fix_idioms.py
   python/trunk/Lib/lib2to3/fixes/fix_map.py
   python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py
   python/trunk/Lib/lib2to3/pgen2/pgen.py
   python/trunk/Lib/lib2to3/pgen2/tokenize.py
   python/trunk/Lib/lib2to3/pytree.py
   python/trunk/Lib/lib2to3/tests/test_all_fixers.py
   python/trunk/Lib/lib2to3/tests/test_fixers.py
   python/trunk/Lib/lib2to3/tests/test_parser.py

Modified: python/trunk/Lib/lib2to3/Grammar.txt
==============================================================================
--- python/trunk/Lib/lib2to3/Grammar.txt	(original)
+++ python/trunk/Lib/lib2to3/Grammar.txt	Mon Nov  2 19:12:12 2009
@@ -53,8 +53,9 @@
 simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
 small_stmt: (expr_stmt | print_stmt  | del_stmt | pass_stmt | flow_stmt |
              import_stmt | global_stmt | exec_stmt | assert_stmt)
-expr_stmt: testlist (augassign (yield_expr|testlist) |
-                     ('=' (yield_expr|testlist))*)
+expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
+                     ('=' (yield_expr|testlist_star_expr))*)
+testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
 augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
             '<<=' | '>>=' | '**=' | '//=')
 # For normal assignments, additional restrictions enforced by the interpreter
@@ -112,6 +113,7 @@
 not_test: 'not' not_test | comparison
 comparison: expr (comp_op expr)*
 comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+star_expr: '*' expr
 expr: xor_expr ('|' xor_expr)*
 xor_expr: and_expr ('^' and_expr)*
 and_expr: shift_expr ('&' shift_expr)*
@@ -125,14 +127,14 @@
        '{' [dictsetmaker] '}' |
        '`' testlist1 '`' |
        NAME | NUMBER | STRING+ | '.' '.' '.')
-listmaker: test ( comp_for | (',' test)* [','] )
-testlist_gexp: test ( comp_for | (',' test)* [','] )
+listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+testlist_gexp: test ( comp_for | (',' (test|star_expr))* [','] )
 lambdef: 'lambda' [varargslist] ':' test
 trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
 subscriptlist: subscript (',' subscript)* [',']
 subscript: test | [test] ':' [test] [sliceop]
 sliceop: ':' [test]
-exprlist: expr (',' expr)* [',']
+exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
 testlist: test (',' test)* [',']
 dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
                 (test (comp_for | (',' test)* [','])) )

Modified: python/trunk/Lib/lib2to3/fixes/fix_idioms.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_idioms.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_idioms.py	Mon Nov  2 19:12:12 2009
@@ -29,7 +29,7 @@
 
 # Local imports
 from .. import fixer_base
-from ..fixer_util import Call, Comma, Name, Node, syms
+from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms
 
 CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
 TYPE = "power< 'type' trailer< '(' x=any ')' > >"
@@ -130,5 +130,24 @@
         else:
             raise RuntimeError("should not have reached here")
         sort_stmt.remove()
-        if next_stmt:
-            next_stmt[0].prefix = sort_stmt.prefix
+
+        btwn = sort_stmt.prefix
+        # Keep any prefix lines between the sort_stmt and the list_call and
+        # shove them right after the sorted() call.
+        if u"\n" in btwn:
+            if next_stmt:
+                # The new prefix should be everything from the sort_stmt's
+                # prefix up to the last newline, then the old prefix after a new
+                # line.
+                prefix_lines = (btwn.rpartition(u"\n")[0], next_stmt[0].prefix)
+                next_stmt[0].prefix = u"\n".join(prefix_lines)
+            else:
+                assert list_call.parent
+                assert list_call.next_sibling is None
+                # Put a blank line after list_call and set its prefix.
+                end_line = BlankLine()
+                list_call.parent.append_child(end_line)
+                assert list_call.next_sibling is end_line
+                # The new prefix should be everything up to the first new line
+                # of sort_stmt's prefix.
+                end_line.prefix = btwn.rpartition(u"\n")[0]

Modified: python/trunk/Lib/lib2to3/fixes/fix_map.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_map.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_map.py	Mon Nov  2 19:12:12 2009
@@ -49,8 +49,7 @@
     >
     |
     power<
-        'map'
-        args=trailer< '(' [any] ')' >
+        'map' trailer< '(' [arglist=any] ')' >
     >
     """
 
@@ -66,13 +65,22 @@
             new.prefix = u""
             new = Call(Name(u"list"), [new])
         elif "map_lambda" in results:
-            new = ListComp(results.get("xp").clone(),
-                           results.get("fp").clone(),
-                           results.get("it").clone())
+            new = ListComp(results["xp"].clone(),
+                           results["fp"].clone(),
+                           results["it"].clone())
         else:
             if "map_none" in results:
                 new = results["arg"].clone()
             else:
+                if "arglist" in results:
+                    args = results["arglist"]
+                    if args.type == syms.arglist and \
+                       args.children[0].type == token.NAME and \
+                       args.children[0].value == "None":
+                        self.warning(node, "cannot convert map(None, ...) "
+                                     "with multiple arguments because map() "
+                                     "now truncates to the shortest sequence")
+                        return
                 if in_special_context(node):
                     return None
                 new = node.clone()

Modified: python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py
==============================================================================
--- python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py	(original)
+++ python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py	Mon Nov  2 19:12:12 2009
@@ -96,6 +96,8 @@
             new_lines[0].prefix = indent
             after = start + 1
 
+        for line in new_lines:
+            line.parent = suite[0]
         suite[0].children[after:after] = new_lines
         for i in range(after+1, after+len(new_lines)+1):
             suite[0].children[i].prefix = indent

Modified: python/trunk/Lib/lib2to3/pgen2/pgen.py
==============================================================================
--- python/trunk/Lib/lib2to3/pgen2/pgen.py	(original)
+++ python/trunk/Lib/lib2to3/pgen2/pgen.py	Mon Nov  2 19:12:12 2009
@@ -379,6 +379,8 @@
                 return False
         return True
 
+    __hash__ = None # For Py3 compatibility.
+
 def generate_grammar(filename="Grammar.txt"):
     p = ParserGenerator(filename)
     return p.make_grammar()

Modified: python/trunk/Lib/lib2to3/pgen2/tokenize.py
==============================================================================
--- python/trunk/Lib/lib2to3/pgen2/tokenize.py	(original)
+++ python/trunk/Lib/lib2to3/pgen2/tokenize.py	Mon Nov  2 19:12:12 2009
@@ -229,6 +229,17 @@
 
 cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
 
+def _get_normal_name(orig_enc):
+    """Imitates get_normal_name in tokenizer.c."""
+    # Only care about the first 12 characters.
+    enc = orig_enc[:12].lower().replace("_", "-")
+    if enc == "utf-8" or enc.startswith("utf-8-"):
+        return "utf-8"
+    if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+       enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+        return "iso-8859-1"
+    return orig_enc
+
 def detect_encoding(readline):
     """
     The detect_encoding() function is used to detect the encoding that should
@@ -263,7 +274,7 @@
         matches = cookie_re.findall(line_string)
         if not matches:
             return None
-        encoding = matches[0]
+        encoding = _get_normal_name(matches[0])
         try:
             codec = lookup(encoding)
         except LookupError:
@@ -373,7 +384,7 @@
             column = 0
             while pos < max:                   # measure leading whitespace
                 if line[pos] == ' ': column = column + 1
-                elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
+                elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize
                 elif line[pos] == '\f': column = 0
                 else: break
                 pos = pos + 1

Modified: python/trunk/Lib/lib2to3/pytree.py
==============================================================================
--- python/trunk/Lib/lib2to3/pytree.py	(original)
+++ python/trunk/Lib/lib2to3/pytree.py	Mon Nov  2 19:12:12 2009
@@ -63,6 +63,8 @@
             return NotImplemented
         return self._eq(other)
 
+    __hash__ = None # For Py3 compatibility.
+
     def __ne__(self, other):
         """
         Compare two nodes for inequality.

Modified: python/trunk/Lib/lib2to3/tests/test_all_fixers.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_all_fixers.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_all_fixers.py	Mon Nov  2 19:12:12 2009
@@ -15,8 +15,7 @@
 
 class Test_all(support.TestCase):
     def setUp(self):
-        options = {"print_function" : False}
-        self.refactor = support.get_refactorer(options=options)
+        self.refactor = support.get_refactorer()
 
     def test_all_project_files(self):
         for filepath in support.all_project_files():

Modified: python/trunk/Lib/lib2to3/tests/test_fixers.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_fixers.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_fixers.py	Mon Nov  2 19:12:12 2009
@@ -339,6 +339,12 @@
         a = "from functools import reduce\nreduce(a, b, c)"
         self.check(b, a)
 
+    def test_bug_7253(self):
+        # fix_tuple_params was being bad and orphaning nodes in the tree.
+        b = "def x(arg): reduce(sum, [])"
+        a = "from functools import reduce\ndef x(arg): reduce(sum, [])"
+        self.check(b, a)
+
     def test_call_with_lambda(self):
         b = "reduce(lambda x, y: x + y, seq)"
         a = "from functools import reduce\nreduce(lambda x, y: x + y, seq)"
@@ -2834,6 +2840,11 @@
         a = """x = list(map(f, 'abc'))   #   foo"""
         self.check(b, a)
 
+    def test_None_with_multiple_arguments(self):
+        s = """x = map(None, a, b, c)"""
+        self.warns_unchanged(s, "cannot convert map(None, ...) with "
+                             "multiple arguments")
+
     def test_map_basic(self):
         b = """x = map(f, 'abc')"""
         a = """x = list(map(f, 'abc'))"""
@@ -2847,10 +2858,6 @@
         a = """x = list('abc')"""
         self.check(b, a)
 
-        b = """x = map(None, 'abc', 'def')"""
-        a = """x = list(map(None, 'abc', 'def'))"""
-        self.check(b, a)
-
         b = """x = map(lambda x: x+1, range(4))"""
         a = """x = [x+1 for x in range(4)]"""
         self.check(b, a)
@@ -3238,6 +3245,46 @@
             """
         self.check(b, a)
 
+        b = r"""
+            try:
+                m = list(s)
+                m.sort()
+            except: pass
+            """
+
+        a = r"""
+            try:
+                m = sorted(s)
+            except: pass
+            """
+        self.check(b, a)
+
+        b = r"""
+            try:
+                m = list(s)
+                # foo
+                m.sort()
+            except: pass
+            """
+
+        a = r"""
+            try:
+                m = sorted(s)
+                # foo
+            except: pass
+            """
+        self.check(b, a)
+
+        b = r"""
+            m = list(s)
+            # more comments
+            m.sort()"""
+
+        a = r"""
+            m = sorted(s)
+            # more comments"""
+        self.check(b, a)
+
     def test_sort_simple_expr(self):
         b = """
             v = t

Modified: python/trunk/Lib/lib2to3/tests/test_parser.py
==============================================================================
--- python/trunk/Lib/lib2to3/tests/test_parser.py	(original)
+++ python/trunk/Lib/lib2to3/tests/test_parser.py	Mon Nov  2 19:12:12 2009
@@ -161,6 +161,11 @@
             if diff(filepath, new):
                 self.fail("Idempotency failed: %s" % filepath)
 
+    def test_extended_unpacking(self):
+        driver.parse_string("a, *b, c = x\n")
+        driver.parse_string("[*a, b] = x\n")
+        driver.parse_string("(z, *y, w) = m\n")
+        driver.parse_string("for *z, m in d: pass\n")
 
 class TestLiterals(GrammarTest):
 


More information about the Python-checkins mailing list