[pypy-svn] r16624 - pypy/dist/pypy/interpreter/pyparser/test

arigo at codespeak.net
Fri Aug 26 14:52:12 CEST 2005


Author: arigo
Date: Fri Aug 26 14:52:09 2005
New Revision: 16624

Modified:
   pypy/dist/pypy/interpreter/pyparser/test/test_pytokenizer.py
Log:
Fixed a test for the previous change in the tokenizer.


Modified: pypy/dist/pypy/interpreter/pyparser/test/test_pytokenizer.py
==============================================================================
--- pypy/dist/pypy/interpreter/pyparser/test/test_pytokenizer.py	(original)
+++ pypy/dist/pypy/interpreter/pyparser/test/test_pytokenizer.py	Fri Aug 26 14:52:09 2005
@@ -26,7 +26,7 @@
     '+=', '>>=', '=', '&=', '/=', '-=', ',', '^',
     '>>', '&', '+', '*', '-', '/', '.', '**',
     '%', '<<', '//', '|', ')', '(', ';', ':',
-    # '@', # XXX This one is skipped for now (?!)
+    '@',
     '[', ']', '`', '{', '}',
     ]
 
@@ -71,10 +71,16 @@
 def test_punct():
     """make sure each punctuation is correctly parsed"""
     for pstr in PUNCTS:
+        if   pstr == ')': prefix = '('
+        elif pstr == ']': prefix = '['
+        elif pstr == '}': prefix = '{'
+        else:             prefix = ''
         try:
-            tokens = parse_source(pstr)
+            tokens = parse_source(prefix+pstr)
         except TokenError, error:
             tokens = [tok for tok, _, _, _ in error.token_stack]
+        if prefix:
+            tokens.pop(0)
         assert tokens[0].codename == tok_punct[pstr]
 
 


