[Python-checkins] python/dist/src/Lib/test tokenize_tests.txt,NONE,1.1 test_tokenize.py,1.8,1.9 tokenize_tests.py,1.7,NONE

tim_one@users.sourceforge.net tim_one@users.sourceforge.net
Mon, 12 May 2003 12:42:06 -0700


Update of /cvsroot/python/python/dist/src/Lib/test
In directory sc8-pr-cvs1:/tmp/cvs-serv13959/Lib/test

Modified Files:
	test_tokenize.py 
Added Files:
	tokenize_tests.txt 
Removed Files:
	tokenize_tests.py 
Log Message:
Effectively renamed tokenize_tests.py to have a .txt extension instead.
This file isn't meant to be executed, it's data input for test_tokenize.py.
The problem with the .py extension is that it uses "non-standard"
indentation, and it's good to test that, but reindent.py keeps wanting
to fix it.  But fixing the indentation causes the expected-output file to
change, since exact line and column numbers are part of the
tokenize.tokenize() output getting tested.


--- NEW FILE: tokenize_tests.txt ---
# Tests for the 'tokenize' module.
# Large bits stolen from test_grammar.py. 

# Comments
"#"
#'
#"
#\
       #
    # abc
'''#
#'''

x = 1  #

# Balancing continuation

a = (3, 4,
  5, 6)
y = [3, 4,
  5]
z = {'a':5,
  'b':6}
x = (len(`y`) + 5*x - a[
   3 ]
   - x + len({
   }
    )
  )

# Backslash means line continuation:
x = 1 \
+ 1

# Backslash does not means continuation in comments :\
x = 0

# Ordinary integers
0xff <> 255
0377 <> 255
2147483647   != 017777777777
-2147483647-1 != 020000000000
037777777777 != -1
0xffffffff != -1

# Long integers
x = 0L
x = 0l
x = 0xffffffffffffffffL
x = 0xffffffffffffffffl
x = 077777777777777777L
x = 077777777777777777l
x = 123456789012345678901234567890L
x = 123456789012345678901234567890l

# Floating-point numbers
x = 3.14
x = 314.
x = 0.314
# XXX x = 000.314
x = .314
x = 3e14
x = 3E14
x = 3e-14
x = 3e+14
x = 3.e14
x = .3e14
x = 3.1e4

# String literals
x = ''; y = "";
x = '\''; y = "'";
x = '"'; y = "\"";
x = "doesn't \"shrink\" does it"
y = 'doesn\'t "shrink" does it'
x = "does \"shrink\" doesn't it"
y = 'does "shrink" doesn\'t it'
x = """
The "quick"
brown fox
jumps over
the 'lazy' dog.
"""
y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n'
y = '''
The "quick"
brown fox
jumps over
the 'lazy' dog.
''';
y = "\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the 'lazy' dog.\n\
";
y = '\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the \'lazy\' dog.\n\
';
x = r'\\' + R'\\'
x = r'\'' + ''
y = r'''
foo bar \\
baz''' + R'''
foo'''
y = r"""foo
bar \\ baz
""" + R'''spam
'''
x = u'abc' + U'ABC'
y = u"abc" + U"ABC"
x = ur'abc' + Ur'ABC' + uR'ABC' + UR'ABC'
y = ur"abc" + Ur"ABC" + uR"ABC" + UR"ABC"
x = ur'\\' + UR'\\'
x = ur'\'' + ''
y = ur'''
foo bar \\
baz''' + UR'''
foo'''
y = Ur"""foo
bar \\ baz
""" + uR'''spam
'''

# Indentation
if 1:
    x = 2
if 1:
        x = 2
if 1:
    while 0:
     if 0:
           x = 2
     x = 2
if 0:
  if 2:
   while 0:
        if 1:
          x = 2

# Operators

def d22(a, b, c=1, d=2): pass
def d01v(a=1, *restt, **restd): pass

(x, y) <> ({'a':1}, {'b':2})

# comparison
if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass

# binary
x = 1 & 1
x = 1 ^ 1
x = 1 | 1

# shift
x = 1 << 1 >> 1

# additive
x = 1 - 1 + 1 - 1 + 1

# multiplicative
x = 1 / 1 * 1 % 1

# unary
x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
x = -1*1/1 + 1*1 - ---1*1

# selector
import sys, time
x = sys.modules['time'].time()


Index: test_tokenize.py
===================================================================
RCS file: /cvsroot/python/python/dist/src/Lib/test/test_tokenize.py,v
retrieving revision 1.8
retrieving revision 1.9
diff -C2 -d -r1.8 -r1.9
*** test_tokenize.py	12 May 2003 19:29:36 -0000	1.8
--- test_tokenize.py	12 May 2003 19:42:04 -0000	1.9
***************
*** 5,9 ****
      print 'starting...'
  
! f = file(findfile('tokenize_tests'+os.extsep+'py'))
  tokenize.tokenize(f.readline)
  f.close()
--- 5,9 ----
      print 'starting...'
  
! f = file(findfile('tokenize_tests' + os.extsep + 'txt'))
  tokenize.tokenize(f.readline)
  f.close()

--- tokenize_tests.py DELETED ---