[pypy-svn] r78454 - pypy/branch/fast-forward/lib-python/modified-2.7.0/test

afa at codespeak.net afa at codespeak.net
Fri Oct 29 11:51:42 CEST 2010


Author: afa
Date: Fri Oct 29 11:51:41 2010
New Revision: 78454

Added:
   pypy/branch/fast-forward/lib-python/modified-2.7.0/test/test_zlib.py
      - copied, changed from r78441, pypy/branch/fast-forward/lib-python/2.7.0/test/test_zlib.py
Log:
Use os.urandom() instead of the complex hex-formatting scheme for generating random test data.
This allows the test to run without timing out.


Copied: pypy/branch/fast-forward/lib-python/modified-2.7.0/test/test_zlib.py (from r78441, pypy/branch/fast-forward/lib-python/2.7.0/test/test_zlib.py)
==============================================================================
--- pypy/branch/fast-forward/lib-python/2.7.0/test/test_zlib.py	(original)
+++ pypy/branch/fast-forward/lib-python/modified-2.7.0/test/test_zlib.py	Fri Oct 29 11:51:41 2010
@@ -1,6 +1,7 @@
 import unittest
 from test import test_support
 import binascii
+import os
 import random
 from test.test_support import precisionbigmemtest, _1G
 
@@ -93,14 +94,7 @@
 
 class BaseCompressTestCase(object):
     def check_big_compress_buffer(self, size, compress_func):
-        _1M = 1024 * 1024
-        fmt = "%%0%dx" % (2 * _1M)
-        # Generate 10MB worth of random, and expand it by repeating it.
-        # The assumption is that zlib's memory is not big enough to exploit
-        # such spread out redundancy.
-        data = ''.join([binascii.a2b_hex(fmt % random.getrandbits(8 * _1M))
-                        for i in range(10)])
-        data = data * (size // len(data) + 1)
+        data = os.urandom(size)
         try:
             compress_func(data)
         finally:



More information about the Pypy-commit mailing list