[issue11277] Crash with mmap and sparse files on Mac OS X

Steffen Daode Nurpmeso report at bugs.python.org
Fri May 6 17:33:08 CEST 2011


Steffen Daode Nurpmeso <sdaoden at googlemail.com> added the comment:

On Fri,  6 May 2011 02:54:07 +0200, Nadeem Vawda wrote:
> I think so. [...]
> it turns out that the OS X sparsefile crash is also covered by
> LargeMmapTests.test_large_offset() in test_mmap [!!!]. [...]

So I followed your suggestion and no longer touch zlib at all, even
though that means there is no test which checksums an entire
super-large mmap() region.
Instead I've changed/added test cases in test_mmap.py (a stand-alone
sketch of the access pattern follows the list):

- Removed all context-manager usage from LargeMmapTests().
  This feature was introduced in 3.2 and is already tested
  elsewhere.  This way the test is almost identical on 2.7 and 3.x.
- Dropped _working_largefile().  It created a useless large file
  only to unlink it again immediately.  Instead, the necessary
  try/except is now done directly in the tests.
- (The tests now check directly after .flush(), without reopening
  the file.)
- These new tests don't run on 32-bit systems.
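
For reference, here's a minimal stand-alone sketch (Python 3) of the
access pattern the new tests exercise.  The START/PAYLOAD names and
the temporary file are only illustrative, not part of the patch; on
filesystems without sparse-file support the file really occupies
about 2 GB, which is why the tests guard this behind the 'largefile'
resource on Windows and OS X.

    import mmap, os, tempfile

    START = 0x7FFFFFFA          # a few bytes below the 32-bit sign-bit boundary
    PAYLOAD = b'  DEARdear  '   # spans the page boundary around that offset

    fd, fname = tempfile.mkstemp()
    os.close(fd)
    try:
        with open(fname, 'w+b') as f:
            f.seek(START)       # creates a sparse file where the FS supports it
            f.write(PAYLOAD)
            f.flush()
            m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
            try:
                # slice crossing the page boundary
                assert m[START+2:START+10] == b'DEARdear'
                # single subscript at a "critical" index (returns an int on 3.x)
                assert m[START+5] == ord('R')
            finally:
                m.close()
    finally:
        os.unlink(fname)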

May the juice be with you

----------
Added file: http://bugs.python.org/file21909/11277-test_mmap.1.py
Added file: http://bugs.python.org/file21910/11277-test_mmap-27.1.py

_______________________________________
Python tracker <report at bugs.python.org>
<http://bugs.python.org/issue11277>
_______________________________________
-------------- next part --------------
diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py
--- a/Lib/test/test_mmap.py
+++ b/Lib/test/test_mmap.py
@@ -1,4 +1,5 @@
-from test.support import TESTFN, run_unittest, import_module, unlink, requires
+from test.support import TESTFN, run_unittest, import_module, unlink
+from test.support import requires, _4G
 import unittest
 import os
 import re
@@ -662,44 +663,87 @@
     def tearDown(self):
         unlink(TESTFN)
 
-    def _working_largefile(self):
-        # Only run if the current filesystem supports large files.
-        f = open(TESTFN, 'wb', buffering=0)
+    def _test_splice(self, f, i):
+        # Test splicing with pages around "critical" values with respect to
+        # memory management.
+        # Issue 11277: does mmap() force materialization of backing store?
+        m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
         try:
-            f.seek(0x80000001)
-            f.write(b'x')
-            f.flush()
-        except (IOError, OverflowError):
-            raise unittest.SkipTest("filesystem does not have largefile support")
+            # Memory page before the payload
+            self.assertEqual(m[i+0:i+2], b'  ')
+            # Memory page after the payload
+            self.assertEqual(m[i+10:i+12], b'  ')
+            # Cross pages
+            self.assertEqual(m[i+2:i+10], b'DEARdear')
         finally:
-            f.close()
-            unlink(TESTFN)
+            m.close()
 
-    def test_large_offset(self):
+    def _test_subscr(self, f, idx, expect):
+        # Test subscript for critical values like INT32_MAX, UINT32_MAX
+        m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
+        try:
+            self.assertEqual(m[idx], expect)
+        finally:
+            m.close()
+
+    @unittest.skipUnless(sys.maxsize > _4G, "Can't run on a 32-bit system.")
+    def test_around_32bit_sbitflip(self):
+        start = 0x7FFFFFFA
         if sys.platform[:3] == 'win' or sys.platform == 'darwin':
             requires('largefile',
-                'test requires %s bytes and a long time to run' % str(0x180000000))
-        self._working_largefile()
-        with open(TESTFN, 'wb') as f:
-            f.seek(0x14FFFFFFF)
-            f.write(b" ")
+                     'test requires %s bytes and a long time to run' %
+                     str(start+12))
+        with open(TESTFN, 'w+b') as f:
+            try:
+                f.seek(start)
+                f.write(b'  DEARdear  ')
+                f.flush()
+            except (IOError, OverflowError):
+                raise unittest.SkipTest('filesystem does not have largefile '
+                                        'support')
+            self._test_splice(f, start)
+            self._test_subscr(f, start+len(b'  DEA'), ord(b'R'))
+            self._test_subscr(f, start+len(b'  DEARdea'), ord(b'r'))
+        unlink(TESTFN)
 
-        with open(TESTFN, 'rb') as f:
-            with mmap.mmap(f.fileno(), 0, offset=0x140000000, access=mmap.ACCESS_READ) as m:
-                self.assertEqual(m[0xFFFFFFF], 32)
+    @unittest.skipUnless(sys.maxsize > _4G, "Can't run on a 32-bit system.")
+    def test_around_32bit_excess(self):
+        start = 0xFFFFFFFA
+        if sys.platform[:3] == 'win' or sys.platform == 'darwin':
+            requires('largefile',
+                     'test requires %s bytes and a long time to run' %
+                     str(start+12))
+        with open(TESTFN, 'w+b') as f:
+            try:
+                f.seek(start)
+                f.write(b'  DEARdear  ')
+                f.flush()
+            except (IOError, OverflowError):
+                raise unittest.SkipTest('filesystem does not have largefile '
+                                        'support')
+            self._test_splice(f, start)
+            self._test_subscr(f, start+len(b'  DEA'), ord(b'R'))
+            self._test_subscr(f, start+len(b'  DEARdea'), ord(b'r'))
+        unlink(TESTFN)
 
     def test_large_filesize(self):
         if sys.platform[:3] == 'win' or sys.platform == 'darwin':
             requires('largefile',
                 'test requires %s bytes and a long time to run' % str(0x180000000))
-        self._working_largefile()
         with open(TESTFN, 'wb') as f:
-            f.seek(0x17FFFFFFF)
-            f.write(b" ")
+            try:
+                f.seek(0x17FFFFFFF)
+                f.write(b' ')
+            except (IOError, OverflowError):
+                raise unittest.SkipTest('filesystem does not have largefile '
+                                        'support')
 
         with open(TESTFN, 'rb') as f:
-            with mmap.mmap(f.fileno(), 0x10000, access=mmap.ACCESS_READ) as m:
+            m = mmap.mmap(f.fileno(), 0x10000, access=mmap.ACCESS_READ)
+            try:
                 self.assertEqual(m.size(), 0x180000000)
+            finally:
+                m.close()
 
 
 def test_main():
-------------- next part --------------
diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py
--- a/Lib/test/test_mmap.py
+++ b/Lib/test/test_mmap.py
@@ -1,4 +1,5 @@
-from test.test_support import TESTFN, run_unittest, import_module, unlink, requires
+from test.test_support import TESTFN, run_unittest, import_module, unlink
+from test.test_support import requires, _4G
 import unittest
 import os, re, itertools, socket, sys
 
@@ -644,43 +645,80 @@
     def tearDown(self):
         unlink(TESTFN)
 
-    def _working_largefile(self):
-        # Only run if the current filesystem supports large files.
-        f = open(TESTFN, 'wb', buffering=0)
+    def _test_splice(self, f, i):
+        # Test splicing with pages around "critical" values with respect to
+        # memory management.
+        # Issue 11277: does mmap() force materialization of backing store?
+        m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
         try:
-            f.seek(0x80000001)
-            f.write(b'x')
-            f.flush()
-        except (IOError, OverflowError):
-            raise unittest.SkipTest("filesystem does not have largefile support")
+            # Memory page before the payload
+            self.assertEqual(m[i+0:i+2], b'  ')
+            # Memory page after the payload
+            self.assertEqual(m[i+10:i+12], b'  ')
+            # Cross pages
+            self.assertEqual(m[i+2:i+10], b'DEARdear')
         finally:
-            f.close()
-            unlink(TESTFN)
+            m.close()
 
-    def test_large_offset(self):
+    def _test_subscr(self, f, idx, expect):
+        # Test subscript for critical values like INT32_MAX, UINT32_MAX
+        m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
+        try:
+            self.assertEqual(m[idx], expect)
+        finally:
+            m.close()
+
+    @unittest.skipUnless(sys.maxsize > _4G, "Can't run on a 32-bit system.")
+    def test_around_32bit_sbitflip(self):
+        start = 0x7FFFFFFA
         if sys.platform[:3] == 'win' or sys.platform == 'darwin':
             requires('largefile',
-                'test requires %s bytes and a long time to run' % str(0x180000000))
-        self._working_largefile()
-        with open(TESTFN, 'wb') as f:
-            f.seek(0x14FFFFFFF)
-            f.write(b" ")
+                     'test requires %s bytes and a long time to run' %
+                     str(start+12))
+        with open(TESTFN, 'w+b') as f:
+            try:
+                f.seek(start)
+                f.write(b'  DEARdear  ')
+                f.flush()
+            except (IOError, OverflowError):
+                raise unittest.SkipTest('filesystem does not have largefile '
+                                        'support')
+            self._test_splice(f, start)
+            self._test_subscr(f, start+len(b'  DEA'), b'R')
+            self._test_subscr(f, start+len(b'  DEARdea'), b'r')
+        unlink(TESTFN)
 
-        with open(TESTFN, 'rb') as f:
-            m = mmap.mmap(f.fileno(), 0, offset=0x140000000, access=mmap.ACCESS_READ)
+    @unittest.skipUnless(sys.maxsize > _4G, "Can't run on a 32-bit system.")
+    def test_around_32bit_excess(self):
+        start = 0xFFFFFFFA
+        if sys.platform[:3] == 'win' or sys.platform == 'darwin':
+            requires('largefile',
+                     'test requires %s bytes and a long time to run' %
+                     str(start+12))
+        with open(TESTFN, 'w+b') as f:
             try:
-                self.assertEqual(m[0xFFFFFFF], b" ")
-            finally:
-                m.close()
+                f.seek(start)
+                f.write(b'  DEARdear  ')
+                f.flush()
+            except (IOError, OverflowError):
+                raise unittest.SkipTest('filesystem does not have largefile '
+                                        'support')
+            self._test_splice(f, start)
+            self._test_subscr(f, start+len(b'  DEA'), b'R')
+            self._test_subscr(f, start+len(b'  DEARdea'), b'r')
+        unlink(TESTFN)
 
     def test_large_filesize(self):
         if sys.platform[:3] == 'win' or sys.platform == 'darwin':
             requires('largefile',
                 'test requires %s bytes and a long time to run' % str(0x180000000))
-        self._working_largefile()
         with open(TESTFN, 'wb') as f:
-            f.seek(0x17FFFFFFF)
-            f.write(b" ")
+            try:
+                f.seek(0x17FFFFFFF)
+                f.write(b' ')
+            except (IOError, OverflowError):
+                raise unittest.SkipTest('filesystem does not have largefile '
+                                        'support')
 
         with open(TESTFN, 'rb') as f:
             m = mmap.mmap(f.fileno(), 0x10000, access=mmap.ACCESS_READ)

