[pypy-svn] r47351 - in pypy/dist/pypy: rpython/lltypesystem rpython/lltypesystem/test rpython/memory translator/c translator/c/src

arigo at codespeak.net
Tue Oct 9 16:07:42 CEST 2007


Author: arigo
Date: Tue Oct  9 16:07:40 2007
New Revision: 47351

Modified:
   pypy/dist/pypy/rpython/lltypesystem/llarena.py
   pypy/dist/pypy/rpython/lltypesystem/test/test_llarena.py
   pypy/dist/pypy/rpython/memory/lltypelayout.py
   pypy/dist/pypy/translator/c/primitive.py
   pypy/dist/pypy/translator/c/src/mem.h
Log:
Introduce llarena.round_up_for_allocation(size).
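
For context, a minimal sketch of how the new helper is meant to be used
on the untranslated level (assuming a PyPy checkout on the path; the GC
code that will eventually call this is not part of this checkin):

    from pypy.rpython.lltypesystem import lltype, llmemory, llarena

    S = lltype.Struct('S', ('x', lltype.Signed))
    # returns a symbolic AddressOffset wrapping sizeof(S); the concrete
    # integer only exists after translation (or via lltypelayout's
    # convert_offset_to_int() in the simulator)
    size = llarena.round_up_for_allocation(llmemory.sizeof(S))
    a = llarena.arena_malloc(50, False)
    # reserving the rounded size keeps whatever is allocated next aligned
    llarena.arena_reserve(a, size)
    llarena.arena_free(a)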


Modified: pypy/dist/pypy/rpython/lltypesystem/llarena.py
==============================================================================
--- pypy/dist/pypy/rpython/lltypesystem/llarena.py	(original)
+++ pypy/dist/pypy/rpython/lltypesystem/llarena.py	Tue Oct  9 16:07:40 2007
@@ -73,6 +73,8 @@
         Arena.object_arena_location[addr2.ptr._obj] = self, offset
         # common case: 'size' starts with a GCHeaderOffset.  In this case
         # we can also remember that the real object starts after the header.
+        while isinstance(size, RoundedUpForAllocation):
+            size = size.basesize
         if (isinstance(size, llmemory.CompositeOffset) and
             isinstance(size.offsets[0], llmemory.GCHeaderOffset)):
             objaddr = addr2 + size.offsets[0]
@@ -167,6 +169,27 @@
     def _cast_to_int(self):
         return self.arena._getid() + self.offset
 
+
+class RoundedUpForAllocation(llmemory.AddressOffset):
+    """A size that is rounded up in order to preserve alignment of objects
+    following it.  For arenas containing heterogeneous objects.
+    """
+    def __init__(self, basesize):
+        assert isinstance(basesize, llmemory.AddressOffset)
+        self.basesize = basesize
+
+    def __repr__(self):
+        return '< RoundedUpForAllocation %r >' % (self.basesize,)
+
+    def ref(self, ptr):
+        return self.basesize.ref(ptr)
+
+    def _raw_malloc(self, rest, zero):
+        return self.basesize._raw_malloc(rest, zero=zero)
+
+    def raw_memcopy(self, srcadr, dstadr):
+        self.basesize.raw_memcopy(srcadr, dstadr)
+
 # ____________________________________________________________
 #
 # Public interface: arena_malloc(), arena_free(), arena_reset()
@@ -201,6 +224,11 @@
     assert isinstance(addr, fakearenaaddress)
     addr.arena.allocate_object(addr.offset, size)
 
+def round_up_for_allocation(size):
+    """Round up 'size' to preserve the alignment of the objects that
+    follow it.  For arenas containing heterogeneous objects."""
+    return RoundedUpForAllocation(size)
+
 # ____________________________________________________________
 #
 # Translation support: the functions above turn into the code below.
@@ -210,12 +238,14 @@
 import os, sys
 from pypy.rpython.lltypesystem import rffi, lltype
 from pypy.rpython.extfunc import register_external
+from pypy.rlib.objectmodel import debug_assert, CDefinedIntSymbolic
 
 if os.name == 'posix':
     READ_MAX = (sys.maxint//4) + 1    # upper bound on reads to avoid surprises
     os_read = rffi.llexternal('read',
                               [rffi.INT, llmemory.Address, rffi.SIZE_T],
-                              rffi.SIZE_T)
+                              rffi.SIZE_T,
+                              sandboxsafe=True)
 
     def clear_large_memory_chunk(baseaddr, size):
         # on Linux at least, reading from /dev/zero is the fastest way
@@ -277,3 +307,12 @@
                   llimpl=llimpl_arena_reserve,
                   llfakeimpl=arena_reserve,
                   sandboxsafe=True)
+
+llimpl_round_up_for_allocation = rffi.llexternal('ROUND_UP_FOR_ALLOCATION',
+                                                 [rffi.INT], rffi.INT,
+                                                 sandboxsafe=True)
+register_external(round_up_for_allocation, [int], int,
+                  'll_arena.round_up_for_allocation',
+                  llimpl=llimpl_round_up_for_allocation,
+                  llfakeimpl=round_up_for_allocation,
+                  sandboxsafe=True)
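
To illustrate the delegation in the new class (a sketch on the
untranslated level; the exact repr text may differ):

    from pypy.rpython.lltypesystem import lltype, llmemory, llarena

    S = lltype.Struct('S', ('x', lltype.Signed))
    size = llarena.round_up_for_allocation(llmemory.sizeof(S))
    assert isinstance(size, llarena.RoundedUpForAllocation)
    print repr(size)   # something like < RoundedUpForAllocation ... >
    # ref(), _raw_malloc() and raw_memcopy() all forward to the wrapped
    # basesize, so untranslated code behaves as if the size were not
    # rounded at all; only lltypelayout and the C backend (below) give
    # the wrapper an actual rounded integer value.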

Modified: pypy/dist/pypy/rpython/lltypesystem/test/test_llarena.py
==============================================================================
--- pypy/dist/pypy/rpython/lltypesystem/test/test_llarena.py	(original)
+++ pypy/dist/pypy/rpython/lltypesystem/test/test_llarena.py	Tue Oct  9 16:07:40 2007
@@ -3,6 +3,7 @@
 from pypy.rpython.lltypesystem.llmemory import cast_adr_to_ptr
 from pypy.rpython.lltypesystem.llarena import arena_malloc, arena_reset
 from pypy.rpython.lltypesystem.llarena import arena_reserve, arena_free
+from pypy.rpython.lltypesystem.llarena import round_up_for_allocation
 from pypy.rpython.lltypesystem.llarena import ArenaError
 
 def test_arena():
@@ -107,19 +108,20 @@
 
 
 SX = lltype.Struct('S', ('x',lltype.Signed))
+precomputed_size = round_up_for_allocation(llmemory.sizeof(SX))
 
 def test_look_inside_object():
     SPTR = lltype.Ptr(SX)
     myarenasize = 50
     a = arena_malloc(myarenasize, False)
     b = a + 4
-    arena_reserve(b, llmemory.sizeof(SX))
+    arena_reserve(b, precomputed_size)
     (b + llmemory.offsetof(SX, 'x')).signed[0] = 123
     assert llmemory.cast_adr_to_ptr(b, SPTR).x == 123
     llmemory.cast_adr_to_ptr(b, SPTR).x += 1
     assert (b + llmemory.offsetof(SX, 'x')).signed[0] == 124
     arena_reset(a, myarenasize, True)
-    arena_reserve(b, llmemory.sizeof(SX))
+    arena_reserve(b, round_up_for_allocation(llmemory.sizeof(SX)))
     assert llmemory.cast_adr_to_ptr(b, SPTR).x == 0
     arena_free(a)
     return 42
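
Note that the test exercises the new rounding in both forms:
precomputed_size is computed at module level and thus becomes a
prebuilt constant after translation (rendered by primitive.py below),
while the second arena_reserve() call computes the rounded size at run
time, going through the register_external() path above.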

Modified: pypy/dist/pypy/rpython/memory/lltypelayout.py
==============================================================================
--- pypy/dist/pypy/rpython/memory/lltypelayout.py	(original)
+++ pypy/dist/pypy/rpython/memory/lltypelayout.py	Tue Oct  9 16:07:40 2007
@@ -1,4 +1,4 @@
-from pypy.rpython.lltypesystem import lltype, llmemory
+from pypy.rpython.lltypesystem import lltype, llmemory, llarena
 
 import struct
 
@@ -106,5 +106,8 @@
         return sizeof(offset.gcheaderbuilder.HDR)
     elif isinstance(offset, llmemory.ArrayLengthOffset):
         return 0
+    elif isinstance(offset, llarena.RoundedUpForAllocation):
+        basesize = convert_offset_to_int(offset.basesize)
+        return (basesize + 7) & ~7
     else:
         raise Exception("unknown offset type %r"%offset)
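
The expression above rounds basesize up to the next multiple of 8.  A
quick plain-Python illustration of the bit trick (round_up_to_8 is a
hypothetical helper, not part of the checkin):

    def round_up_to_8(n):
        # adding 7 and then clearing the low three bits rounds up to a
        # multiple of 8 without branching
        return (n + 7) & ~7

    assert round_up_to_8(1)  == 8
    assert round_up_to_8(8)  == 8
    assert round_up_to_8(13) == 16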

Modified: pypy/dist/pypy/translator/c/primitive.py
==============================================================================
--- pypy/dist/pypy/translator/c/primitive.py	(original)
+++ pypy/dist/pypy/translator/c/primitive.py	Tue Oct  9 16:07:40 2007
@@ -8,6 +8,7 @@
      AddressOffset, ItemOffset, ArrayItemsOffset, FieldOffset, \
      CompositeOffset, ArrayLengthOffset, \
      GCHeaderOffset
+from pypy.rpython.lltypesystem.llarena import RoundedUpForAllocation
 from pypy.translator.c.support import cdecl, barebonearray
 
 # ____________________________________________________________
@@ -48,6 +49,9 @@
             return '0'
         elif type(value) == GCHeaderOffset:
             return '0'
+        elif type(value) == RoundedUpForAllocation:
+            return 'ROUND_UP_FOR_ALLOCATION(%s)' % (
+                name_signed(value.basesize, db))
         elif isinstance(value, CDefinedIntSymbolic):
             return str(value.expr)
         elif isinstance(value, ComputedIntSymbolic):
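
With this change, a prebuilt RoundedUpForAllocation constant is
rendered into the generated C source as ROUND_UP_FOR_ALLOCATION(...)
wrapped around whatever name_signed() produces for the basesize (e.g. a
sizeof() expression), so the actual rounding is deferred to the C macro
added to mem.h below.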

Modified: pypy/dist/pypy/translator/c/src/mem.h
==============================================================================
--- pypy/dist/pypy/translator/c/src/mem.h	(original)
+++ pypy/dist/pypy/translator/c/src/mem.h	Tue Oct  9 16:07:40 2007
@@ -2,6 +2,13 @@
 /************************************************************/
  /***  C header subsection: operations on LowLevelTypes    ***/
 
+/* alignment for arena-based garbage collectors: the following line
+   enforces an alignment of sizeof(double). */
+#define MEMORY_ALIGNMENT		sizeof(double)
+#define ROUND_UP_FOR_ALLOCATION(x)	\
+		(((x) + (MEMORY_ALIGNMENT-1)) & ~(MEMORY_ALIGNMENT-1))
+
+
 #define RAW_MALLOC_ZERO_FILLED 0
 
 #if RAW_MALLOC_ZERO_FILLED
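
Note that MEMORY_ALIGNMENT is sizeof(double), i.e. 8 on the usual
targets, so the macro computes the same value as the (basesize + 7) & ~7
expression hard-wired into lltypelayout.py above; the two would only
diverge on a platform where sizeof(double) is not 8.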


