[pypy-commit] pypy default: hg merge
mjacob
noreply at buildbot.pypy.org
Fri Oct 24 16:17:32 CEST 2014
Author: Manuel Jacob <me at manueljacob.de>
Branch:
Changeset: r74178:3f5db34f528d
Date: 2014-10-24 16:19 +0200
http://bitbucket.org/pypy/pypy/changeset/3f5db34f528d/
Log: hg merge
diff too long, truncating to 2000 out of 3530 lines
diff --git a/pypy/doc/release-2.3.1.rst b/pypy/doc/release-2.3.1.rst
--- a/pypy/doc/release-2.3.1.rst
+++ b/pypy/doc/release-2.3.1.rst
@@ -18,14 +18,11 @@
Please consider donating more, or even better convince your employer to donate,
so we can finish those projects! The three sub-projects are:
-* `Py3k`_ (supporting Python 3.x): the release PyPy3 2.3 is imminent.
-
* `STM`_ (software transactional memory): a preview will be released very soon,
once we fix a few bugs
* `NumPy`_ which requires installation of our fork of upstream numpy, available `on bitbucket`_
-.. _`Py3k`: http://pypy.org/py3donate.html
.. _`STM`: http://pypy.org/tmdonate2.html
.. _`NumPy`: http://pypy.org/numpydonate.html
.. _`on bitbucket`: https://www.bitbucket.org/pypy/numpy
diff --git a/pypy/module/_multibytecodec/c_codecs.py b/pypy/module/_multibytecodec/c_codecs.py
--- a/pypy/module/_multibytecodec/c_codecs.py
+++ b/pypy/module/_multibytecodec/c_codecs.py
@@ -115,8 +115,7 @@
def decodeex(decodebuf, stringdata, errors="strict", errorcb=None, namecb=None,
ignore_error=0):
inleft = len(stringdata)
- inbuf = rffi.get_nonmovingbuffer(stringdata)
- try:
+ with rffi.scoped_nonmovingbuffer(stringdata) as inbuf:
if pypy_cjk_dec_init(decodebuf, inbuf, inleft) < 0:
raise MemoryError
while True:
@@ -128,9 +127,6 @@
src = pypy_cjk_dec_outbuf(decodebuf)
length = pypy_cjk_dec_outlen(decodebuf)
return rffi.wcharpsize2unicode(src, length)
- #
- finally:
- rffi.free_nonmovingbuffer(stringdata, inbuf)
def multibytecodec_decerror(decodebuf, e, errors,
errorcb, namecb, stringdata):
@@ -159,11 +155,8 @@
assert errorcb
replace, end = errorcb(errors, namecb, reason,
stringdata, start, end)
- inbuf = rffi.get_nonmoving_unicodebuffer(replace)
- try:
+ with rffi.scoped_nonmoving_unicodebuffer(replace) as inbuf:
r = pypy_cjk_dec_replace_on_error(decodebuf, inbuf, len(replace), end)
- finally:
- rffi.free_nonmoving_unicodebuffer(replace, inbuf)
if r == MBERR_NOMEMORY:
raise MemoryError
@@ -210,8 +203,7 @@
def encodeex(encodebuf, unicodedata, errors="strict", errorcb=None,
namecb=None, ignore_error=0):
inleft = len(unicodedata)
- inbuf = rffi.get_nonmoving_unicodebuffer(unicodedata)
- try:
+ with rffi.scoped_nonmoving_unicodebuffer(unicodedata) as inbuf:
if pypy_cjk_enc_init(encodebuf, inbuf, inleft) < 0:
raise MemoryError
if ignore_error == 0:
@@ -233,9 +225,6 @@
src = pypy_cjk_enc_outbuf(encodebuf)
length = pypy_cjk_enc_outlen(encodebuf)
return rffi.charpsize2str(src, length)
- #
- finally:
- rffi.free_nonmoving_unicodebuffer(unicodedata, inbuf)
def multibytecodec_encerror(encodebuf, e, errors,
errorcb, namecb, unicodedata):
@@ -275,10 +264,7 @@
assert retu is not None
codec = pypy_cjk_enc_getcodec(encodebuf)
replace = encode(codec, retu, "strict", errorcb, namecb)
- inbuf = rffi.get_nonmovingbuffer(replace)
- try:
+ with rffi.scoped_nonmovingbuffer(replace) as inbuf:
r = pypy_cjk_enc_replace_on_error(encodebuf, inbuf, len(replace), end)
- finally:
- rffi.free_nonmovingbuffer(replace, inbuf)
if r == MBERR_NOMEMORY:
raise MemoryError
diff --git a/pypy/module/bz2/interp_bz2.py b/pypy/module/bz2/interp_bz2.py
--- a/pypy/module/bz2/interp_bz2.py
+++ b/pypy/module/bz2/interp_bz2.py
@@ -195,7 +195,7 @@
self._allocate_chunk(initial_size)
def _allocate_chunk(self, size):
- self.raw_buf, self.gc_buf = rffi.alloc_buffer(size)
+ self.raw_buf, self.gc_buf, self.case_num = rffi.alloc_buffer(size)
self.current_size = size
self.bzs.c_next_out = self.raw_buf
rffi.setintfield(self.bzs, 'c_avail_out', size)
@@ -204,8 +204,10 @@
assert 0 <= chunksize <= self.current_size
raw_buf = self.raw_buf
gc_buf = self.gc_buf
- s = rffi.str_from_buffer(raw_buf, gc_buf, self.current_size, chunksize)
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
+ case_num = self.case_num
+ s = rffi.str_from_buffer(raw_buf, gc_buf, case_num,
+ self.current_size, chunksize)
+ rffi.keep_buffer_alive_until_here(raw_buf, gc_buf, case_num)
self.current_size = 0
return s
@@ -225,7 +227,8 @@
def free(self):
if self.current_size > 0:
- rffi.keep_buffer_alive_until_here(self.raw_buf, self.gc_buf)
+ rffi.keep_buffer_alive_until_here(self.raw_buf, self.gc_buf,
+ self.case_num)
def __enter__(self):
return self
diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py
--- a/pypy/module/cpyext/object.py
+++ b/pypy/module/cpyext/object.py
@@ -446,11 +446,8 @@
count = space.len_w(w_str)
data = space.str_w(w_str)
- buf = rffi.get_nonmovingbuffer(data)
- try:
+ with rffi.scoped_nonmovingbuffer(data) as buf:
fwrite(buf, 1, count, fp)
- finally:
- rffi.free_nonmovingbuffer(data, buf)
return 0
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -326,13 +326,6 @@
self.raises(space, api, TypeError, api.PyUnicode_FromEncodedObject, space.wrap(u_text), null_charp, None)
rffi.free_charp(b_text)
- def test_leak(self):
- size = 50
- raw_buf, gc_buf = rffi.alloc_buffer(size)
- for i in range(size): raw_buf[i] = 'a'
- str = rffi.str_from_buffer(raw_buf, gc_buf, size, size)
- rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
-
def test_mbcs(self, space, api):
if sys.platform != 'win32':
py.test.skip("mcbs encoding only exists on Windows")
diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py
--- a/pypy/tool/release/package.py
+++ b/pypy/tool/release/package.py
@@ -160,6 +160,9 @@
if sys.platform == 'win32' and not rename_pypy_c.lower().endswith('.exe'):
rename_pypy_c += '.exe'
binaries = [(pypy_c, rename_pypy_c)]
+ libpypy_c = basedir.join('pypy', 'goal', 'libpypy-c.so')
+ if libpypy_c.check():
+ binaries.append('libpypy-c.so')
#
builddir = options.builddir
pypydir = builddir.ensure(name, dir=True)
@@ -212,6 +215,7 @@
directory next to the dlls, as per build instructions."""
import traceback;traceback.print_exc()
raise MissingDependenciesError('Tk runtime')
+
# Careful: to copy lib_pypy, copying just the hg-tracked files
# would not be enough: there are also ctypes_config_cache/_*_cache.py.
diff --git a/rpython/jit/backend/llsupport/gc.py b/rpython/jit/backend/llsupport/gc.py
--- a/rpython/jit/backend/llsupport/gc.py
+++ b/rpython/jit/backend/llsupport/gc.py
@@ -9,8 +9,8 @@
from rpython.rtyper.annlowlevel import llhelper, cast_instance_to_gcref
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.jit.codewriter import heaptracker
-from rpython.jit.metainterp.history import ConstPtr, AbstractDescr
-from rpython.jit.metainterp.resoperation import rop
+from rpython.jit.metainterp.history import ConstPtr, AbstractDescr, BoxPtr, ConstInt
+from rpython.jit.metainterp.resoperation import rop, ResOperation
from rpython.jit.backend.llsupport import symbolic, jitframe
from rpython.jit.backend.llsupport.symbolic import WORD
from rpython.jit.backend.llsupport.descr import SizeDescr, ArrayDescr
@@ -21,6 +21,37 @@
from rpython.memory.gctransform import asmgcroot
from rpython.jit.codewriter.effectinfo import EffectInfo
+class MovableObjectTracker(object):
+
+ ptr_array_type = lltype.GcArray(llmemory.GCREF)
+
+ def __init__(self, cpu, const_pointers):
+ size = len(const_pointers)
+ # check that there is at least one moving object (i.e. changing pointers).
+ # Otherwise there is no reason for an instance of this class.
+ assert size > 0
+ #
+ # prepare GC array to hold the pointers that may change
+ self.ptr_array = lltype.malloc(MovableObjectTracker.ptr_array_type, size)
+ self.ptr_array_descr = cpu.arraydescrof(MovableObjectTracker.ptr_array_type)
+ self.ptr_array_gcref = lltype.cast_opaque_ptr(llmemory.GCREF, self.ptr_array)
+ # use always the same ConstPtr to access the array
+ # (easier to read the JIT trace)
+ self.const_ptr_gcref_array = ConstPtr(self.ptr_array_gcref)
+ #
+ # assign each pointer an index and put the pointer into the GC array.
+ # as pointers and addresses are not a good key to use before translation
+ # ConstPtrs are used as the key for the dict.
+ self._indexes = {}
+ for index in range(size):
+ ptr = const_pointers[index]
+ self._indexes[ptr] = index
+ self.ptr_array[index] = ptr.value
+
+ def get_array_index(self, const_ptr):
+ index = self._indexes[const_ptr]
+ assert const_ptr.value == self.ptr_array[index]
+ return index
# ____________________________________________________________
class GcLLDescription(GcCache):
@@ -97,25 +128,91 @@
def gc_malloc_unicode(self, num_elem):
return self._bh_malloc_array(num_elem, self.unicode_descr)
- def _record_constptrs(self, op, gcrefs_output_list):
+ def _record_constptrs(self, op, gcrefs_output_list, ops_with_movable_const_ptr,
+ changeable_const_pointers):
+ ops_with_movable_const_ptr[op] = []
for i in range(op.numargs()):
v = op.getarg(i)
if isinstance(v, ConstPtr) and bool(v.value):
p = v.value
- rgc._make_sure_does_not_move(p)
- gcrefs_output_list.append(p)
+ if rgc._make_sure_does_not_move(p):
+ gcrefs_output_list.append(p)
+ else:
+ ops_with_movable_const_ptr[op].append(i)
+ if v not in changeable_const_pointers:
+ changeable_const_pointers.append(v)
+ #
if op.is_guard() or op.getopnum() == rop.FINISH:
llref = cast_instance_to_gcref(op.getdescr())
- rgc._make_sure_does_not_move(llref)
+ assert rgc._make_sure_does_not_move(llref)
gcrefs_output_list.append(llref)
+ #
+ if len(ops_with_movable_const_ptr[op]) == 0:
+ del ops_with_movable_const_ptr[op]
+
+ def _rewrite_changeable_constptrs(self, op, ops_with_movable_const_ptr, moving_obj_tracker):
+ newops = []
+ for arg_i in ops_with_movable_const_ptr[op]:
+ v = op.getarg(arg_i)
+ # assert to make sure we got what we expected
+ assert isinstance(v, ConstPtr)
+ result_ptr = BoxPtr()
+ array_index = moving_obj_tracker.get_array_index(v)
+ load_op = ResOperation(rop.GETARRAYITEM_GC,
+ [moving_obj_tracker.const_ptr_gcref_array,
+ ConstInt(array_index)],
+ result_ptr,
+ descr=moving_obj_tracker.ptr_array_descr)
+ newops.append(load_op)
+ op.setarg(arg_i, result_ptr)
+ #
+ newops.append(op)
+ return newops
def rewrite_assembler(self, cpu, operations, gcrefs_output_list):
rewriter = GcRewriterAssembler(self, cpu)
newops = rewriter.rewrite(operations)
- # record all GCREFs, because the GC (or Boehm) cannot see them and
- # keep them alive if they end up as constants in the assembler
+
+ # the key is an operation that contains a ConstPtr as an argument and
+ # this ConstPtrs pointer might change as it points to an object that
+ # can't be made non-moving (e.g. the object is pinned).
+ ops_with_movable_const_ptr = {}
+ #
+ # a list of such not really constant ConstPtrs.
+ changeable_const_pointers = []
for op in newops:
- self._record_constptrs(op, gcrefs_output_list)
+ # record all GCREFs, because the GC (or Boehm) cannot see them and
+ # keep them alive if they end up as constants in the assembler.
+ # If such a GCREF can change and we can't make the object it points
+ # to non-movable, we have to handle it separately. Such GCREF's are
+ # returned as ConstPtrs in 'changeable_const_pointers' and the
+ # affected operation is returned in 'ops_with_movable_const_ptr'.
+ # For this special case see 'rewrite_changeable_constptrs'.
+ self._record_constptrs(op, gcrefs_output_list,
+ ops_with_movable_const_ptr, changeable_const_pointers)
+ #
+ # handle pointers that are not guaranteed to stay the same
+ if len(ops_with_movable_const_ptr) > 0:
+ moving_obj_tracker = MovableObjectTracker(cpu, changeable_const_pointers)
+ #
+ if not we_are_translated():
+ # used for testing
+ self.last_moving_obj_tracker = moving_obj_tracker
+ # make sure the array containing the pointers is not collected by
+ # the GC (or Boehm)
+ gcrefs_output_list.append(moving_obj_tracker.ptr_array_gcref)
+ rgc._make_sure_does_not_move(moving_obj_tracker.ptr_array_gcref)
+
+ ops = newops
+ newops = []
+ for op in ops:
+ if op in ops_with_movable_const_ptr:
+ rewritten_ops = self._rewrite_changeable_constptrs(op,
+ ops_with_movable_const_ptr, moving_obj_tracker)
+ newops.extend(rewritten_ops)
+ else:
+ newops.append(op)
+ #
return newops
@specialize.memo()
diff --git a/rpython/jit/backend/llsupport/llmodel.py b/rpython/jit/backend/llsupport/llmodel.py
--- a/rpython/jit/backend/llsupport/llmodel.py
+++ b/rpython/jit/backend/llsupport/llmodel.py
@@ -301,6 +301,7 @@
return ofs, size, sign
unpack_fielddescr_size._always_inline_ = True
+ @specialize.memo()
def arraydescrof(self, A):
return get_array_descr(self.gc_ll_descr, A)
diff --git a/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py b/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py
new file mode 100644
--- /dev/null
+++ b/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py
@@ -0,0 +1,149 @@
+from test_rewrite import get_size_descr, get_array_descr, get_description, BaseFakeCPU
+from rpython.jit.backend.llsupport.descr import get_size_descr,\
+ get_field_descr, get_array_descr, ArrayDescr, FieldDescr,\
+ SizeDescrWithVTable, get_interiorfield_descr
+from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm,\
+ GcLLDescr_framework, MovableObjectTracker
+from rpython.jit.backend.llsupport import jitframe, gc
+from rpython.jit.metainterp.gc import get_description
+from rpython.jit.tool.oparser import parse
+from rpython.jit.metainterp.optimizeopt.util import equaloplists
+from rpython.jit.codewriter.heaptracker import register_known_gctype
+from rpython.jit.metainterp.history import JitCellToken, FLOAT
+from rpython.rtyper.lltypesystem import lltype, rclass, rffi, lltype, llmemory
+from rpython.jit.backend.x86.arch import WORD
+from rpython.rlib import rgc
+
+class Evaluator(object):
+ def __init__(self, scope):
+ self.scope = scope
+ def __getitem__(self, key):
+ return eval(key, self.scope)
+
+
+class FakeLoopToken(object):
+ pass
+
+# The following class is based on rpython.jit.backend.llsupport.test.test_rewrite.RewriteTests.
+# It's modified to be able to test the object pinning specific features.
+class RewriteTests(object):
+ def check_rewrite(self, frm_operations, to_operations, **namespace):
+ # objects to use inside the test
+ A = lltype.GcArray(lltype.Signed)
+ adescr = get_array_descr(self.gc_ll_descr, A)
+ adescr.tid = 4321
+ alendescr = adescr.lendescr
+ #
+ pinned_obj_type = lltype.GcStruct('PINNED_STRUCT', ('my_int', lltype.Signed))
+ pinned_obj_my_int_descr = get_field_descr(self.gc_ll_descr, pinned_obj_type, 'my_int')
+ pinned_obj_ptr = lltype.malloc(pinned_obj_type)
+ pinned_obj_gcref = lltype.cast_opaque_ptr(llmemory.GCREF, pinned_obj_ptr)
+ assert rgc.pin(pinned_obj_gcref)
+ #
+ notpinned_obj_type = lltype.GcStruct('NOT_PINNED_STRUCT', ('my_int', lltype.Signed))
+ notpinned_obj_my_int_descr = get_field_descr(self.gc_ll_descr, notpinned_obj_type, 'my_int')
+ notpinned_obj_ptr = lltype.malloc(notpinned_obj_type)
+ notpinned_obj_gcref = lltype.cast_opaque_ptr(llmemory.GCREF, notpinned_obj_ptr)
+ #
+ ptr_array_descr = self.cpu.arraydescrof(MovableObjectTracker.ptr_array_type)
+ #
+ vtable_descr = self.gc_ll_descr.fielddescr_vtable
+ O = lltype.GcStruct('O', ('parent', rclass.OBJECT),
+ ('x', lltype.Signed))
+ o_vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
+ register_known_gctype(self.cpu, o_vtable, O)
+ #
+ tiddescr = self.gc_ll_descr.fielddescr_tid
+ wbdescr = self.gc_ll_descr.write_barrier_descr
+ WORD = globals()['WORD']
+ #
+ strdescr = self.gc_ll_descr.str_descr
+ unicodedescr = self.gc_ll_descr.unicode_descr
+ strlendescr = strdescr.lendescr
+ unicodelendescr = unicodedescr.lendescr
+
+ casmdescr = JitCellToken()
+ clt = FakeLoopToken()
+ clt._ll_initial_locs = [0, 8]
+ frame_info = lltype.malloc(jitframe.JITFRAMEINFO, flavor='raw')
+ clt.frame_info = frame_info
+ frame_info.jfi_frame_depth = 13
+ frame_info.jfi_frame_size = 255
+ framedescrs = self.gc_ll_descr.getframedescrs(self.cpu)
+ framelendescr = framedescrs.arraydescr.lendescr
+ jfi_frame_depth = framedescrs.jfi_frame_depth
+ jfi_frame_size = framedescrs.jfi_frame_size
+ jf_frame_info = framedescrs.jf_frame_info
+ signedframedescr = self.cpu.signedframedescr
+ floatframedescr = self.cpu.floatframedescr
+ casmdescr.compiled_loop_token = clt
+ tzdescr = None # no one cares
+ #
+ namespace.update(locals())
+ #
+ for funcname in self.gc_ll_descr._generated_functions:
+ namespace[funcname] = self.gc_ll_descr.get_malloc_fn(funcname)
+ namespace[funcname + '_descr'] = getattr(self.gc_ll_descr,
+ '%s_descr' % funcname)
+ #
+ ops = parse(frm_operations, namespace=namespace)
+ operations = self.gc_ll_descr.rewrite_assembler(self.cpu,
+ ops.operations,
+ [])
+ # make the array containing the GCREF's accessible inside the tests.
+ # This must be done after we call 'rewrite_assembler'. Before that
+ # call 'last_moving_obj_tracker' is None or filled with some old
+ # value.
+ namespace['ptr_array_gcref'] = self.gc_ll_descr.last_moving_obj_tracker.ptr_array_gcref
+ expected = parse(to_operations % Evaluator(namespace),
+ namespace=namespace)
+ equaloplists(operations, expected.operations)
+ lltype.free(frame_info, flavor='raw')
+
+class TestFramework(RewriteTests):
+ def setup_method(self, meth):
+ class config_(object):
+ class translation(object):
+ gc = 'minimark'
+ gcrootfinder = 'asmgcc'
+ gctransformer = 'framework'
+ gcremovetypeptr = False
+ gcdescr = get_description(config_)
+ self.gc_ll_descr = GcLLDescr_framework(gcdescr, None, None, None,
+ really_not_translated=True)
+ self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
+ lambda cpu: True)
+ #
+ class FakeCPU(BaseFakeCPU):
+ def sizeof(self, STRUCT):
+ descr = SizeDescrWithVTable(104)
+ descr.tid = 9315
+ return descr
+ self.cpu = FakeCPU()
+
+ def test_simple_getfield(self):
+ self.check_rewrite("""
+ []
+ i0 = getfield_gc(ConstPtr(pinned_obj_gcref), descr=pinned_obj_my_int_descr)
+ """, """
+ []
+ p1 = getarrayitem_gc(ConstPtr(ptr_array_gcref), 0, descr=ptr_array_descr)
+ i0 = getfield_gc(p1, descr=pinned_obj_my_int_descr)
+ """)
+ assert len(self.gc_ll_descr.last_moving_obj_tracker._indexes) == 1
+
+ def test_simple_getfield_twice(self):
+ self.check_rewrite("""
+ []
+ i0 = getfield_gc(ConstPtr(pinned_obj_gcref), descr=pinned_obj_my_int_descr)
+ i1 = getfield_gc(ConstPtr(notpinned_obj_gcref), descr=notpinned_obj_my_int_descr)
+ i2 = getfield_gc(ConstPtr(pinned_obj_gcref), descr=pinned_obj_my_int_descr)
+ """, """
+ []
+ p1 = getarrayitem_gc(ConstPtr(ptr_array_gcref), 0, descr=ptr_array_descr)
+ i0 = getfield_gc(p1, descr=pinned_obj_my_int_descr)
+ i1 = getfield_gc(ConstPtr(notpinned_obj_gcref), descr=notpinned_obj_my_int_descr)
+ p2 = getarrayitem_gc(ConstPtr(ptr_array_gcref), 1, descr=ptr_array_descr)
+ i2 = getfield_gc(p2, descr=pinned_obj_my_int_descr)
+ """)
+ assert len(self.gc_ll_descr.last_moving_obj_tracker._indexes) == 2
diff --git a/rpython/jit/backend/llsupport/test/zrpy_gc_test.py b/rpython/jit/backend/llsupport/test/zrpy_gc_test.py
--- a/rpython/jit/backend/llsupport/test/zrpy_gc_test.py
+++ b/rpython/jit/backend/llsupport/test/zrpy_gc_test.py
@@ -10,6 +10,7 @@
from rpython.rtyper.lltypesystem import lltype
from rpython.rlib.jit import JitDriver, dont_look_inside
from rpython.rlib.jit import elidable, unroll_safe
+from rpython.rlib.jit import promote
from rpython.jit.backend.llsupport.gc import GcLLDescr_framework
from rpython.tool.udir import udir
from rpython.config.translationoption import DEFL_GC
@@ -22,6 +23,12 @@
next = None
+class Y(object):
+ # for pinning tests we need an object without references to other
+ # objects
+ def __init__(self, x=0):
+ self.x = x
+
class CheckError(Exception):
pass
@@ -778,3 +785,121 @@
def test_compile_framework_call_assembler(self):
self.run('compile_framework_call_assembler')
+
+ def define_pinned_simple(cls):
+ class H:
+ inst = None
+ helper = H()
+
+ @dont_look_inside
+ def get_y():
+ if not helper.inst:
+ helper.inst = Y()
+ helper.inst.x = 101
+ check(rgc.pin(helper.inst))
+ else:
+ check(rgc._is_pinned(helper.inst))
+ return helper.inst
+
+ def fn(n, x, *args):
+ t = get_y()
+ promote(t)
+ t.x += 11
+ n -= 1
+ return (n, x) + args
+
+ return None, fn, None
+
+ def test_pinned_simple(self):
+ self.run('pinned_simple')
+
+ def define_pinned_unpin(cls):
+ class H:
+ inst = None
+ pinned = False
+ count_pinned = 0
+ count_unpinned = 0
+ helper = H()
+
+ @dont_look_inside
+ def get_y(n):
+ if not helper.inst:
+ helper.inst = Y()
+ helper.inst.x = 101
+ helper.pinned = True
+ check(rgc.pin(helper.inst))
+ elif n < 100 and helper.pinned:
+ rgc.unpin(helper.inst)
+ helper.pinned = False
+ #
+ if helper.pinned:
+ check(rgc._is_pinned(helper.inst))
+ helper.count_pinned += 1
+ else:
+ check(not rgc._is_pinned(helper.inst))
+ helper.count_unpinned += 1
+ return helper.inst
+
+ def fn(n, x, *args):
+ t = get_y(n)
+ promote(t)
+ check(t.x == 101)
+ n -= 1
+ return (n, x) + args
+
+ def after(n, x, *args):
+ check(helper.count_pinned > 0)
+ check(helper.count_unpinned > 0)
+ check(not helper.pinned)
+
+ return None, fn, after
+
+ def test_pinned_unpin(self):
+ self.run('pinned_unpin')
+
+ def define_multiple_pinned(cls):
+ class H:
+ inst1 = None
+ inst2 = None
+ inst3 = None
+ initialised = False
+ helper = H()
+
+ @dont_look_inside
+ def get_instances():
+ if not helper.initialised:
+ helper.inst1 = Y()
+ helper.inst1.x = 101
+ check(rgc.pin(helper.inst1))
+ #
+ helper.inst2 = Y()
+ helper.inst2.x = 102
+ #
+ helper.inst3 = Y()
+ helper.inst3.x = 103
+ check(rgc.pin(helper.inst3))
+ #
+ helper.initialised = True
+ #
+ check(rgc._is_pinned(helper.inst1))
+ check(not rgc._is_pinned(helper.inst2))
+ check(rgc._is_pinned(helper.inst3))
+ return (helper.inst1, helper.inst2, helper.inst3)
+
+ def fn(n, x, *args):
+ inst1, inst2, inst3 = get_instances()
+ promote(inst1)
+ promote(inst2)
+ promote(inst3)
+ #
+ check(inst1.x == 101)
+ check(inst2.x == 102)
+ check(inst3.x == 103)
+ #
+ n -= 1
+ return (n, x) + args
+
+ return None, fn, None
+
+ def test_multiple_pinned(self):
+ self.run('multiple_pinned')
diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py
--- a/rpython/jit/codewriter/jtransform.py
+++ b/rpython/jit/codewriter/jtransform.py
@@ -506,30 +506,32 @@
# XXX some of the following functions should not become residual calls
# but be really compiled
- rewrite_op_int_floordiv_ovf_zer = _do_builtin_call
- rewrite_op_int_floordiv_ovf = _do_builtin_call
- rewrite_op_int_floordiv_zer = _do_builtin_call
- rewrite_op_int_mod_ovf_zer = _do_builtin_call
- rewrite_op_int_mod_ovf = _do_builtin_call
- rewrite_op_int_mod_zer = _do_builtin_call
- rewrite_op_int_lshift_ovf = _do_builtin_call
- rewrite_op_int_abs = _do_builtin_call
- rewrite_op_llong_abs = _do_builtin_call
- rewrite_op_llong_floordiv = _do_builtin_call
- rewrite_op_llong_floordiv_zer = _do_builtin_call
- rewrite_op_llong_mod = _do_builtin_call
- rewrite_op_llong_mod_zer = _do_builtin_call
- rewrite_op_ullong_floordiv = _do_builtin_call
- rewrite_op_ullong_floordiv_zer = _do_builtin_call
- rewrite_op_ullong_mod = _do_builtin_call
- rewrite_op_ullong_mod_zer = _do_builtin_call
- rewrite_op_gc_identityhash = _do_builtin_call
- rewrite_op_gc_id = _do_builtin_call
- rewrite_op_uint_mod = _do_builtin_call
- rewrite_op_cast_float_to_uint = _do_builtin_call
- rewrite_op_cast_uint_to_float = _do_builtin_call
- rewrite_op_weakref_create = _do_builtin_call
- rewrite_op_weakref_deref = _do_builtin_call
+ rewrite_op_int_floordiv_ovf_zer = _do_builtin_call
+ rewrite_op_int_floordiv_ovf = _do_builtin_call
+ rewrite_op_int_floordiv_zer = _do_builtin_call
+ rewrite_op_int_mod_ovf_zer = _do_builtin_call
+ rewrite_op_int_mod_ovf = _do_builtin_call
+ rewrite_op_int_mod_zer = _do_builtin_call
+ rewrite_op_int_lshift_ovf = _do_builtin_call
+ rewrite_op_int_abs = _do_builtin_call
+ rewrite_op_llong_abs = _do_builtin_call
+ rewrite_op_llong_floordiv = _do_builtin_call
+ rewrite_op_llong_floordiv_zer = _do_builtin_call
+ rewrite_op_llong_mod = _do_builtin_call
+ rewrite_op_llong_mod_zer = _do_builtin_call
+ rewrite_op_ullong_floordiv = _do_builtin_call
+ rewrite_op_ullong_floordiv_zer = _do_builtin_call
+ rewrite_op_ullong_mod = _do_builtin_call
+ rewrite_op_ullong_mod_zer = _do_builtin_call
+ rewrite_op_gc_identityhash = _do_builtin_call
+ rewrite_op_gc_id = _do_builtin_call
+ rewrite_op_gc_pin = _do_builtin_call
+ rewrite_op_gc_unpin = _do_builtin_call
+ rewrite_op_uint_mod = _do_builtin_call
+ rewrite_op_cast_float_to_uint = _do_builtin_call
+ rewrite_op_cast_uint_to_float = _do_builtin_call
+ rewrite_op_weakref_create = _do_builtin_call
+ rewrite_op_weakref_deref = _do_builtin_call
rewrite_op_gc_add_memory_pressure = _do_builtin_call
# ----------
diff --git a/rpython/jit/codewriter/support.py b/rpython/jit/codewriter/support.py
--- a/rpython/jit/codewriter/support.py
+++ b/rpython/jit/codewriter/support.py
@@ -230,6 +230,13 @@
return llop.gc_id(lltype.Signed, ptr)
+def _ll_1_gc_pin(ptr):
+ return llop.gc_pin(lltype.Bool, ptr)
+
+def _ll_1_gc_unpin(ptr):
+ llop.gc_unpin(lltype.Void, ptr)
+
+
@oopspec("jit.force_virtual(inst)")
def _ll_1_jit_force_virtual(inst):
return llop.jit_force_virtual(lltype.typeOf(inst), inst)
diff --git a/rpython/memory/gc/base.py b/rpython/memory/gc/base.py
--- a/rpython/memory/gc/base.py
+++ b/rpython/memory/gc/base.py
@@ -18,6 +18,7 @@
needs_write_barrier = False
malloc_zero_filled = False
prebuilt_gc_objects_are_static_roots = True
+ can_usually_pin_objects = False
object_minimal_size = 0
gcflag_extra = 0 # or a real GC flag that is always 0 when not collecting
@@ -72,7 +73,8 @@
has_custom_trace,
get_custom_trace,
fast_path_tracing,
- has_gcptr):
+ has_gcptr,
+ cannot_pin):
self.getfinalizer = getfinalizer
self.getlightfinalizer = getlightfinalizer
self.is_varsize = is_varsize
@@ -91,6 +93,7 @@
self.get_custom_trace = get_custom_trace
self.fast_path_tracing = fast_path_tracing
self.has_gcptr = has_gcptr
+ self.cannot_pin = cannot_pin
def get_member_index(self, type_id):
return self.member_index(type_id)
@@ -168,6 +171,15 @@
def can_move(self, addr):
return False
+ def pin(self, addr):
+ return False
+
+ def unpin(self, addr):
+ pass
+
+ def _is_pinned(self, addr):
+ return False
+
def set_max_heap_size(self, size):
raise NotImplementedError
diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
--- a/rpython/memory/gc/incminimark.py
+++ b/rpython/memory/gc/incminimark.py
@@ -64,16 +64,16 @@
from rpython.rlib.debug import ll_assert, debug_print, debug_start, debug_stop
from rpython.rlib.objectmodel import specialize
-
#
# Handles the objects in 2 generations:
#
# * young objects: allocated in the nursery if they are not too large, or
# raw-malloced otherwise. The nursery is a fixed-size memory buffer of
# 4MB by default. When full, we do a minor collection;
-# the surviving objects from the nursery are moved outside, and the
-# non-surviving raw-malloced objects are freed. All surviving objects
-# become old.
+# - surviving objects from the nursery are moved outside and become old,
+# - non-surviving raw-malloced objects are freed,
+# - and pinned objects are kept at their place inside the nursery and stay
+# young.
#
# * old objects: never move again. These objects are either allocated by
# minimarkpage.py (if they are small), or raw-malloced (if they are not
@@ -81,7 +81,6 @@
#
WORD = LONG_BIT // 8
-NULL = llmemory.NULL
first_gcflag = 1 << (LONG_BIT//2)
@@ -132,7 +131,21 @@
# a minor collection.
GCFLAG_VISITED_RMY = first_gcflag << 8
-_GCFLAG_FIRST_UNUSED = first_gcflag << 9 # the first unused bit
+# The following flag is set on nursery objects to keep them in the nursery.
+# This means that a young object with this flag is not moved out
+# of the nursery during a minor collection. See pin()/unpin() for further
+# details.
+GCFLAG_PINNED = first_gcflag << 9
+
+# The following flag is set only on objects outside the nursery
+# (i.e. old objects). Therefore we can reuse GCFLAG_PINNED as it is used for
+# the same feature (object pinning) and GCFLAG_PINNED is only used on nursery
+# objects.
+# If this flag is set, the flagged object is already an element of
+# 'old_objects_pointing_to_pinned' and doesn't have to be added again.
+GCFLAG_PINNED_OBJECT_PARENT_KNOWN = GCFLAG_PINNED
+
+_GCFLAG_FIRST_UNUSED = first_gcflag << 10 # the first unused bit
# States for the incremental GC
@@ -168,7 +181,8 @@
inline_simple_malloc_varsize = True
needs_write_barrier = True
prebuilt_gc_objects_are_static_roots = False
- malloc_zero_filled = False # xxx experiment with False
+ can_usually_pin_objects = True
+ malloc_zero_filled = False
gcflag_extra = GCFLAG_EXTRA
# All objects start with a HDR, i.e. with a field 'tid' which contains
@@ -270,6 +284,7 @@
self.max_heap_size = 0.0
self.max_heap_size_already_raised = False
self.max_delta = float(r_uint(-1))
+ self.max_number_of_pinned_objects = 0 # computed later
#
self.card_page_indices = card_page_indices
if self.card_page_indices > 0:
@@ -281,9 +296,9 @@
# it gives a lower bound on the allowed size of the nursery.
self.nonlarge_max = large_object - 1
#
- self.nursery = NULL
- self.nursery_free = NULL
- self.nursery_top = NULL
+ self.nursery = llmemory.NULL
+ self.nursery_free = llmemory.NULL
+ self.nursery_top = llmemory.NULL
self.debug_tiny_nursery = -1
self.debug_rotating_nurseries = lltype.nullptr(NURSARRAY)
self.extra_threshold = 0
@@ -350,6 +365,22 @@
# minor collection.
self.nursery_objects_shadows = self.AddressDict()
#
+ # A sorted deque containing addresses of pinned objects.
+ # This collection is used to make sure we don't overwrite pinned objects.
+ # Each minor collection creates a new deque containing the active pinned
+ # objects. The addresses are used to set the next 'nursery_top'.
+ self.nursery_barriers = self.AddressDeque()
+ #
+ # Counter tracking how many pinned objects currently reside inside
+ # the nursery.
+ self.pinned_objects_in_nursery = 0
+ #
+ # Keeps track of old objects pointing to pinned objects. These objects
+ # must be traced every minor collection. Without tracing them the
+ # referenced pinned object wouldn't be visited and therefore collected.
+ self.old_objects_pointing_to_pinned = self.AddressStack()
+ self.updated_old_objects_pointing_to_pinned = False
+ #
# Allocate a nursery. In case of auto_nursery_size, start by
# allocating a very small nursery, enough to do things like look
# up the env var, which requires the GC; and then really
@@ -423,6 +454,10 @@
llarena.arena_free(self.nursery)
self.nursery_size = newsize
self.allocate_nursery()
+ #
+ # Estimate this number conservatively
+ bigobj = self.nonlarge_max + 1
+ self.max_number_of_pinned_objects = self.nursery_size / (bigobj * 2)
def _nursery_memory_size(self):
extra = self.nonlarge_max + 1
@@ -558,10 +593,7 @@
#
# Get the memory from the nursery. If there is not enough space
# there, do a collect first.
- result = self.nursery_free
- self.nursery_free = result + totalsize
- if self.nursery_free > self.nursery_top:
- result = self.collect_and_reserve(result, totalsize)
+ result = self.collect_and_reserve(rawtotalsize)
#
# Build the object.
llarena.arena_reserve(result, totalsize)
@@ -617,10 +649,7 @@
#
# Get the memory from the nursery. If there is not enough space
# there, do a collect first.
- result = self.nursery_free
- self.nursery_free = result + totalsize
- if self.nursery_free > self.nursery_top:
- result = self.collect_and_reserve(result, totalsize)
+ result = self.collect_and_reserve(raw_malloc_usage(totalsize))
#
# Build the object.
llarena.arena_reserve(result, totalsize)
@@ -644,7 +673,7 @@
self.minor_and_major_collection()
- def collect_and_reserve(self, prev_result, totalsize):
+ def collect_and_reserve(self, totalsize):
"""To call when nursery_free overflows nursery_top.
First check if the nursery_top is the real top, otherwise we
can just move the top of one cleanup and continue
@@ -653,26 +682,51 @@
and finally reserve 'totalsize' bytes at the start of the
now-empty nursery.
"""
- self.minor_collection()
- #
- # If the gc_state is not STATE_SCANNING, we're in the middle of
- # an incremental major collection. In this case, always progress
- # one step. If the gc_state is STATE_SCANNING, wait until there
- # is too much garbage before starting the next major collection.
- if (self.gc_state != STATE_SCANNING or
- self.get_total_memory_used() >
- self.next_major_collection_threshold):
- self.major_collection_step()
+ if self.nursery_free + totalsize <= self.nursery_top:
+ result = self.nursery_free
+ self.nursery_free = result + totalsize
+ return result
+
+ minor_collection_count = 0
+ while True:
+ if self.nursery_barriers.non_empty():
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ pinned_obj_size = size_gc_header + self.get_size(
+ self.nursery_top + size_gc_header)
+
+ self.nursery_free = self.nursery_top + pinned_obj_size
+ self.nursery_top = self.nursery_barriers.popleft()
+ else:
+ minor_collection_count += 1
+ self.minor_collection()
+ if minor_collection_count == 1:
+ #
+ # If the gc_state is not STATE_SCANNING, we're in the middle of
+ # an incremental major collection. In this case, always progress
+ # one step. If the gc_state is STATE_SCANNING, wait until there
+ # is too much garbage before starting the next major collection.
+ if (self.gc_state != STATE_SCANNING or
+ self.get_total_memory_used() >
+ self.next_major_collection_threshold):
+ self.major_collection_step()
+ #
+ # The nursery might not be empty now, because of
+ # execute_finalizers(). If it is almost full again,
+ # we need to fix it with another call to minor_collection().
+ if self.nursery_free + totalsize > self.nursery_top:
+ self.minor_collection()
+ #
+ else:
+ ll_assert(minor_collection_count == 2,
+ "Seeing minor_collection() at least twice."
+ "Too many pinned objects?")
#
- # The nursery might not be empty now, because of
- # execute_finalizers(). If it is almost full again,
- # we need to fix it with another call to minor_collection().
- if self.nursery_free + totalsize > self.nursery_top:
- self.minor_collection()
- #
- result = self.nursery_free
- self.nursery_free = result + totalsize
- ll_assert(self.nursery_free <= self.nursery_top, "nursery overflow")
+ result = self.nursery_free
+ if self.nursery_free + totalsize <= self.nursery_top:
+ self.nursery_free = result + totalsize
+ ll_assert(self.nursery_free <= self.nursery_top, "nursery overflow")
+ break
+ #
#
if self.debug_tiny_nursery >= 0: # for debugging
if self.nursery_top - self.nursery_free > self.debug_tiny_nursery:
@@ -845,6 +899,44 @@
"""Overrides the parent can_move()."""
return self.is_in_nursery(obj)
+ def pin(self, obj):
+ if self.pinned_objects_in_nursery >= self.max_number_of_pinned_objects:
+ return False
+ if not self.is_in_nursery(obj):
+ # old objects are already non-moving, therefore pinning
+            # makes no sense. If you run into this case, you may have
+            # forgotten to check can_move(obj).
+ return False
+ if self._is_pinned(obj):
+ # already pinned, we do not allow to pin it again.
+ # Reason: It would be possible that the first caller unpins
+ # while the second caller thinks it's still pinned.
+ return False
+ #
+ obj_type_id = self.get_type_id(obj)
+ if self.cannot_pin(obj_type_id):
+ # objects containing GC pointers can't be pinned. If we would add
+ # it, we would have to track all pinned objects and trace them
+            # every minor collection to make sure the referenced objects are
+ # kept alive. Right now this is not a use case that's needed.
+ # The check above also tests for being a less common kind of
+ # object: a weakref, or one with any kind of finalizer.
+ return False
+ #
+ self.header(obj).tid |= GCFLAG_PINNED
+ self.pinned_objects_in_nursery += 1
+ return True
+
+
+ def unpin(self, obj):
+ ll_assert(self._is_pinned(obj),
+ "unpin: object is already not pinned")
+ #
+ self.header(obj).tid &= ~GCFLAG_PINNED
+ self.pinned_objects_in_nursery -= 1
+
+ def _is_pinned(self, obj):
+ return (self.header(obj).tid & GCFLAG_PINNED) != 0
def shrink_array(self, obj, smallerlength):
#
@@ -897,7 +989,7 @@
def is_in_nursery(self, addr):
ll_assert(llmemory.cast_adr_to_int(addr) & 1 == 0,
"odd-valued (i.e. tagged) pointer unexpected here")
- return self.nursery <= addr < self.nursery_top
+ return self.nursery <= addr < self.nursery + self.nursery_size
def appears_to_be_young(self, addr):
# "is a valid addr to a young object?"
@@ -1000,11 +1092,18 @@
def debug_check_object(self, obj):
# We are after a minor collection, and possibly after a major
- # collection step. No object should be in the nursery
- ll_assert(not self.is_in_nursery(obj),
- "object in nursery after collection")
- ll_assert(self.header(obj).tid & GCFLAG_VISITED_RMY == 0,
- "GCFLAG_VISITED_RMY after collection")
+ # collection step. No object should be in the nursery (except
+ # pinned ones)
+ if not self._is_pinned(obj):
+ ll_assert(not self.is_in_nursery(obj),
+ "object in nursery after collection")
+ ll_assert(self.header(obj).tid & GCFLAG_VISITED_RMY == 0,
+ "GCFLAG_VISITED_RMY after collection")
+ ll_assert(self.header(obj).tid & GCFLAG_PINNED == 0,
+ "GCFLAG_PINNED outside the nursery after collection")
+ else:
+ ll_assert(self.is_in_nursery(obj),
+ "pinned object not in nursery")
if self.gc_state == STATE_SCANNING:
self._debug_check_object_scanning(obj)
@@ -1040,6 +1139,12 @@
pass # black -> gray
elif self.header(obj).tid & GCFLAG_NO_HEAP_PTRS != 0:
pass # black -> white-but-prebuilt-so-dont-care
+ elif self._is_pinned(obj):
+ # black -> pinned: the pinned object is a white one as
+ # every minor collection visits them and takes care of
+ # visiting pinned objects.
+ # XXX (groggi) double check with fijal/armin
+ pass # black -> pinned
else:
ll_assert(False, "black -> white pointer found")
@@ -1048,9 +1153,9 @@
# but this flag is progressively removed in the sweeping phase.
# All objects should have this flag, except if they
- # don't have any GC pointer
+ # don't have any GC pointer or are pinned objects
typeid = self.get_type_id(obj)
- if self.has_gcptr(typeid):
+ if self.has_gcptr(typeid) and not self._is_pinned(obj):
ll_assert(self.header(obj).tid & GCFLAG_TRACK_YOUNG_PTRS != 0,
"missing GCFLAG_TRACK_YOUNG_PTRS")
# the GCFLAG_FINALIZATION_ORDERING should not be set between coll.
@@ -1264,6 +1369,17 @@
one of the following flags a bit too eagerly, which means we'll have
a bit more objects to track, but being on the safe side.
"""
+ # obscuuuure. The flag 'updated_old_objects_pointing_to_pinned'
+ # is set to True when 'old_objects_pointing_to_pinned' is modified.
+ # Here, when it was modified, then we do a write_barrier() on
+ # all items in that list (there should only be a small number,
+ # so we don't care). The goal is that the logic that follows below
+ # works as expected...
+ if self.updated_old_objects_pointing_to_pinned:
+ self.old_objects_pointing_to_pinned.foreach(
+ self._wb_old_object_pointing_to_pinned, None)
+ self.updated_old_objects_pointing_to_pinned = False
+ #
source_hdr = self.header(source_addr)
dest_hdr = self.header(dest_addr)
if dest_hdr.tid & GCFLAG_TRACK_YOUNG_PTRS == 0:
@@ -1324,6 +1440,21 @@
self.old_objects_with_cards_set.append(dest_addr)
dest_hdr.tid |= GCFLAG_CARDS_SET
+ def _wb_old_object_pointing_to_pinned(self, obj, ignore):
+ self.write_barrier(obj)
+
+ def record_pinned_object_with_shadow(self, obj, new_shadow_object_dict):
+ # checks if the pinned object has a shadow and if so add it to the
+ # dict of shadows.
+ obj = obj + self.gcheaderbuilder.size_gc_header
+ shadow = self.nursery_objects_shadows.get(obj)
+ if shadow != llmemory.NULL:
+ # visit shadow to keep it alive
+            # XXX seems like it is safe to set GCFLAG_VISITED, however
+ # should be double checked
+ self.header(shadow).tid |= GCFLAG_VISITED
+ new_shadow_object_dict.setitem(obj, shadow)
+
# ----------
# Nursery collection
@@ -1333,6 +1464,19 @@
#
debug_start("gc-minor")
#
+ # All nursery barriers are invalid from this point on. They
+ # are evaluated anew as part of the minor collection.
+ self.nursery_barriers.delete()
+ #
+ # Keeps track of surviving pinned objects. See also '_trace_drag_out()'
+ # where this stack is filled. Pinning an object only prevents it from
+ # being moved, not from being collected if it is not reachable anymore.
+ self.surviving_pinned_objects = self.AddressStack()
+ # The following counter keeps track of alive and pinned young objects
+        # inside the nursery. We reset it here and increase it in
+ # '_trace_drag_out()'.
+ self.pinned_objects_in_nursery = 0
+ #
# Before everything else, remove from 'old_objects_pointing_to_young'
# the young arrays.
if self.young_rawmalloced_objects:
@@ -1357,6 +1501,21 @@
self.nursery_surviving_size = 0
self.collect_roots_in_nursery()
#
+ # visit all objects that are known for pointing to pinned
+ # objects. This way we populate 'surviving_pinned_objects'
+        # with pinned objects that are (only) visible from an old
+ # object.
+ # Additionally we create a new list as it may be that an old object
+ # no longer points to a pinned one. Such old objects won't be added
+ # again to 'old_objects_pointing_to_pinned'.
+ if self.old_objects_pointing_to_pinned.non_empty():
+ current_old_objects_pointing_to_pinned = \
+ self.old_objects_pointing_to_pinned
+ self.old_objects_pointing_to_pinned = self.AddressStack()
+ current_old_objects_pointing_to_pinned.foreach(
+ self._visit_old_objects_pointing_to_pinned, None)
+ current_old_objects_pointing_to_pinned.delete()
+ #
while True:
# If we are using card marking, do a partial trace of the arrays
# that are flagged with GCFLAG_CARDS_SET.
@@ -1385,27 +1544,83 @@
if self.young_objects_with_light_finalizers.non_empty():
self.deal_with_young_objects_with_finalizers()
#
- # Clear this mapping.
+ # Clear this mapping. Without pinned objects we just clear the dict
+ # as all objects in the nursery are dragged out of the nursery and, if
+ # needed, into their shadow. However, if we have pinned objects we have
+ # to check if those pinned object have a shadow and keep a dictionary
+ # filled with shadow information for them as they stay in the nursery.
if self.nursery_objects_shadows.length() > 0:
- self.nursery_objects_shadows.clear()
+ if self.surviving_pinned_objects.non_empty():
+ new_shadows = self.AddressDict()
+ self.surviving_pinned_objects.foreach(
+ self.record_pinned_object_with_shadow, new_shadows)
+ self.nursery_objects_shadows.delete()
+ self.nursery_objects_shadows = new_shadows
+ else:
+ self.nursery_objects_shadows.clear()
#
# Walk the list of young raw-malloced objects, and either free
# them or make them old.
if self.young_rawmalloced_objects:
self.free_young_rawmalloced_objects()
#
- # All live nursery objects are out, and the rest dies. Fill
- # the nursery up to the cleanup point with zeros
+ # All live nursery objects are out of the nursery or pinned inside
+ # the nursery. Create nursery barriers to protect the pinned objects,
+ # fill the rest of the nursery with zeros and reset the current nursery
+ # pointer.
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ nursery_barriers = self.AddressDeque()
+ prev = self.nursery
+ self.surviving_pinned_objects.sort()
+ assert self.pinned_objects_in_nursery == \
+ self.surviving_pinned_objects.length()
+ while self.surviving_pinned_objects.non_empty():
+ #
+ cur = self.surviving_pinned_objects.pop()
+ assert cur >= prev
+ #
+ # clear the arena between the last pinned object (or arena start)
+ # and the pinned object
+ pinned_obj_size = llarena.getfakearenaaddress(cur) - prev
+ if self.gc_nursery_debug:
+ llarena.arena_reset(prev, pinned_obj_size, 3)
+ else:
+ llarena.arena_reset(prev, pinned_obj_size, 0)
+ # XXX: debug_rotate_nursery missing here
+ #
+ # clean up object's flags
+ obj = cur + size_gc_header
+ self.header(obj).tid &= ~GCFLAG_VISITED
+ #
+ # create a new nursery barrier for the pinned object
+ nursery_barriers.append(cur)
+ #
+ # update 'prev' to the end of the 'cur' object
+ prev = prev + pinned_obj_size + \
+ (size_gc_header + self.get_size(obj))
+ #
+ # reset everything after the last pinned object till the end of the arena
if self.gc_nursery_debug:
- llarena.arena_reset(self.nursery, self.nursery_size, 3)
+ llarena.arena_reset(prev, self.nursery + self.nursery_size - prev, 3)
else:
- llarena.arena_reset(self.nursery, self.nursery_size, 0)
- self.debug_rotate_nursery()
+ llarena.arena_reset(prev, self.nursery + self.nursery_size - prev, 0)
+ #
+ nursery_barriers.append(self.nursery + self.nursery_size)
+ self.nursery_barriers = nursery_barriers
+ self.surviving_pinned_objects.delete()
+ #
+ # XXX gc-minimark-pinning does a debug_rotate_nursery() here (groggi)
self.nursery_free = self.nursery
- self.nursery_top = self.nursery + self.nursery_size
+ self.nursery_top = self.nursery_barriers.popleft()
+ #
+ # clear GCFLAG_PINNED_OBJECT_PARENT_KNOWN from all parents in the list.
+ self.old_objects_pointing_to_pinned.foreach(
+ self._reset_flag_old_objects_pointing_to_pinned, None)
#
debug_print("minor collect, total memory used:",
self.get_total_memory_used())
+ debug_print("number of pinned objects:",
+ self.pinned_objects_in_nursery)
if self.DEBUG >= 2:
self.debug_check_consistency() # expensive!
#
@@ -1413,6 +1628,12 @@
#
debug_stop("gc-minor")
+ def _reset_flag_old_objects_pointing_to_pinned(self, obj, ignore):
+ assert self.header(obj).tid & GCFLAG_PINNED_OBJECT_PARENT_KNOWN
+ self.header(obj).tid &= ~GCFLAG_PINNED_OBJECT_PARENT_KNOWN
+
+ def _visit_old_objects_pointing_to_pinned(self, obj, ignore):
+ self.trace(obj, self._trace_drag_out, obj)
def collect_roots_in_nursery(self):
# we don't need to trace prebuilt GcStructs during a minor collect:
@@ -1520,7 +1741,7 @@
"""obj must not be in the nursery. This copies all the
young objects it references out of the nursery.
"""
- self.trace(obj, self._trace_drag_out, None)
+ self.trace(obj, self._trace_drag_out, obj)
def trace_and_drag_out_of_nursery_partial(self, obj, start, stop):
"""Like trace_and_drag_out_of_nursery(), but limited to the array
@@ -1529,14 +1750,14 @@
ll_assert(start < stop, "empty or negative range "
"in trace_and_drag_out_of_nursery_partial()")
#print 'trace_partial:', start, stop, '\t', obj
- self.trace_partial(obj, start, stop, self._trace_drag_out, None)
+ self.trace_partial(obj, start, stop, self._trace_drag_out, obj)
def _trace_drag_out1(self, root):
- self._trace_drag_out(root, None)
+ self._trace_drag_out(root, llmemory.NULL)
def _trace_drag_out1_marking_phase(self, root):
- self._trace_drag_out(root, None)
+ self._trace_drag_out(root, llmemory.NULL)
#
# We are in the MARKING state: we must also record this object
# if it was young. Don't bother with old objects in general,
@@ -1549,7 +1770,7 @@
if not self.header(obj).tid & GCFLAG_VISITED:
self.more_objects_to_trace.append(obj)
- def _trace_drag_out(self, root, ignored):
+ def _trace_drag_out(self, root, parent):
obj = root.address[0]
#print '_trace_drag_out(%x: %r)' % (hash(obj.ptr._obj), obj)
#
@@ -1567,7 +1788,7 @@
return
#
size_gc_header = self.gcheaderbuilder.size_gc_header
- if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
+ if self.header(obj).tid & (GCFLAG_HAS_SHADOW | GCFLAG_PINNED) == 0:
#
# Common case: 'obj' was not already forwarded (otherwise
# tid == -42, containing all flags), and it doesn't have the
@@ -1584,10 +1805,36 @@
root.address[0] = self.get_forwarding_address(obj)
return
#
+ elif self._is_pinned(obj):
+ hdr = self.header(obj)
+ #
+            # track parent of pinned object specially. This must be done before
+ # checking for GCFLAG_VISITED: it may be that the same pinned object
+ # is reachable from multiple sources (e.g. two old objects pointing
+ # to the same pinned object). In such a case we need all parents
+            # of the pinned object in the list. Otherwise the pinned object could
+ # become dead and be removed just because the first parent of it
+ # is dead and collected.
+ if parent != llmemory.NULL and \
+ not self.header(parent).tid & GCFLAG_PINNED_OBJECT_PARENT_KNOWN:
+ #
+ self.old_objects_pointing_to_pinned.append(parent)
+ self.updated_old_objects_pointing_to_pinned = True
+ self.header(parent).tid |= GCFLAG_PINNED
+ #
+ if hdr.tid & GCFLAG_VISITED:
+ return
+ #
+ hdr.tid |= GCFLAG_VISITED
+ #
+ self.surviving_pinned_objects.append(
+ llarena.getfakearenaaddress(obj - size_gc_header))
+ self.pinned_objects_in_nursery += 1
+ return
else:
# First visit to an object that has already a shadow.
newobj = self.nursery_objects_shadows.get(obj)
- ll_assert(newobj != NULL, "GCFLAG_HAS_SHADOW but no shadow found")
+ ll_assert(newobj != llmemory.NULL, "GCFLAG_HAS_SHADOW but no shadow found")
newhdr = newobj - size_gc_header
#
# Remove the flag GCFLAG_HAS_SHADOW, so that it doesn't get
@@ -1737,8 +1984,13 @@
debug_start("gc-collect-step")
debug_print("starting gc state: ", GC_STATES[self.gc_state])
# Debugging checks
- ll_assert(self.nursery_free == self.nursery,
- "nursery not empty in major_collection_step()")
+ if self.pinned_objects_in_nursery == 0:
+ ll_assert(self.nursery_free == self.nursery,
+ "nursery not empty in major_collection_step()")
+ else:
+ # XXX try to add some similar check to the above one for the case
+ # that the nursery still contains some pinned objects (groggi)
+ pass
self.debug_check_consistency()
@@ -1800,9 +2052,21 @@
# Light finalizers
if self.old_objects_with_light_finalizers.non_empty():
self.deal_with_old_objects_with_finalizers()
- #objects_to_trace processed fully, can move on to sweeping
+ # objects_to_trace processed fully, can move on to sweeping
self.ac.mass_free_prepare()
self.start_free_rawmalloc_objects()
+ #
+ # get rid of objects pointing to pinned objects that were not
+ # visited
+ if self.old_objects_pointing_to_pinned.non_empty():
+ new_old_objects_pointing_to_pinned = self.AddressStack()
+ self.old_objects_pointing_to_pinned.foreach(
+ self._sweep_old_objects_pointing_to_pinned,
+ new_old_objects_pointing_to_pinned)
+ self.old_objects_pointing_to_pinned.delete()
+ self.old_objects_pointing_to_pinned = \
+ new_old_objects_pointing_to_pinned
+ self.updated_old_objects_pointing_to_pinned = True
self.gc_state = STATE_SWEEPING
#END MARKING
elif self.gc_state == STATE_SWEEPING:
@@ -1881,6 +2145,10 @@
debug_print("stopping, now in gc state: ", GC_STATES[self.gc_state])
debug_stop("gc-collect-step")
+ def _sweep_old_objects_pointing_to_pinned(self, obj, new_list):
+ if self.header(obj).tid & GCFLAG_VISITED:
+ new_list.append(obj)
+
def _free_if_unvisited(self, hdr):
size_gc_header = self.gcheaderbuilder.size_gc_header
obj = hdr + size_gc_header
@@ -1965,7 +2233,13 @@
def _collect_ref_stk(self, root):
obj = root.address[0]
llop.debug_nonnull_pointer(lltype.Void, obj)
- self.objects_to_trace.append(obj)
+ if not self._is_pinned(obj):
+ # XXX: check if this is the right way (groggi).
+ # A pinned object can be on the stack. Such an object is handled
+ # by minor collections and shouldn't be specially handled by
+ # major collections. Therefore we only add not pinned objects to the
+ # list below.
+ self.objects_to_trace.append(obj)
def _collect_ref_rec(self, root, ignored):
self.objects_to_trace.append(root.address[0])
@@ -1995,8 +2269,10 @@
# flag set, then the object should be in 'prebuilt_root_objects',
# and the GCFLAG_VISITED will be reset at the end of the
# collection.
+ # Objects with GCFLAG_PINNED can't have gcptrs (see pin()), they can be
+ # ignored.
hdr = self.header(obj)
- if hdr.tid & (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS):
+ if hdr.tid & (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS | GCFLAG_PINNED):
return 0
#
# It's the first time. We set the flag VISITED. The trick is
@@ -2048,7 +2324,7 @@
# collection
if self.header(obj).tid & GCFLAG_HAS_SHADOW:
shadow = self.nursery_objects_shadows.get(obj)
- ll_assert(shadow != NULL,
+ ll_assert(shadow != llmemory.NULL,
"GCFLAG_HAS_SHADOW but no shadow found")
else:
shadow = self._allocate_shadow(obj)
@@ -2233,6 +2509,9 @@
# ----------
# Weakrefs
+ # XXX (groggi): weakref pointing to pinned object not supported.
+ # XXX (groggi): missing asserts/checks for the missing feature.
+
# The code relies on the fact that no weakref can be an old object
# weakly pointing to a young object. Indeed, weakrefs are immutable
# so they cannot point to an object that was created after it.
@@ -2255,6 +2534,11 @@
(obj + offset).address[0] = self.get_forwarding_address(
pointing_to)
else:
+ # If the target is pinned, then we reach this point too.
+ # It means that a hypothetical RPython interpreter that
+ # would let you take a weakref to a pinned object (strange
+ # thing not possible at all in PyPy) might see these
+ # weakrefs marked as dead too early.
(obj + offset).address[0] = llmemory.NULL
continue # no need to remember this weakref any longer
#
diff --git a/rpython/memory/gc/test/test_object_pinning.py b/rpython/memory/gc/test/test_object_pinning.py
new file mode 100644
--- /dev/null
+++ b/rpython/memory/gc/test/test_object_pinning.py
@@ -0,0 +1,919 @@
+import py
+from rpython.rtyper.lltypesystem import lltype, llmemory, llarena
+from rpython.memory.gc.incminimark import IncrementalMiniMarkGC, WORD
+from test_direct import BaseDirectGCTest
+
+T = lltype.GcForwardReference()
+T.become(lltype.GcStruct('pinning_test_struct2',
+ ('someInt', lltype.Signed)))
+
+S = lltype.GcForwardReference()
+S.become(lltype.GcStruct('pinning_test_struct1',
+ ('someInt', lltype.Signed),
+ ('next', lltype.Ptr(T)),
+ ('data', lltype.Ptr(T))))
+
+class PinningGCTest(BaseDirectGCTest):
+
+ def setup_method(self, meth):
+ BaseDirectGCTest.setup_method(self, meth)
+ max = getattr(meth, 'max_number_of_pinned_objects', 20)
+ self.gc.max_number_of_pinned_objects = max
+
+ def test_pin_can_move(self):
+ # even a pinned object is considered to be movable. Only the caller
+ # of pin() knows if it is currently movable or not.
+ ptr = self.malloc(T)
+ adr = llmemory.cast_ptr_to_adr(ptr)
+ assert self.gc.can_move(adr)
+ assert self.gc.pin(adr)
+ assert self.gc.can_move(adr)
+
+ def test_pin_twice(self):
+ ptr = self.malloc(T)
+ adr = llmemory.cast_ptr_to_adr(ptr)
+ assert self.gc.pin(adr)
+ assert not self.gc.pin(adr)
+
+ def test_unpin_not_pinned(self):
+ # this test checks a requirement of the unpin() interface
+ ptr = self.malloc(S)
+ py.test.raises(Exception,
+ self.gc.unpin, llmemory.cast_ptr_to_adr(ptr))
+
+ def test__is_pinned(self):
+ ptr = self.malloc(T)
+ adr = llmemory.cast_ptr_to_adr(ptr)
+ assert not self.gc._is_pinned(adr)
+ assert self.gc.pin(adr)
+ assert self.gc._is_pinned(adr)
+ self.gc.unpin(adr)
+ assert not self.gc._is_pinned(adr)
+
+ def test_prebuilt_not_pinnable(self):
+ ptr = lltype.malloc(T, immortal=True)
+ self.consider_constant(ptr)
+ assert not self.gc.pin(llmemory.cast_ptr_to_adr(ptr))
+ self.gc.collect()
+ assert not self.gc.pin(llmemory.cast_ptr_to_adr(ptr))
+
+ # XXX test with multiple mallocs, and only part of them is pinned
+
+ def test_random(self):
+ # scenario: create bunch of objects. randomly pin, unpin, add to
+ # stackroots and remove from stackroots.
+ import random
+
+ for i in xrange(10**3):
+ obj = self.malloc(T)
+ obj.someInt = 100
+ #
+ if random.random() < 0.5:
+ self.stackroots.append(obj)
+ print("+stack")
+ if random.random() < 0.5:
+ self.gc.pin(llmemory.cast_ptr_to_adr(obj))
+ print("+pin")
+ self.gc.debug_gc_step(random.randint(1, 4))
+ for o in self.stackroots[:]:
+ assert o.someInt == 100
+ o_adr = llmemory.cast_ptr_to_adr(o)
+ if random.random() < 0.1 and self.gc._is_pinned(o_adr):
+ print("-pin")
+ self.gc.unpin(o_adr)
+ if random.random() < 0.1:
+ print("-stack")
+ self.stackroots.remove(o)
+
+
+class TestIncminimark(PinningGCTest):
+ from rpython.memory.gc.incminimark import IncrementalMiniMarkGC as GCClass
+ from rpython.memory.gc.incminimark import STATE_SCANNING
+
+ def test_try_pin_gcref_containing_type(self):
+ # scenario: incminimark's object pinning can't pin objects that may
+ # contain GC pointers
+ obj = self.malloc(S)
+ assert not self.gc.pin(llmemory.cast_ptr_to_adr(obj))
+
+
+ def test_pin_old(self):
+ # scenario: try pinning an old object. This should be not possible and
+ # we want to make sure everything stays as it is.
+ old_ptr = self.malloc(S)
+ old_ptr.someInt = 900
+ self.stackroots.append(old_ptr)
+ assert self.stackroots[0] == old_ptr # test assumption
+ self.gc.collect()
+ old_ptr = self.stackroots[0]
+ # now we try to pin it
+ old_adr = llmemory.cast_ptr_to_adr(old_ptr)
+ assert not self.gc.is_in_nursery(old_adr)
+ assert not self.gc.pin(old_adr)
+ assert self.gc.pinned_objects_in_nursery == 0
+
+
+ def pin_pin_pinned_object_count(self, collect_func):
+ # scenario: pin two objects that are referenced from stackroots. Check
+        # if the pinned objects count is correct, even after another collection
+ pinned1_ptr = self.malloc(T)
+ pinned1_ptr.someInt = 100
+ self.stackroots.append(pinned1_ptr)
+ #
+ pinned2_ptr = self.malloc(T)
+ pinned2_ptr.someInt = 200
+ self.stackroots.append(pinned2_ptr)
+ #
+ assert self.gc.pin(llmemory.cast_ptr_to_adr(pinned1_ptr))
+ assert self.gc.pinned_objects_in_nursery == 1
+ assert self.gc.pin(llmemory.cast_ptr_to_adr(pinned2_ptr))
+ assert self.gc.pinned_objects_in_nursery == 2
+ #
+ collect_func()
+ #
+ assert self.gc.pinned_objects_in_nursery == 2
+
+ def test_pin_pin_pinned_object_count_minor_collection(self):
+ self.pin_pin_pinned_object_count(self.gc.minor_collection)
+
+ def test_pin_pin_pinned_object_count_major_collection(self):
+ self.pin_pin_pinned_object_count(self.gc.collect)
+
+
+ def pin_unpin_pinned_object_count(self, collect_func):
+ # scenario: pin an object and check the pinned object count. Unpin it
+ # and check the count again.
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.stackroots.append(pinned_ptr)
+ pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
+ #
+ assert self.gc.pinned_objects_in_nursery == 0
+ assert self.gc.pin(pinned_adr)
+ assert self.gc.pinned_objects_in_nursery == 1
+ collect_func()
+ assert self.gc.pinned_objects_in_nursery == 1
+ self.gc.unpin(pinned_adr)
+ assert self.gc.pinned_objects_in_nursery == 0
+ collect_func()
+ assert self.gc.pinned_objects_in_nursery == 0
+
+ def test_pin_unpin_pinned_object_count_minor_collection(self):
+ self.pin_unpin_pinned_object_count(self.gc.minor_collection)
+
+ def test_pin_unpin_pinned_object_count_major_collection(self):
+ self.pin_unpin_pinned_object_count(self.gc.collect)
+
+
+ def pinned_obj_in_stackroot(self, collect_func):
+ # scenario: a pinned object that is part of the stack roots. Check if
+ # it is not moved
+ #
+ ptr = self.malloc(T)
+ ptr.someInt = 100
+ self.stackroots.append(ptr)
+ assert self.stackroots[0] == ptr # validate our assumption
+
+ adr = llmemory.cast_ptr_to_adr(ptr)
+ assert self.gc.is_in_nursery(adr) # to be sure
+ assert self.gc.pin(adr)
+ #
+ # the object shouldn't move from now on
+ collect_func()
+ #
+ # check if it is still at the same location as expected
+ adr_after_collect = llmemory.cast_ptr_to_adr(self.stackroots[0])
+ assert self.gc.is_in_nursery(adr_after_collect)
+ assert adr == adr_after_collect
+ assert self.gc._is_pinned(adr)
+ assert ptr.someInt == 100
+ assert self.gc.pinned_objects_in_nursery == 1
+
+ def test_pinned_obj_in_stackroot_minor_collection(self):
+ self.pinned_obj_in_stackroot(self.gc.minor_collection)
+
+ def test_pinned_obj_in_stackroot_full_major_collection(self):
+ self.pinned_obj_in_stackroot(self.gc.collect)
+
+ def test_pinned_obj_in_stackroots_stepwise_major_collection(self):
+ # scenario: same as for 'pinned_obj_in_stackroot' with minor change
+ # that we do stepwise major collection and check in each step for
+ # a correct state
+ #
+ ptr = self.malloc(T)
+ ptr.someInt = 100
+ self.stackroots.append(ptr)
+ assert self.stackroots[0] == ptr # validate our assumption
+
+ adr = llmemory.cast_ptr_to_adr(ptr)
+ assert self.gc.is_in_nursery(adr)
+ assert self.gc.pin(adr)
+ #
+ # the object shouldn't move from now on. Do a full round of major
+ # steps and check each time for correct state
+ #
+ # check that we start at the expected point
+ assert self.gc.gc_state == self.STATE_SCANNING
+ done = False
+ while not done:
+ self.gc.debug_gc_step()
+ # check that the pinned object didn't move
+ ptr_after_collection = self.stackroots[0]
+ adr_after_collection = llmemory.cast_ptr_to_adr(ptr_after_collection)
+ assert self.gc.is_in_nursery(adr_after_collection)
+ assert adr == adr_after_collection
+ assert self.gc._is_pinned(adr)
+ assert ptr.someInt == 100
+ assert self.gc.pinned_objects_in_nursery == 1
+ # as the object is referenced from the stackroots, the gc internal
+ # 'old_objects_pointing_to_pinned' should be empty
+ assert not self.gc.old_objects_pointing_to_pinned.non_empty()
+ #
+ # break condition
+ done = self.gc.gc_state == self.STATE_SCANNING
+
+
+ def pin_unpin_moved_stackroot(self, collect_func):
+ # scenario: test if the pinned object is moved after being unpinned.
+ # the second part of the scenario is the tested one. The first part
+        # is already tested by other tests.
+ ptr = self.malloc(T)
+ ptr.someInt = 100
+ self.stackroots.append(ptr)
+ assert self.stackroots[0] == ptr # validate our assumption
+
+ adr = llmemory.cast_ptr_to_adr(ptr)
+ assert self.gc.pin(adr)
+
+ collect_func()
+ #
+        # from here on the test really starts. previous logic is already tested
+ #
+ self.gc.unpin(adr)
+ assert not self.gc._is_pinned(adr)
+ assert self.gc.is_in_nursery(adr)
+ #
+ # now we do another collection and the object should be moved out of
+ # the nursery.
+ collect_func()
+ new_adr = llmemory.cast_ptr_to_adr(self.stackroots[0])
+ assert not self.gc.is_in_nursery(new_adr)
+ assert self.stackroots[0].someInt == 100
+ with py.test.raises(RuntimeError) as exinfo:
+ ptr.someInt = 200
+ assert "freed" in str(exinfo.value)
+
+ def test_pin_unpin_moved_stackroot_minor_collection(self):
+ self.pin_unpin_moved_stackroot(self.gc.minor_collection)
+
+ def test_pin_unpin_moved_stackroot_major_collection(self):
+ self.pin_unpin_moved_stackroot(self.gc.collect)
+
+
+ def pin_referenced_from_old(self, collect_func):
+ # scenario: an old object points to a pinned one. Check if the pinned
+ # object is correctly kept in the nursery and not moved.
+ #
+ # create old object
+ old_ptr = self.malloc(S)
+ old_ptr.someInt = 900
+ self.stackroots.append(old_ptr)
+ assert self.stackroots[0] == old_ptr # validate our assumption
+ collect_func() # make it old: move it out of the nursery
+ old_ptr = self.stackroots[0]
+ assert not self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(old_ptr))
+ #
+ # create young pinned one and let the old one reference the young one
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.write(old_ptr, 'next', pinned_ptr)
+ pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
+ assert self.gc.pin(pinned_adr)
+ assert self.gc.is_in_nursery(pinned_adr)
+ assert old_ptr.next.someInt == 100
+ assert self.gc.pinned_objects_in_nursery == 1
+ #
+ # do a collection run and make sure the pinned one didn't move
+ collect_func()
+ assert old_ptr.next.someInt == pinned_ptr.someInt == 100
+ assert llmemory.cast_ptr_to_adr(old_ptr.next) == pinned_adr
+ assert self.gc.is_in_nursery(pinned_adr)
+
+ def test_pin_referenced_from_old_minor_collection(self):
+ self.pin_referenced_from_old(self.gc.minor_collection)
+
+ def test_pin_referenced_from_old_major_collection(self):
+ self.pin_referenced_from_old(self.gc.collect)
+
+ def test_pin_referenced_from_old_stepwise_major_collection(self):
+ # scenario: same as in 'pin_referenced_from_old'. However,
+ # this time we do a major collection step by step and check
+ # between steps that the states are as expected.
+ #
+ # create old object
+ old_ptr = self.malloc(S)
+ old_ptr.someInt = 900
+ self.stackroots.append(old_ptr)
+ assert self.stackroots[0] == old_ptr # validate our assumption
+ self.gc.minor_collection() # make it old: move it out of the nursery
+ old_ptr = self.stackroots[0]
+ old_adr = llmemory.cast_ptr_to_adr(old_ptr)
+ assert not self.gc.is_in_nursery(old_adr)
+ #
+ # create young pinned one and let the old one reference the young one
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.write(old_ptr, 'next', pinned_ptr)
+ pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
+ assert self.gc.pin(pinned_adr)
+ assert self.gc.is_in_nursery(pinned_adr)
+ assert old_ptr.next.someInt == 100
+ assert self.gc.pinned_objects_in_nursery == 1
+ #
+ # stepwise major collection with validation between steps
+ # check that we start at the expected point
+ assert self.gc.gc_state == self.STATE_SCANNING
+ done = False
+ while not done:
+ self.gc.debug_gc_step()
+ #
+ # make sure pinned object didn't move
+ assert old_ptr.next.someInt == pinned_ptr.someInt == 100
+ assert llmemory.cast_ptr_to_adr(old_ptr.next) == pinned_adr
+ assert self.gc.is_in_nursery(pinned_adr)
+ assert self.gc.pinned_objects_in_nursery == 1
+ #
+ # validate that the old object is part of the internal list
+ # 'old_objects_pointing_to_pinned' as expected.
+ # (pop followed by append leaves the list unchanged; this is just
+ # a way to peek at the most recently added entry)
+ should_be_old_adr = self.gc.old_objects_pointing_to_pinned.pop()
+ assert should_be_old_adr == old_adr
+ self.gc.old_objects_pointing_to_pinned.append(should_be_old_adr)
+ #
+ # break condition: a full major collection cycle has completed once
+ # the state wraps back around to STATE_SCANNING
+ done = self.gc.gc_state == self.STATE_SCANNING
+
+
+ def pin_referenced_from_old_remove_ref(self, collect_func):
+ # scenario: an old object points to a pinned one. We remove the
+ # reference from the old one. So nothing points to the pinned object.
+ # After this the pinned object should be collected (it's dead).
+ # 'collect_func' is either gc.minor_collection or gc.collect.
+ #
+ # Create the objects and get them to our initial state (this is not
+ # tested here, should be already tested by other tests)
+ old_ptr = self.malloc(S)
+ old_ptr.someInt = 900
+ self.stackroots.append(old_ptr)
+ assert self.stackroots[0] == old_ptr # check assumption
+ collect_func() # make it old
+ old_ptr = self.stackroots[0]
+ #
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.write(old_ptr, 'next', pinned_ptr)
+ pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
+ assert self.gc.pin(pinned_adr)
+ #
+ collect_func()
+ # from here on we have our initial state for this test.
+ #
+ # first check some basic assumptions.
+ assert self.gc.is_in_nursery(pinned_adr)
+ assert self.gc._is_pinned(pinned_adr)
+ # remove the reference
+ self.write(old_ptr, 'next', lltype.nullptr(T))
+ # from now on the pinned object is dead. Do a collection and make sure
+ # old object still there and the pinned one is gone.
+ collect_func()
+ assert self.stackroots[0].someInt == 900
+ assert not self.gc.old_objects_pointing_to_pinned.non_empty()
+ # writing to the collected object must fail with a "freed" error
+ with py.test.raises(RuntimeError) as exinfo:
+ pinned_ptr.someInt = 200
+ assert "freed" in str(exinfo.value)
+
+ def test_pin_referenced_from_old_remove_ref_minor_collection(self):
+ # exercise the scenario using minor collections only
+ self.pin_referenced_from_old_remove_ref(self.gc.minor_collection)
+
+ def test_pin_referenced_from_old_remove_ref_major_collection(self):
+ # exercise the scenario using full (major) collections
+ self.pin_referenced_from_old_remove_ref(self.gc.collect)
+
+
+ def pin_referenced_from_old_remove_old(self, collect_func):
+ # scenario: an old object referenced a pinned object. After removing
+ # the stackroot reference to the old object, both objects (old and
+ # pinned) must be collected.
+ # This test is important as we expect unreachable pinned objects to
+ # be collected. At the same time we have an internal list of objects
+ # pointing to pinned ones ('old_objects_pointing_to_pinned') and we
+ # must make sure that this list alone does not keep the old/pinned
+ # objects alive.
+ #
+ # create the objects and get them to the initial state for this test.
+ # Everything on the way to the initial state should be covered by
+ # other tests.
+ old_ptr = self.malloc(S)
+ old_ptr.someInt = 900
+ self.stackroots.append(old_ptr)
+ collect_func()
+ old_ptr = self.stackroots[0]
+ #
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.write(old_ptr, 'next', pinned_ptr)
+ assert self.gc.pin(llmemory.cast_ptr_to_adr(pinned_ptr))
+ #
+ collect_func()
+ #
+ # now we have our initial state: old object referenced from stackroots.
+ # Old object referencing a young pinned one. Next step is to make some
+ # basic checks that we got the expected state.
+ assert not self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(old_ptr))
+ assert self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(pinned_ptr))
+ assert pinned_ptr == old_ptr.next
+ #
+ # now we remove the old object from the stackroots...
+ self.stackroots.remove(old_ptr)
+ # ... and do a major collection (otherwise the old object wouldn't be
+ # gone).
+ self.gc.collect()
+ # check that both objects are gone
+ assert not self.gc.old_objects_pointing_to_pinned.non_empty()
+ with py.test.raises(RuntimeError) as exinfo_old:
+ old_ptr.someInt = 800
+ assert "freed" in str(exinfo_old.value)
+ #
+ with py.test.raises(RuntimeError) as exinfo_pinned:
+ pinned_ptr.someInt = 200
+ assert "freed" in str(exinfo_pinned.value)
+
+ def test_pin_referenced_from_old_remove_old_minor_collection(self):
+ # exercise the scenario using minor collections for the setup phase
+ self.pin_referenced_from_old_remove_old(self.gc.minor_collection)
+
+ def test_pin_referenced_from_old_remove_old_major_collection(self):
+ # exercise the scenario using full (major) collections for the setup phase
+ self.pin_referenced_from_old_remove_old(self.gc.collect)
+
+
+ def pin_referenced_from_young_in_stackroots(self, collect_func):
+ # scenario: a young object is referenced from the stackroots. This
+ # young object points to a young pinned object. We check if everything
+ # behaves as expected after a collection: the young object is moved out
+ # of the nursery while the pinned one stays where it is.
+ # 'collect_func' is either gc.minor_collection or gc.collect.
+ #
+ root_ptr = self.malloc(S)
+ root_ptr.someInt = 900
+ self.stackroots.append(root_ptr)
+ assert self.stackroots[0] == root_ptr # validate assumption
+ #
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.write(root_ptr, 'next', pinned_ptr)
+ pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
+ assert self.gc.pin(pinned_adr)
+ # check both are in nursery
+ assert self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(root_ptr))
+ assert self.gc.is_in_nursery(pinned_adr)
+ #
+ # no old object yet pointing to a pinned one
+ assert not self.gc.old_objects_pointing_to_pinned.non_empty()
+ #
+ # now we do a collection and check if the result is as expected
+ collect_func()
+ #
+ # check if objects are where we expect them
+ root_ptr = self.stackroots[0]
+ assert not self.gc.is_in_nursery(llmemory.cast_ptr_to_adr(root_ptr))
+ assert self.gc.is_in_nursery(pinned_adr)
+ # and as 'root_ptr' object is now old, it should be tracked specially
+ # (pop followed by append leaves the list unchanged; just a peek)
+ should_be_root_adr = self.gc.old_objects_pointing_to_pinned.pop()
+ assert should_be_root_adr == llmemory.cast_ptr_to_adr(root_ptr)
+ self.gc.old_objects_pointing_to_pinned.append(should_be_root_adr)
+ # check that old object still points to the pinned one as expected
+ assert root_ptr.next == pinned_ptr
+
+ def test_pin_referenced_from_young_in_stackroots_minor_collection(self):
+ # exercise the scenario using a minor collection only
+ self.pin_referenced_from_young_in_stackroots(self.gc.minor_collection)
+
+ def test_pin_referenced_from_young_in_stackroots_major_collection(self):
+ # exercise the scenario using a full (major) collection
+ self.pin_referenced_from_young_in_stackroots(self.gc.collect)
+
+
+ def pin_referenced_from_prebuilt(self, collect_func):
+ # scenario: a prebuilt object points to a pinned object. Check if the
+ # pinned object doesn't move and is still accessible.
+ # 'collect_func' is either gc.minor_collection or gc.collect.
+ #
+ # prebuilt objects are allocated immortal and registered with the GC
+ # as constants; they never live in the nursery
+ prebuilt_ptr = lltype.malloc(S, immortal=True)
+ prebuilt_ptr.someInt = 900
+ self.consider_constant(prebuilt_ptr)
+ prebuilt_adr = llmemory.cast_ptr_to_adr(prebuilt_ptr)
+ collect_func()
+ #
+ pinned_ptr = self.malloc(T)
+ pinned_ptr.someInt = 100
+ self.write(prebuilt_ptr, 'next', pinned_ptr)
+ pinned_adr = llmemory.cast_ptr_to_adr(pinned_ptr)
+ assert self.gc.pin(pinned_adr)
+ #
+ # check if everything is as expected
+ assert not self.gc.is_in_nursery(prebuilt_adr)
+ assert self.gc.is_in_nursery(pinned_adr)
+ assert pinned_ptr == prebuilt_ptr.next
+ assert pinned_ptr.someInt == 100
+ #
+ # do a collection and check again: the pinned object stayed in the
+ # nursery and is still reachable through the prebuilt one
+ collect_func()
+ assert self.gc.is_in_nursery(pinned_adr)
+ assert pinned_ptr == prebuilt_ptr.next
+ assert pinned_ptr.someInt == 100
+
+ def test_pin_referenced_from_prebuilt_minor_collection(self):
+ # exercise the scenario using minor collections only
+ self.pin_referenced_from_prebuilt(self.gc.minor_collection)
+
+ def test_pin_referenced_from_prebuilt_major_collection(self):
+ # exercise the scenario using full (major) collections
+ self.pin_referenced_from_prebuilt(self.gc.collect)
+
More information about the pypy-commit
mailing list