[pypy-svn] r74569 - in pypy/branch/blackhole-improvement/pypy: jit/backend jit/backend/llgraph jit/codewriter jit/metainterp rpython/lltypesystem
arigo at codespeak.net
arigo at codespeak.net
Wed May 19 11:10:06 CEST 2010
Author: arigo
Date: Wed May 19 11:10:04 2010
New Revision: 74569
Modified:
pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/llimpl.py
pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/runner.py
pypy/branch/blackhole-improvement/pypy/jit/backend/model.py
pypy/branch/blackhole-improvement/pypy/jit/codewriter/jitcode.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/blackhole.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/compile.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/executor.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/history.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/optimizeopt.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/pyjitpl.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/resoperation.py
pypy/branch/blackhole-improvement/pypy/jit/metainterp/resume.py
pypy/branch/blackhole-improvement/pypy/rpython/lltypesystem/llmemory.py
Log:
General progress.
Modified: pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/llimpl.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/llimpl.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/llimpl.py Wed May 19 11:10:04 2010
@@ -1060,31 +1060,25 @@
def frame_int_getvalue(frame, num):
frame = _from_opaque(frame)
assert num >= 0
- return frame.fail_args[num]
+ x = frame.fail_args[num]
+ assert lltype.typeOf(x) is lltype.Signed
+ return x
def frame_float_getvalue(frame, num):
frame = _from_opaque(frame)
assert num >= 0
- return frame.fail_args[num]
+ x = frame.fail_args[num]
+ assert lltype.typeOf(x) is lltype.Float
+ return x
def frame_ptr_getvalue(frame, num):
frame = _from_opaque(frame)
assert num >= 0
- return frame.fail_args[num]
+ x = frame.fail_args[num]
+ assert lltype.typeOf(x) == llmemory.GCREF
+ return x
-def frame_get_value_kind(frame, num):
- frame = _from_opaque(frame)
- assert num >= 0
- TYPE = lltype.typeOf(frame.fail_args[num])
- if TYPE is lltype.Signed:
- return INT
- if TYPE == llmemory.GCREF:
- return REF
- if TYPE is lltype.Float:
- return FLOAT
- raise TypeError("frame.fail_args[%d] is of type %r" % (num, TYPE))
-
-def get_latest_value_count(frame):
+def frame_get_value_count(frame):
frame = _from_opaque(frame)
return len(frame.fail_args)
Modified: pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/runner.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/runner.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/backend/llgraph/runner.py Wed May 19 11:10:04 2010
@@ -240,9 +240,6 @@
def get_latest_value_float(self, index):
return llimpl.frame_float_getvalue(self.latest_frame, index)
- def get_latest_value_kind(self, index):
- return llimpl.frame_get_value_kind(self.latest_frame, index)
-
def get_latest_value_count(self):
return llimpl.frame_get_value_count(self.latest_frame)
Modified: pypy/branch/blackhole-improvement/pypy/jit/backend/model.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/backend/model.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/backend/model.py Wed May 19 11:10:04 2010
@@ -85,11 +85,6 @@
or from 'args' if it was a FINISH). Returns a ptr or an obj."""
raise NotImplementedError
- def get_latest_value_kind(self, index):
- """Return the kind (history.INT, REF or FLOAT) of the index'th
- argument to the last executed operation."""
- raise NotImplementedError
-
def get_latest_value_count(self):
"""Return how many values are ready to be returned by
get_latest_value_xxx()."""
Modified: pypy/branch/blackhole-improvement/pypy/jit/codewriter/jitcode.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/codewriter/jitcode.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/codewriter/jitcode.py Wed May 19 11:10:04 2010
@@ -97,6 +97,18 @@
def _missing_liveness(self, pc):
raise MissingLiveness("missing liveness[%d]\n%s" % (pc, self.dump()))
+ def follow_jump(self, position):
+ """Assuming that 'position' points just after a bytecode
+ instruction that ends with a label, follow that label."""
+ code = self.code
+ position -= 2
+ assert position >= 0
+ if not we_are_translated():
+ assert position in self._alllabels
+ labelvalue = ord(code[position]) | (ord(code[position+1])<<8)
+ assert labelvalue < len(code)
+ return labelvalue
+
def dump(self):
if self._ssarepr is None:
return '<no dump available>'
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/blackhole.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/blackhole.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/blackhole.py Wed May 19 11:10:04 2010
@@ -61,6 +61,7 @@
self.setup_insns(asm.insns)
self.setup_descrs(asm.descrs)
self.metainterp_sd = metainterp_sd
+ self.num_interpreters = 0
self._freeze_()
def _freeze_(self):
@@ -234,7 +235,8 @@
if len(self.blackholeinterps) > 0:
return self.blackholeinterps.pop()
else:
- return BlackholeInterpreter(self)
+ self.num_interpreters += 1
+ return BlackholeInterpreter(self, self.num_interpreters)
def release_interp(self, interp):
interp.cleanup_registers()
@@ -243,12 +245,13 @@
class BlackholeInterpreter(object):
- def __init__(self, builder):
+ def __init__(self, builder, count_interpreter):
self.builder = builder
self.cpu = builder.cpu
self.dispatch_loop = builder.dispatch_loop
self.descrs = builder.descrs
self.op_catch_exception = builder.op_catch_exception
+ self.count_interpreter = count_interpreter
#
if we_are_translated():
default_i = 0
@@ -263,6 +266,9 @@
self.registers_f = [default_f] * 256
self.jitcode = None
+ def __repr__(self):
+ return '<BHInterp #%d>' % self.count_interpreter
+
def setposition(self, jitcode, position):
if jitcode is not self.jitcode:
# the real performance impact of the following code is unclear,
@@ -356,18 +362,6 @@
registers[j] = constants[i]
i -= 1
- def follow_jump(self):
- """Assuming that self.position points just after a bytecode
- instruction that ends with a label, follow that label."""
- code = self.jitcode.code
- position = self.position - 2
- assert position >= 0
- if not we_are_translated():
- assert position in self.jitcode._alllabels
- labelvalue = ord(code[position]) | (ord(code[position+1])<<8)
- assert labelvalue < len(code)
- self.position = labelvalue
-
# ----------
@arguments("i", "i", returns="i")
@@ -956,8 +950,7 @@
# XXX virtualizable
_prepare_resume_from_failure(blackholeinterp, resumedescr.guard_opnum)
try:
- blackholeinterp = _resume_mainloop(
- metainterp_sd.blackholeinterpbuilder, blackholeinterp)
+ blackholeinterp = _resume_mainloop(blackholeinterp)
finally:
metainterp_sd.profiler.end_blackhole()
debug_stop('jit-blackhole')
@@ -965,12 +958,12 @@
# normally (in general we get a ContinueRunningNormally exception).
_done_with_this_frame(blackholeinterp)
-def _resume_mainloop(blackholeinterpbuilder, blackholeinterp):
+def _resume_mainloop(blackholeinterp):
while True:
try:
blackholeinterp.run()
finally:
- blackholeinterpbuilder.release_interp(blackholeinterp)
+ blackholeinterp.builder.release_interp(blackholeinterp)
#...x.x.x...
assert blackholeinterp.nextblackholeinterp is None # XXX
break
@@ -980,7 +973,8 @@
def _prepare_resume_from_failure(blackholeinterp, opnum):
from pypy.jit.metainterp.resoperation import rop
if opnum == rop.GUARD_TRUE: # a goto_if_not_xxx that jumps only now
- blackholeinterp.follow_jump()
+ blackholeinterp.position = blackholeinterp.jitcode.follow_jump(
+ blackholeinterp.position)
elif opnum == rop.GUARD_FALSE: # a goto_if_not that stops jumping
pass
else:
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/compile.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/compile.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/compile.py Wed May 19 11:10:04 2010
@@ -239,12 +239,17 @@
self._counter = ~i # use ~(index_of_guarded_box_in_fail_args)
def handle_fail(self, metainterp_sd):
- from pypy.jit.metainterp.blackhole import resume_in_blackhole
- return resume_in_blackhole(metainterp_sd, self)
- XXX
+ if self.must_compile(metainterp_sd):
+ return self._trace_and_compile_from_bridge(metainterp_sd)
+ else:
+ from pypy.jit.metainterp.blackhole import resume_in_blackhole
+ resume_in_blackhole(metainterp_sd, self)
+
+ def _trace_and_compile_from_bridge(self, metainterp_sd):
from pypy.jit.metainterp.pyjitpl import MetaInterp
metainterp = MetaInterp(metainterp_sd)
return metainterp.handle_guard_failure(self)
+ _trace_and_compile_from_bridge._dont_inline_ = True
def must_compile(self, metainterp_sd):
trace_eagerness = metainterp_sd.state.trace_eagerness
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/executor.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/executor.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/executor.py Wed May 19 11:10:04 2010
@@ -85,6 +85,9 @@
return None
raise AssertionError("bad rettype")
+do_call_pure = do_call
+do_call_loopinvariant = do_call
+
def do_getarrayitem_gc(metainterp, arraybox, indexbox, arraydescr):
cpu = metainterp.cpu
array = arraybox.getref_base()
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/history.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/history.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/history.py Wed May 19 11:10:04 2010
@@ -255,7 +255,7 @@
return llmemory.cast_int_to_adr(self.value)
def _get_hash_(self):
- return integer_hash(self.value)
+ return make_hashable_int(self.value)
def set_future_value(self, cpu, j):
cpu.set_future_value_int(j, self.value)
@@ -492,7 +492,7 @@
return llmemory.cast_int_to_adr(self.value)
def _get_hash_(self):
- return integer_hash(self.value)
+ return make_hashable_int(self.value)
def set_future_value(self, cpu, j):
cpu.set_future_value_int(j, self.value)
@@ -655,7 +655,7 @@
except lltype.DelayedPointer:
return -2 # xxx risk of changing hash...
-def integer_hash(i):
+def make_hashable_int(i):
if not we_are_translated() and isinstance(i, llmemory.AddressAsInt):
# Warning: such a hash changes at the time of translation
adr = llmemory.cast_int_to_adr(i)
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/optimizeopt.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/optimizeopt.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/optimizeopt.py Wed May 19 11:10:04 2010
@@ -16,7 +16,7 @@
from pypy.jit.metainterp.typesystem import llhelper, oohelper
from pypy.rlib.objectmodel import we_are_translated
from pypy.rpython.lltypesystem import lltype
-from pypy.jit.metainterp.history import AbstractDescr
+from pypy.jit.metainterp.history import AbstractDescr, make_hashable_int
def optimize_loop_1(metainterp_sd, loop):
"""Optimize loop.operations to make it match the input of loop.specnodes
@@ -953,7 +953,8 @@
if not funcvalue.is_constant():
self.optimize_default(op)
return
- resvalue = self.loop_invariant_results.get(op.args[0].getint(), None)
+ key = make_hashable_int(op.args[0].getint())
+ resvalue = self.loop_invariant_results.get(key, None)
if resvalue is not None:
self.make_equal_to(op.result, resvalue)
return
@@ -962,7 +963,7 @@
op.opnum = rop.CALL
self.optimize_default(op)
resvalue = self.getvalue(op.result)
- self.loop_invariant_results[op.args[0].getint()] = resvalue
+ self.loop_invariant_results[key] = resvalue
optimize_ops = _findall(Optimizer, 'optimize_')
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/pyjitpl.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/pyjitpl.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/pyjitpl.py Wed May 19 11:10:04 2010
@@ -26,10 +26,6 @@
# ____________________________________________________________
-def check_args(*args):
- for arg in args:
- assert isinstance(arg, (Box, Const))
-
def arguments(*args):
def decorate(func):
func.argtypes = args
@@ -83,6 +79,13 @@
i -= 1
copy_constants._annspecialcase_ = 'specialize:arg(3)'
+ def cleanup_registers(self):
+ # To avoid keeping references alive, this cleans up the registers_r.
+ # It does not clear the references set by copy_constants(), but
+ # these are all prebuilt constants anyway.
+ for i in range(self.jitcode.num_regs_r()):
+ self.registers_r[i] = None
+
# ------------------------------
# Decoding of the JitCode
@@ -266,39 +269,39 @@
self.opimpl_goto_if_not(condbox, target)
''' % (_opimpl, _opimpl.upper())).compile()
- def follow_jump(self):
- _op_goto_if_not = self.metainterp.staticdata._op_goto_if_not
- assert ord(self.bytecode[self.pc]) == _op_goto_if_not
- self.pc += 1 # past the bytecode for 'goto_if_not'
- target = self.load_3byte() # load the 'target' argument
- self.pc = target # jump
-
- def ignore_next_guard_nullness(self, opnum):
- _op_ooisnull = self.metainterp.staticdata._op_ooisnull
- _op_oononnull = self.metainterp.staticdata._op_oononnull
- bc = ord(self.bytecode[self.pc])
- if bc == _op_ooisnull:
- if opnum == rop.GUARD_ISNULL:
- res = ConstInt(0)
- else:
- res = ConstInt(1)
- else:
- assert bc == _op_oononnull
- if opnum == rop.GUARD_ISNULL:
- res = ConstInt(1)
- else:
- res = ConstInt(0)
- self.pc += 1 # past the bytecode for ptr_iszero/ptr_nonzero
- self.load_int() # past the 'box' argument
- self.make_result_box(res)
-
- def dont_follow_jump(self):
- _op_goto_if_not = self.metainterp.staticdata._op_goto_if_not
- assert ord(self.bytecode[self.pc]) == _op_goto_if_not
- self.pc += 1 # past the bytecode for 'goto_if_not'
- self.load_3byte() # past the 'target' argument
- self.load_int() # past the 'box' argument
- self.ignore_varargs() # past the 'livelist' argument
+## def follow_jump(self):
+## _op_goto_if_not = self.metainterp.staticdata._op_goto_if_not
+## assert ord(self.bytecode[self.pc]) == _op_goto_if_not
+## self.pc += 1 # past the bytecode for 'goto_if_not'
+## target = self.load_3byte() # load the 'target' argument
+## self.pc = target # jump
+
+## def ignore_next_guard_nullness(self, opnum):
+## _op_ooisnull = self.metainterp.staticdata._op_ooisnull
+## _op_oononnull = self.metainterp.staticdata._op_oononnull
+## bc = ord(self.bytecode[self.pc])
+## if bc == _op_ooisnull:
+## if opnum == rop.GUARD_ISNULL:
+## res = ConstInt(0)
+## else:
+## res = ConstInt(1)
+## else:
+## assert bc == _op_oononnull
+## if opnum == rop.GUARD_ISNULL:
+## res = ConstInt(1)
+## else:
+## res = ConstInt(0)
+## self.pc += 1 # past the bytecode for ptr_iszero/ptr_nonzero
+## self.load_int() # past the 'box' argument
+## self.make_result_box(res)
+
+## def dont_follow_jump(self):
+## _op_goto_if_not = self.metainterp.staticdata._op_goto_if_not
+## assert ord(self.bytecode[self.pc]) == _op_goto_if_not
+## self.pc += 1 # past the bytecode for 'goto_if_not'
+## self.load_3byte() # past the 'target' argument
+## self.load_int() # past the 'box' argument
+## self.ignore_varargs() # past the 'livelist' argument
@arguments("box", "descr")
def opimpl_switch(self, valuebox, switchdict):
@@ -905,12 +908,8 @@
else:
raise AssertionError(box.type)
- def setup_resume_at_op(self, pc, env):
- if not we_are_translated():
- check_args(*env)
+ def setup_resume_at_op(self, pc):
self.pc = pc
- xxxxxxxxxxxxxxxxxxxxxxx
- self.env = env
## values = ' '.join([box.repr_rpython() for box in self.env])
## log('setup_resume_at_op %s:%d [%s] %d' % (self.jitcode.name,
## self.pc, values,
@@ -1213,7 +1212,6 @@
class MetaInterp(object):
in_recursion = 0
- _already_allocated_resume_virtuals = None
def __init__(self, staticdata):
self.staticdata = staticdata
@@ -1255,6 +1253,7 @@
# we save the freed MIFrames to avoid needing to re-create new
# MIFrame objects all the time; they are a bit big, with their
# 3*256 register entries.
+ frame.cleanup_registers()
self.free_frames_list.append(frame)
def finishframe(self, resultbox):
@@ -1516,8 +1515,6 @@
self.current_merge_points = [(original_greenkey, -1)]
self.resumekey = key
self.seen_can_enter_jit = False
- xxx
- started_as_blackhole = self.is_blackholing()
try:
self.prepare_resume_from_failure(key.guard_opnum)
self.interpret()
@@ -1525,8 +1522,7 @@
except GenerateMergePoint, gmp:
return self.designate_target_loop(gmp)
except ContinueRunningNormallyBase:
- if not started_as_blackhole:
- key.reset_counter_from_failure(self)
+ key.reset_counter_from_failure(self)
raise
def remove_consts_and_duplicates(self, boxes, endindex, duplicates):
@@ -1599,17 +1595,20 @@
return loop_token
def prepare_resume_from_failure(self, opnum):
+ frame = self.framestack[-1]
if opnum == rop.GUARD_TRUE: # a goto_if_not that jumps only now
- self.framestack[-1].follow_jump()
+ frame.pc = frame.jitcode.follow_jump(frame.pc)
elif opnum == rop.GUARD_FALSE: # a goto_if_not that stops jumping
- self.framestack[-1].dont_follow_jump()
+ pass
elif (opnum == rop.GUARD_NO_EXCEPTION or opnum == rop.GUARD_EXCEPTION
or opnum == rop.GUARD_NOT_FORCED):
- self.handle_exception()
+ xxx #self.handle_exception()
elif opnum == rop.GUARD_NO_OVERFLOW: # an overflow now detected
- self.raise_overflow_error()
+ xxx #self.raise_overflow_error()
elif opnum == rop.GUARD_NONNULL or opnum == rop.GUARD_ISNULL:
- self.framestack[-1].ignore_next_guard_nullness(opnum)
+ xxx #self.framestack[-1].ignore_next_guard_nullness(opnum)
+ else:
+ raise NotImplementedError(opnum)
def compile(self, original_boxes, live_arg_boxes, start):
num_green_args = self.staticdata.num_green_args
@@ -1705,23 +1704,13 @@
return original_boxes
def initialize_state_from_guard_failure(self, resumedescr):
- XXX
# guard failure: rebuild a complete MIFrame stack
+ debug_start('jit-tracing')
+ self.staticdata.profiler.start_tracing()
self.in_recursion = -1 # always one portal around
- inputargs_and_holes = self.cpu.make_boxes_from_latest_values(
- resumedescr)
- must_compile = resumedescr.must_compile(self.staticdata,
- inputargs_and_holes)
- if must_compile:
- debug_start('jit-tracing')
- self.history = history.History()
- self.history.inputargs = [box for box in inputargs_and_holes if box]
- self.staticdata.profiler.start_tracing()
- else:
- debug_start('jit-blackhole')
- self.staticdata.profiler.start_blackhole()
- self.history = None # this means that is_blackholing() is true
- self.rebuild_state_after_failure(resumedescr, inputargs_and_holes)
+ self.history = history.History()
+ inputargs_and_holes = self.rebuild_state_after_failure(resumedescr)
+ self.history.inputargs = [box for box in inputargs_and_holes if box]
def initialize_virtualizable(self, original_boxes):
vinfo = self.staticdata.virtualizable_info
@@ -1834,12 +1823,15 @@
def assert_no_exception(self):
assert not self.last_exc_value_box
- def rebuild_state_after_failure(self, resumedescr, newboxes):
+ def rebuild_state_after_failure(self, resumedescr):
vinfo = self.staticdata.virtualizable_info
self.framestack = []
expect_virtualizable = vinfo is not None
- virtualizable_boxes, virtualref_boxes = resume.rebuild_from_resumedata(
- self, newboxes, resumedescr, expect_virtualizable)
+ boxlists = resume.rebuild_from_resumedata(self, resumedescr,
+ expect_virtualizable)
+ #inputargs_and_holes, virtualizable_boxes, virtualref_boxes = boxlists
+ inputargs_and_holes = boxlists # XXX
+ virtualref_boxes = [] # XXX
#
# virtual refs: make the vrefs point to the freshly allocated virtuals
self.virtualref_boxes = virtualref_boxes
@@ -1854,7 +1846,7 @@
# boxes, in whichever direction is appropriate
if expect_virtualizable:
self.virtualizable_boxes = virtualizable_boxes
- if self._already_allocated_resume_virtuals is not None:
+ if 0: ## self._already_allocated_resume_virtuals is not None:
# resuming from a ResumeGuardForcedDescr: load the new values
# currently stored on the virtualizable fields
self.load_fields_from_virtualizable()
@@ -1866,13 +1858,14 @@
virtualizable_box = self.virtualizable_boxes[-1]
virtualizable = vinfo.unwrap_virtualizable_box(virtualizable_box)
assert not virtualizable.vable_token
- if self._already_allocated_resume_virtuals is not None:
+ if 0: ## self._already_allocated_resume_virtuals is not None:
# resuming from a ResumeGuardForcedDescr: load the new values
# currently stored on the virtualizable fields
self.load_fields_from_virtualizable()
else:
# normal case: fill the virtualizable with the local boxes
self.synchronize_virtualizable()
+ return inputargs_and_holes
def check_synchronized_virtualizable(self):
if not we_are_translated():
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/resoperation.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/resoperation.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/resoperation.py Wed May 19 11:10:04 2010
@@ -134,7 +134,7 @@
'_NOSIDEEFFECT_FIRST', # ----- start of no_side_effect operations -----
'_ALWAYS_PURE_FIRST', # ----- start of always_pure operations -----
- 'OOSEND_PURE', # ootype operation
+ #'OOSEND_PURE', # ootype operation
'CALL_PURE',
#
'CAST_PTR_TO_INT/1',
@@ -198,8 +198,8 @@
'UNICODEGETITEM/2',
#
# ootype operations
- 'INSTANCEOF/1db',
- 'SUBCLASSOF/2b',
+ #'INSTANCEOF/1db',
+ #'SUBCLASSOF/2b',
#
'_ALWAYS_PURE_LAST', # ----- end of always_pure operations -----
@@ -222,7 +222,7 @@
'STRSETITEM/3',
'UNICODESETITEM/3',
'NEWUNICODE/1',
- 'RUNTIMENEW/1', # ootype operation
+ #'RUNTIMENEW/1', # ootype operation
'COND_CALL_GC_WB', # [objptr, newvalue] (for the write barrier)
'DEBUG_MERGE_POINT/1', # debugging only
'VIRTUAL_REF_FINISH/2',
@@ -232,7 +232,7 @@
'CALL_ASSEMBLER',
'CALL_MAY_FORCE',
'CALL_LOOPINVARIANT',
- 'OOSEND', # ootype operation
+ #'OOSEND', # ootype operation
'_CANRAISE_LAST', # ----- end of can_raise operations -----
'_OVF_FIRST', # ----- start of is_ovf operations -----
Modified: pypy/branch/blackhole-improvement/pypy/jit/metainterp/resume.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/jit/metainterp/resume.py (original)
+++ pypy/branch/blackhole-improvement/pypy/jit/metainterp/resume.py Wed May 19 11:10:04 2010
@@ -1,5 +1,7 @@
import sys, os
-from pypy.jit.metainterp.history import Box, Const, ConstInt, INT, REF, FLOAT
+from pypy.jit.metainterp.history import Box, Const, ConstInt
+from pypy.jit.metainterp.history import BoxInt, BoxPtr, BoxFloat
+from pypy.jit.metainterp.history import INT, REF, FLOAT, HOLE
from pypy.jit.metainterp.resoperation import rop
from pypy.jit.metainterp import jitprof
from pypy.rpython.lltypesystem import rffi
@@ -98,6 +100,9 @@
UNASSIGNED = tag(-1<<13, TAGBOX)
UNASSIGNEDVIRTUAL = tag(-1<<13, TAGVIRTUAL)
NULLREF = tag(-1, TAGCONST)
+TYPEBARRIER = tag(-2, TAGCONST)
+ # nums = [..BoxInts.., TYPEBARRIER, ..BoxPtrs..]
+ # and optionally an extra [TYPEBARRIER, ..BoxFloats..]
class ResumeDataLoopMemo(object):
@@ -161,8 +166,19 @@
numb1, liveboxes, v = self.number(values, snapshot.prev)
n = len(liveboxes)-v
boxes = snapshot.boxes
+ #
+ if not we_are_translated():
+ # verifies that 'boxes' are in order: all INTs, then all REFs,
+ # and finally all FLOATs.
+ _kind2count = {INT: 1, REF: 2, FLOAT: 3}
+ kinds = [_kind2count[box.type] for box in boxes]
+ assert kinds == sorted(kinds)
+ #
length = len(boxes)
- nums = [UNASSIGNED] * length
+ numslength = length + 1 + (length > 0 and boxes[-1].type == FLOAT)
+ nums = [UNASSIGNED] * numslength
+ prevkind = INT
+ j = 0
for i in range(length):
box = boxes[i]
value = values.get(box, None)
@@ -181,7 +197,23 @@
tagged = tag(n, TAGBOX)
n += 1
liveboxes[box] = tagged
- nums[i] = tagged
+ #
+ if box.type != prevkind:
+ nums[j] = TYPEBARRIER
+ j += 1
+ if prevkind == INT and box.type == FLOAT:
+ nums[j] = TYPEBARRIER
+ j += 1
+ prevkind = box.type
+ #
+ nums[j] = tagged
+ j += 1
+ #
+ if j == length:
+ nums[j] = TYPEBARRIER
+ j += 1
+ assert j == numslength
+ #
numb = Numbering(numb1, nums)
self.numberings[snapshot] = numb, liveboxes, v
return numb, liveboxes.copy(), v
@@ -426,9 +458,9 @@
AbstractVirtualStructInfo.__init__(self, fielddescrs)
self.known_class = known_class
- def allocate(self, cpu):
- from pypy.jit.metainterp.executor import exec_new_with_vtable
- return exec_new_with_vtable(cpu, self.known_class)
+ @specialize.argtype(1)
+ def allocate(self, decoder):
+ return decoder.allocate_with_vtable(self.known_class)
def debug_prints(self):
debug_print("\tvirtualinfo", self.known_class.repr_rpython())
@@ -468,24 +500,94 @@
for i in self.fieldnums:
debug_print("\t\t", str(untag(i)))
+# ____________________________________________________________
+
+class AbstractResumeDataReader(object):
+ """A base mixin containing the logic to reconstruct virtuals out of
+ guard failure. There are two implementations of this mixin:
+ ResumeDataBoxReader for when we are compiling (i.e. when we have a
+ metainterp), and ResumeDataDirectReader for when we are merely
+ blackholing and want the best performance.
+ """
+ _mixin_ = True
+ virtuals = None
+
+ def _prepare(self, cpu, storage):
+ self.cpu = cpu
+ self.cur_numb = storage.rd_numb
+ self.consts = storage.rd_consts
+ self._prepare_virtuals(storage.rd_virtuals)
+ self._prepare_pendingfields(storage.rd_pendingfields)
+
+ def _prepare_virtuals(self, virtuals):
+ if virtuals:
+ self.virtuals = [None] * len(virtuals)
+ for i in range(len(virtuals)):
+ vinfo = virtuals[i]
+ if vinfo is not None:
+ self.virtuals[i] = vinfo.allocate(self)
+ for i in range(len(virtuals)):
+ vinfo = virtuals[i]
+ if vinfo is not None:
+ vinfo.setfields(self, self.virtuals[i])
-def rebuild_from_resumedata(metainterp, newboxes, storage,
+ def _prepare_pendingfields(self, pendingfields):
+ if pendingfields is not None:
+ for descr, num, fieldnum in pendingfields:
+ struct = self.decode_ref(num)
+ self.setfield(descr, struct, fieldnum)
+
+ def _prepare_next_section(self):
+ numb = self.cur_numb
+ self.cur_numb = numb.prev
+ nums = numb.nums
+ length = len(nums)
+ count_i = count_r = count_f = 0
+ i = 0
+ while True:
+ num = nums[i]
+ i += 1
+ if tagged_eq(num, TYPEBARRIER):
+ break
+ self.write_an_int(count_i, self.decode_int(num))
+ count_i += 1
+ while i < length:
+ num = nums[i]
+ i += 1
+ if tagged_eq(num, TYPEBARRIER):
+ break
+ self.write_a_ref(count_r, self.decode_ref(num))
+ count_r += 1
+ while i < length:
+ num = nums[i]
+ i += 1
+ assert not tagged_eq(num, TYPEBARRIER)
+ self.write_a_float(count_f, self.decode_float(num))
+ count_f += 1
+
+ def done(self):
+ self.cpu.clear_latest_values()
+
+# ---------- when resuming for pyjitpl.py, make boxes ----------
+
+def rebuild_from_resumedata(metainterp, storage,
expects_virtualizables):
- resumereader = ResumeDataReader(storage, newboxes, metainterp)
+ resumereader = ResumeDataBoxReader(storage, metainterp)
virtualizable_boxes = None
if expects_virtualizables:
- virtualizable_boxes = resumereader.consume_boxes()
- virtualref_boxes = resumereader.consume_boxes()
+ XXX # virtualizable_boxes = resumereader.consume_boxes()
+ resumereader.consume_boxes([], [], []) # XXX virtualref
frameinfo = storage.rd_frame_info_list
while True:
- env = resumereader.consume_boxes()
f = metainterp.newframe(frameinfo.jitcode)
- f.setup_resume_at_op(frameinfo.pc, frameinfo.exception_target, env)
+ f.setup_resume_at_op(frameinfo.pc)
+ resumereader.consume_boxes(f.registers_i, f.registers_r, f.registers_f)
frameinfo = frameinfo.prev
if frameinfo is None:
break
metainterp.framestack.reverse()
- return virtualizable_boxes, virtualref_boxes
+ resumereader.done()
+ return resumereader.liveboxes
def force_from_resumedata(metainterp, newboxes, storage,
expects_virtualizables):
@@ -496,57 +598,34 @@
virtualref_boxes = resumereader.consume_boxes()
return virtualizable_boxes, virtualref_boxes, resumereader.virtuals
+class ResumeDataBoxReader(AbstractResumeDataReader):
-class ResumeDataBoxReader(object):
- virtuals = None
-
- def __init__(self, storage, liveboxes, metainterp=None):
- self.cur_numb = storage.rd_numb
- self.consts = storage.rd_consts
- self.liveboxes = liveboxes
- self.cpu = metainterp.cpu
- self._prepare_virtuals(metainterp, storage.rd_virtuals)
- self._prepare_pendingfields(metainterp, storage.rd_pendingfields)
+ def __init__(self, storage, metainterp):
+ self.metainterp = metainterp
+ self.liveboxes = [None] * metainterp.cpu.get_latest_value_count()
+ self._prepare(metainterp.cpu, storage)
+
+ def consume_boxes(self, boxes_i, boxes_r, boxes_f):
+ self.boxes_i = boxes_i
+ self.boxes_r = boxes_r
+ self.boxes_f = boxes_f
+ self._prepare_next_section()
- def _prepare_virtuals(self, metainterp, virtuals):
- if virtuals:
- v = metainterp._already_allocated_resume_virtuals
- if v is not None:
- self.virtuals = v
- return
- self.virtuals = [None] * len(virtuals)
- for i in range(len(virtuals)):
- vinfo = virtuals[i]
- if vinfo is not None:
- self.virtuals[i] = vinfo.allocate(metainterp)
- for i in range(len(virtuals)):
- vinfo = virtuals[i]
- if vinfo is not None:
- vinfo.setfields(metainterp, self.virtuals[i],
- self._decode_box)
+ def allocate_with_vtable(self, known_class):
+ xxx
+ def setfield(self, descr, struct, fieldnum):
+ xxx
+ def setarrayitem(self, arraydescr, array, index, fieldnum):
+ xxx
- def _prepare_pendingfields(self, metainterp, pendingfields):
- if pendingfields:
- if metainterp._already_allocated_resume_virtuals is not None:
- return
- for descr, num, fieldnum in pendingfields:
- box = self._decode_box(num)
- fieldbox = self._decode_box(fieldnum)
- metainterp.execute_and_record(rop.SETFIELD_GC,
- descr, box, fieldbox)
+ def decode_int(self, tagged):
+ return self.decode_box(tagged, INT)
+ def decode_ref(self, tagged):
+ return self.decode_box(tagged, REF)
+ def decode_float(self, tagged):
+ return self.decode_box(tagged, FLOAT)
- def consume_boxes(self):
- numb = self.cur_numb
- assert numb is not None
- nums = numb.nums
- n = len(nums)
- boxes = [None] * n
- for i in range(n):
- boxes[i] = self._decode_box(nums[i])
- self.cur_numb = numb.prev
- return boxes
-
- def _decode_box(self, tagged, kind):
+ def decode_box(self, tagged, kind):
num, tag = untag(tagged)
if tag == TAGCONST:
if tagged_eq(tagged, NULLREF):
@@ -560,16 +639,42 @@
return ConstInt(num)
else:
assert tag == TAGBOX
- return self.liveboxes[num]
+ box = self.liveboxes[num]
+ if box is None:
+ box = self.load_box_from_cpu(num, kind)
+ return box
+
+ def load_box_from_cpu(self, num, kind):
+ if num < 0:
+ num += len(self.liveboxes)
+ assert num >= 0
+ if kind == INT:
+ box = BoxInt(self.cpu.get_latest_value_int(num))
+ elif kind == REF:
+ box = BoxPtr(self.cpu.get_latest_value_ref(num))
+ elif kind == FLOAT:
+ box = BoxFloat(self.cpu.get_latest_value_float(num))
+ else:
+ assert 0, "bad kind: %d" % ord(kind)
+ self.liveboxes[num] = box
+ return box
+
+ def write_an_int(self, index, box):
+ self.boxes_i[index] = box
+ def write_a_ref(self, index, box):
+ self.boxes_r[index] = box
+ def write_a_float(self, index, box):
+ self.boxes_f[index] = box
+# ---------- when resuming for blackholing, get direct values ----------
def blackhole_from_resumedata(blackholeinterpbuilder, storage,
expects_virtualizables):
- resumereader = ResumeDataDirectReader(storage, blackholeinterpbuilder.cpu)
+ resumereader = ResumeDataDirectReader(blackholeinterpbuilder.cpu, storage)
if expects_virtualizables:
XXX
#virtualref_boxes = resumereader.consume_boxes()
- resumereader.consume_one_section(None) # XXX
+ resumereader.consume_one_section(None) # virtualref XXX
#
# First get a chain of blackhole interpreters whose length is given
# by the depth of rd_frame_info_list. The first one we get must be
@@ -599,87 +704,42 @@
resumereader.done()
return firstbh
-class ResumeDataDirectReader(object):
- virtuals = None
+class ResumeDataDirectReader(AbstractResumeDataReader):
- def __init__(self, storage, cpu):
- self.cur_numb = storage.rd_numb
- self.consts = storage.rd_consts
- self.cpu = cpu
- self._prepare_virtuals(storage.rd_virtuals)
- self._prepare_pendingfields(storage.rd_pendingfields)
+ def __init__(self, cpu, storage):
+ self._prepare(cpu, storage)
- def _prepare_virtuals(self, virtuals):
- if virtuals:
- self.virtuals = [None] * len(virtuals)
- for i in range(len(virtuals)):
- vinfo = virtuals[i]
- if vinfo is not None:
- self.virtuals[i] = vinfo.allocate(self.cpu)
- for i in range(len(virtuals)):
- vinfo = virtuals[i]
- if vinfo is not None:
- vinfo.setfields(self, self.virtuals[i])
+ def consume_one_section(self, blackholeinterp):
+ self.blackholeinterp = blackholeinterp
+ self._prepare_next_section()
- def _prepare_pendingfields(self, pendingfields):
- if pendingfields is not None:
- for descr, num, fieldnum in pendingfields:
- struct = self._decode_ref(num)
- self.setfield(descr, struct, fieldnum)
+    def allocate_with_vtable(self, known_class):
+        # Allocate a new instance whose vtable/class is 'known_class'.
+        # Bug fix: the original patch passed 'self.known_class', but the
+        # reader object has no such attribute -- the class comes in as the
+        # method parameter, so pass 'known_class' directly.
+        from pypy.jit.metainterp.executor import exec_new_with_vtable
+        return exec_new_with_vtable(self.cpu, known_class)
def setfield(self, descr, struct, fieldnum):
if descr.is_pointer_field():
- newvalue = self._decode_ref(fieldnum)
+ newvalue = self.decode_ref(fieldnum)
self.cpu.bh_setfield_gc_r(struct, descr, newvalue)
elif descr.is_float_field():
- newvalue = self._decode_float(fieldnum)
+ newvalue = self.decode_float(fieldnum)
self.cpu.bh_setfield_gc_f(struct, descr, newvalue)
else:
- newvalue = self._decode_int(fieldnum)
+ newvalue = self.decode_int(fieldnum)
self.cpu.bh_setfield_gc_i(struct, descr, newvalue)
def setarrayitem(self, arraydescr, array, index, fieldnum):
if arraydescr.is_array_of_pointers():
- newvalue = self._decode_ref(fieldnum)
+ newvalue = self.decode_ref(fieldnum)
self.cpu.bh_setarrayitem_gc_r(arraydescr, array, index, newvalue)
elif arraydescr.is_array_of_floats():
- newvalue = self._decode_float(fieldnum)
+ newvalue = self.decode_float(fieldnum)
self.cpu.bh_setarrayitem_gc_f(arraydescr, array, index, newvalue)
else:
- newvalue = self._decode_int(fieldnum)
+ newvalue = self.decode_int(fieldnum)
self.cpu.bh_setarrayitem_gc_i(arraydescr, array, index, newvalue)
- def consume_one_section(self, blackholeinterp):
- numb = self.cur_numb
- count_i = count_r = count_f = 0
- for num in numb.nums:
- kind = self._decode_kind(num)
- if kind == INT:
- blackholeinterp.setarg_i(count_i, self._decode_int(num))
- count_i += 1
- elif kind == REF:
- blackholeinterp.setarg_r(count_r, self._decode_ref(num))
- count_r += 1
- elif kind == FLOAT:
- blackholeinterp.setarg_f(count_f, self._decode_float(num))
- count_f += 1
- self.cur_numb = numb.prev
-
- def _decode_kind(self, tagged):
- num, tag = untag(tagged)
- if tag == TAGCONST:
- if tagged_eq(tagged, NULLREF):
- return REF
- return self.consts[num].type
- elif tag == TAGVIRTUAL:
- return REF
- elif tag == TAGINT:
- return INT
- else:
- assert tag == TAGBOX
- return self.cpu.get_latest_value_kind(num)
-
- def _decode_int(self, tagged):
+ def decode_int(self, tagged):
num, tag = untag(tagged)
if tag == TAGCONST:
return self.consts[num].getint()
@@ -691,7 +751,7 @@
num += self.cpu.get_latest_value_count()
return self.cpu.get_latest_value_int(num)
- def _decode_ref(self, tagged):
+ def decode_ref(self, tagged):
num, tag = untag(tagged)
if tag == TAGCONST:
if tagged_eq(tagged, NULLREF):
@@ -707,7 +767,7 @@
num += self.cpu.get_latest_value_count()
return self.cpu.get_latest_value_ref(num)
- def _decode_float(self, tagged):
+ def decode_float(self, tagged):
num, tag = untag(tagged)
if tag == TAGCONST:
return self.consts[num].getfloat()
@@ -717,8 +777,14 @@
num += self.cpu.get_latest_value_count()
return self.cpu.get_latest_value_float(num)
- def done(self):
- self.cpu.clear_latest_values()
+ def write_an_int(self, index, int):
+ self.blackholeinterp.setarg_i(index, int)
+
+ def write_a_ref(self, index, ref):
+ self.blackholeinterp.setarg_r(index, ref)
+
+ def write_a_float(self, index, float):
+ self.blackholeinterp.setarg_f(index, float)
# ____________________________________________________________
Modified: pypy/branch/blackhole-improvement/pypy/rpython/lltypesystem/llmemory.py
==============================================================================
--- pypy/branch/blackhole-improvement/pypy/rpython/lltypesystem/llmemory.py (original)
+++ pypy/branch/blackhole-improvement/pypy/rpython/lltypesystem/llmemory.py Wed May 19 11:10:04 2010
@@ -7,6 +7,7 @@
import weakref
from pypy.rlib.objectmodel import Symbolic
from pypy.rpython.lltypesystem import lltype
+from pypy.tool.uid import uid
class AddressOffset(Symbolic):
@@ -507,6 +508,11 @@
if isinstance(other, AddressAsInt):
return cmp(self.adr, other.adr)
return Symbolic.__cmp__(self, other)
+ def __repr__(self):
+ try:
+ return '<AddressAsInt %s>' % (self.adr.ptr,)
+ except AttributeError:
+ return '<AddressAsInt at 0x%x>' % (uid(self),)
# ____________________________________________________________
More information about the Pypy-commit
mailing list