[pypy-svn] pypy default: When forcing a virtual that is actually an immutable struct, if it
arigo
commits-noreply at bitbucket.org
Fri Apr 8 17:52:59 CEST 2011
Author: Armin Rigo <arigo at tunes.org>
Branch:
Changeset: r43226:0db17e3bbb64
Date: 2011-04-08 17:51 +0200
http://bitbucket.org/pypy/pypy/changeset/0db17e3bbb64/
Log: When forcing a virtual that is actually an immutable struct, if it
contains only constants, then it can become a constant struct.
Should help e.g. to remove W_IntObject(0) instances that were still malloced
in the operations sent to the backend.
diff --git a/pypy/jit/backend/llgraph/runner.py b/pypy/jit/backend/llgraph/runner.py
--- a/pypy/jit/backend/llgraph/runner.py
+++ b/pypy/jit/backend/llgraph/runner.py
@@ -25,12 +25,13 @@
class Descr(history.AbstractDescr):
def __init__(self, ofs, typeinfo, extrainfo=None, name=None,
- arg_types=None):
+ arg_types=None, count_fields_if_immut=-1):
self.ofs = ofs
self.typeinfo = typeinfo
self.extrainfo = extrainfo
self.name = name
self.arg_types = arg_types
+ self.count_fields_if_immut = count_fields_if_immut
def get_arg_types(self):
return self.arg_types
@@ -63,6 +64,9 @@
def as_vtable_size_descr(self):
return self
+ def count_fields_if_immutable(self):
+ return self.count_fields_if_immut
+
def __lt__(self, other):
raise TypeError("cannot use comparison on Descrs")
def __le__(self, other):
@@ -109,12 +113,14 @@
return False
def getdescr(self, ofs, typeinfo='?', extrainfo=None, name=None,
- arg_types=None):
- key = (ofs, typeinfo, extrainfo, name, arg_types)
+ arg_types=None, count_fields_if_immut=-1):
+ key = (ofs, typeinfo, extrainfo, name, arg_types,
+ count_fields_if_immut)
try:
return self._descrs[key]
except KeyError:
- descr = Descr(ofs, typeinfo, extrainfo, name, arg_types)
+ descr = Descr(ofs, typeinfo, extrainfo, name, arg_types,
+ count_fields_if_immut)
self._descrs[key] = descr
return descr
@@ -284,7 +290,8 @@
def sizeof(self, S):
assert not isinstance(S, lltype.Ptr)
- return self.getdescr(symbolic.get_size(S))
+ count = heaptracker.count_fields_if_immutable(S)
+ return self.getdescr(symbolic.get_size(S), count_fields_if_immut=count)
class LLtypeCPU(BaseCPU):
diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py
--- a/pypy/jit/metainterp/history.py
+++ b/pypy/jit/metainterp/history.py
@@ -179,6 +179,9 @@
"""
raise NotImplementedError
+ def count_fields_if_immutable(self):
+ return -1
+
def _clone_if_mutable(self):
return self
def clone_if_mutable(self):
diff --git a/pypy/jit/metainterp/test/test_compile.py b/pypy/jit/metainterp/test/test_compile.py
--- a/pypy/jit/metainterp/test/test_compile.py
+++ b/pypy/jit/metainterp/test/test_compile.py
@@ -86,6 +86,8 @@
metainterp.history = History()
metainterp.history.operations = loop.operations[:]
metainterp.history.inputargs = loop.inputargs[:]
+ cpu._all_size_descrs_with_vtable = (
+ LLtypeMixin.cpu._all_size_descrs_with_vtable)
#
loop_tokens = []
loop_token = compile_new_loop(metainterp, loop_tokens, [], 0, None)
diff --git a/pypy/jit/codewriter/heaptracker.py b/pypy/jit/codewriter/heaptracker.py
--- a/pypy/jit/codewriter/heaptracker.py
+++ b/pypy/jit/codewriter/heaptracker.py
@@ -10,6 +10,30 @@
def int2adr(int):
return llmemory.cast_int_to_adr(int)
+def count_fields_if_immutable(STRUCT):
+ assert isinstance(STRUCT, lltype.GcStruct)
+ if STRUCT._hints.get('immutable', False):
+ try:
+ return _count_fields(STRUCT)
+ except ValueError:
+ pass
+ return -1
+
+def _count_fields(STRUCT):
+ if STRUCT == rclass.OBJECT:
+ return 0 # don't count 'typeptr'
+ result = 0
+ for fieldname, TYPE in STRUCT._flds.items():
+ if TYPE is lltype.Void:
+ pass # ignore Voids
+ elif not isinstance(TYPE, lltype.ContainerType):
+ result += 1
+ elif isinstance(TYPE, lltype.GcStruct):
+ result += _count_fields(TYPE)
+ else:
+ raise ValueError(TYPE)
+ return result
+
# ____________________________________________________________
def has_gcstruct_a_vtable(GCSTRUCT):
diff --git a/pypy/jit/backend/llsupport/test/test_descr.py b/pypy/jit/backend/llsupport/test/test_descr.py
--- a/pypy/jit/backend/llsupport/test/test_descr.py
+++ b/pypy/jit/backend/llsupport/test/test_descr.py
@@ -18,12 +18,33 @@
descr_t = get_size_descr(c0, T)
assert descr_s.size == symbolic.get_size(S, False)
assert descr_t.size == symbolic.get_size(T, False)
+ assert descr_s.count_fields_if_immutable() == -1
+ assert descr_t.count_fields_if_immutable() == -1
assert descr_s == get_size_descr(c0, S)
assert descr_s != get_size_descr(c1, S)
#
descr_s = get_size_descr(c1, S)
assert isinstance(descr_s.size, Symbolic)
+ assert descr_s.count_fields_if_immutable() == -1
+def test_get_size_descr_immut():
+ S = lltype.GcStruct('S', hints={'immutable': True})
+ T = lltype.GcStruct('T', ('parent', S),
+ ('x', lltype.Char),
+ hints={'immutable': True})
+ U = lltype.GcStruct('U', ('parent', T),
+ ('u', lltype.Ptr(T)),
+ ('v', lltype.Signed),
+ hints={'immutable': True})
+ V = lltype.GcStruct('V', ('parent', U),
+ ('miss1', lltype.Void),
+ ('miss2', lltype.Void),
+ hints={'immutable': True})
+ for STRUCT, expected in [(S, 0), (T, 1), (U, 3), (V, 3)]:
+ for translated in [False, True]:
+ c0 = GcCache(translated)
+ descr_s = get_size_descr(c0, STRUCT)
+ assert descr_s.count_fields_if_immutable() == expected
def test_get_field_descr():
U = lltype.Struct('U')
diff --git a/pypy/jit/backend/llsupport/descr.py b/pypy/jit/backend/llsupport/descr.py
--- a/pypy/jit/backend/llsupport/descr.py
+++ b/pypy/jit/backend/llsupport/descr.py
@@ -43,9 +43,14 @@
class SizeDescr(AbstractDescr):
size = 0 # help translation
+ is_immutable = False
- def __init__(self, size):
+ def __init__(self, size, count_fields_if_immut=-1):
self.size = size
+ self.count_fields_if_immut = count_fields_if_immut
+
+ def count_fields_if_immutable(self):
+ return self.count_fields_if_immut
def repr_of_descr(self):
return '<SizeDescr %s>' % self.size
@@ -62,15 +67,15 @@
return cache[STRUCT]
except KeyError:
size = symbolic.get_size(STRUCT, gccache.translate_support_code)
+ count_fields_if_immut = heaptracker.count_fields_if_immutable(STRUCT)
if heaptracker.has_gcstruct_a_vtable(STRUCT):
- sizedescr = SizeDescrWithVTable(size)
+ sizedescr = SizeDescrWithVTable(size, count_fields_if_immut)
else:
- sizedescr = SizeDescr(size)
+ sizedescr = SizeDescr(size, count_fields_if_immut)
gccache.init_size_descr(STRUCT, sizedescr)
cache[STRUCT] = sizedescr
return sizedescr
-
# ____________________________________________________________
# FieldDescrs
diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py
--- a/pypy/jit/metainterp/optimizeopt/optimizer.py
+++ b/pypy/jit/metainterp/optimizeopt/optimizer.py
@@ -514,12 +514,10 @@
break
else:
# all constant arguments: constant-fold away
- argboxes = [self.get_constant_box(op.getarg(i))
- for i in range(op.numargs())]
- resbox = execute_nonspec(self.cpu, None,
- op.getopnum(), argboxes, op.getdescr())
- # FIXME: Don't we need to check for an overflow here?
- self.make_constant(op.result, resbox.constbox())
+ resbox = self.constant_fold(op)
+ # note that INT_xxx_OVF is not done from here, and the
+ # overflows in the INT_xxx operations are ignored
+ self.make_constant(op.result, resbox)
return
# did we do the exact same operation already?
@@ -538,6 +536,13 @@
if nextop:
self.emit_operation(nextop)
+ def constant_fold(self, op):
+ argboxes = [self.get_constant_box(op.getarg(i))
+ for i in range(op.numargs())]
+ resbox = execute_nonspec(self.cpu, None,
+ op.getopnum(), argboxes, op.getdescr())
+ return resbox.constbox()
+
#def optimize_GUARD_NO_OVERFLOW(self, op):
# # otherwise the default optimizer will clear fields, which is unwanted
# # in this case
diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py
--- a/pypy/jit/metainterp/test/test_optimizeopt.py
+++ b/pypy/jit/metainterp/test/test_optimizeopt.py
@@ -4960,6 +4960,58 @@
p2 = new_with_vtable(ConstClass(node_vtable))
setfield_gc(p2, i1, descr=nextdescr)
"""
+ py.test.skip("no test here")
+
+ def test_immutable_not(self):
+ ops = """
+ []
+ p0 = new_with_vtable(ConstClass(intobj_noimmut_vtable))
+ setfield_gc(p0, 42, descr=noimmut_intval)
+ escape(p0)
+ jump()
+ """
+ self.optimize_loop(ops, ops)
+
+ def test_immutable_variable(self):
+ ops = """
+ [i0]
+ p0 = new_with_vtable(ConstClass(intobj_immut_vtable))
+ setfield_gc(p0, i0, descr=immut_intval)
+ escape(p0)
+ jump(i0)
+ """
+ self.optimize_loop(ops, ops)
+
+ def test_immutable_incomplete(self):
+ ops = """
+ []
+ p0 = new_with_vtable(ConstClass(intobj_immut_vtable))
+ escape(p0)
+ jump()
+ """
+ self.optimize_loop(ops, ops)
+
+ def test_immutable_constantfold(self):
+ ops = """
+ []
+ p0 = new_with_vtable(ConstClass(intobj_immut_vtable))
+ setfield_gc(p0, 1242, descr=immut_intval)
+ escape(p0)
+ jump()
+ """
+ from pypy.rpython.lltypesystem import lltype, llmemory
+ class IntObj1242(object):
+ _TYPE = llmemory.GCREF.TO
+ def __eq__(self, other):
+ return other.container.intval == 1242
+ self.namespace['intobj1242'] = lltype._ptr(llmemory.GCREF,
+ IntObj1242())
+ expected = """
+ []
+ escape(ConstPtr(intobj1242))
+ jump()
+ """
+ self.optimize_loop(ops, expected)
# ----------
def optimize_strunicode_loop(self, ops, optops, preamble=None):
diff --git a/pypy/jit/metainterp/test/test_optimizeutil.py b/pypy/jit/metainterp/test/test_optimizeutil.py
--- a/pypy/jit/metainterp/test/test_optimizeutil.py
+++ b/pypy/jit/metainterp/test/test_optimizeutil.py
@@ -68,6 +68,16 @@
nodeobjvalue = lltype.cast_opaque_ptr(llmemory.GCREF, nodeobj)
refdescr = cpu.fielddescrof(NODEOBJ, 'ref')
+ INTOBJ_NOIMMUT = lltype.GcStruct('INTOBJ_NOIMMUT', ('parent', OBJECT),
+ ('intval', lltype.Signed))
+ INTOBJ_IMMUT = lltype.GcStruct('INTOBJ_IMMUT', ('parent', OBJECT),
+ ('intval', lltype.Signed),
+ hints={'immutable': True})
+ intobj_noimmut_vtable = lltype.malloc(OBJECT_VTABLE, immortal=True)
+ intobj_immut_vtable = lltype.malloc(OBJECT_VTABLE, immortal=True)
+ noimmut_intval = cpu.fielddescrof(INTOBJ_NOIMMUT, 'intval')
+ immut_intval = cpu.fielddescrof(INTOBJ_IMMUT, 'intval')
+
arraydescr = cpu.arraydescrof(lltype.GcArray(lltype.Signed))
floatarraydescr = cpu.arraydescrof(lltype.GcArray(lltype.Float))
@@ -155,6 +165,8 @@
register_known_gctype(cpu, node_vtable2, NODE2)
register_known_gctype(cpu, u_vtable, U)
register_known_gctype(cpu, jit_virtual_ref_vtable,vrefinfo.JIT_VIRTUAL_REF)
+ register_known_gctype(cpu, intobj_noimmut_vtable, INTOBJ_NOIMMUT)
+ register_known_gctype(cpu, intobj_immut_vtable, INTOBJ_IMMUT)
namespace = locals()
diff --git a/pypy/jit/metainterp/optimizeopt/virtualize.py b/pypy/jit/metainterp/optimizeopt/virtualize.py
--- a/pypy/jit/metainterp/optimizeopt/virtualize.py
+++ b/pypy/jit/metainterp/optimizeopt/virtualize.py
@@ -4,6 +4,8 @@
from pypy.jit.metainterp.optimizeutil import descrlist_dict
from pypy.rlib.objectmodel import we_are_translated
from pypy.jit.metainterp.optimizeopt import optimizer
+from pypy.jit.metainterp.executor import execute
+from pypy.jit.codewriter.heaptracker import vtable2descr
class AbstractVirtualValue(optimizer.OptValue):
@@ -72,28 +74,53 @@
assert isinstance(fieldvalue, optimizer.OptValue)
self._fields[ofs] = fieldvalue
+ def _get_descr(self):
+ raise NotImplementedError
+
+ def _is_immutable_and_filled_with_constants(self):
+ count = self._get_descr().count_fields_if_immutable()
+ if count != len(self._fields): # always the case if count == -1
+ return False
+ for value in self._fields.itervalues():
+ subbox = value.force_box()
+ if not isinstance(subbox, Const):
+ return False
+ return True
+
def _really_force(self):
- assert self.source_op is not None
+ op = self.source_op
+ assert op is not None
# ^^^ This case should not occur any more (see test_bug_3).
#
if not we_are_translated():
- self.source_op.name = 'FORCE ' + self.source_op.name
- newoperations = self.optimizer.newoperations
- newoperations.append(self.source_op)
- self.box = box = self.source_op.result
- #
- iteritems = self._fields.iteritems()
- if not we_are_translated(): #random order is fine, except for tests
- iteritems = list(iteritems)
- iteritems.sort(key = lambda (x,y): x.sort_key())
- for ofs, value in iteritems:
- if value.is_null():
- continue
- subbox = value.force_box()
- op = ResOperation(rop.SETFIELD_GC, [box, subbox], None,
- descr=ofs)
+ op.name = 'FORCE ' + self.source_op.name
+
+ if self._is_immutable_and_filled_with_constants():
+ box = self.optimizer.constant_fold(op)
+ self.make_constant(box)
+ for ofs, value in self._fields.iteritems():
+ subbox = value.force_box()
+ assert isinstance(subbox, Const)
+ execute(self.optimizer.cpu, None, rop.SETFIELD_GC,
+ ofs, box, subbox)
+ # keep self._fields, because it's all immutable anyway
+ else:
+ newoperations = self.optimizer.newoperations
newoperations.append(op)
- self._fields = None
+ self.box = box = op.result
+ #
+ iteritems = self._fields.iteritems()
+ if not we_are_translated(): #random order is fine, except for tests
+ iteritems = list(iteritems)
+ iteritems.sort(key = lambda (x,y): x.sort_key())
+ for ofs, value in iteritems:
+ if value.is_null():
+ continue
+ subbox = value.force_box()
+ op = ResOperation(rop.SETFIELD_GC, [box, subbox], None,
+ descr=ofs)
+ newoperations.append(op)
+ self._fields = None
def _get_field_descr_list(self):
_cached_sorted_fields = self._cached_sorted_fields
@@ -168,6 +195,9 @@
fielddescrs = self._get_field_descr_list()
return modifier.make_virtual(self.known_class, fielddescrs)
+ def _get_descr(self):
+ return vtable2descr(self.optimizer.cpu, self.known_class.getint())
+
def __repr__(self):
cls_name = self.known_class.value.adr.ptr._obj._TYPE._name
if self._fields is None:
@@ -185,6 +215,9 @@
fielddescrs = self._get_field_descr_list()
return modifier.make_vstruct(self.structdescr, fielddescrs)
+ def _get_descr(self):
+ return self.structdescr
+
class VArrayValue(AbstractVirtualValue):
def __init__(self, optimizer, arraydescr, size, keybox, source_op=None):
More information about the Pypy-commit
mailing list