[pypy-svn] r78940 - in pypy/branch/jit-unroll-loops/pypy/jit/metainterp: optimizeopt test

hakanardo at codespeak.net
Tue Nov 9 22:00:10 CET 2010


Author: hakanardo
Date: Tue Nov  9 22:00:06 2010
New Revision: 78940

Modified:
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/__init__.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/fficall.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/heap.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/intbounds.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/optimizer.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/rewrite.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/string.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/unroll.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/virtualize.py
   pypy/branch/jit-unroll-loops/pypy/jit/metainterp/test/test_optimizeopt.py
Log:
Recreate the optimizer after the preamble to get full control over what status information propagates from the preamble to the loop.
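
To illustrate the idea (a minimal sketch with hypothetical stand-in classes, not
the actual PyPy optimization passes): once the preamble has been optimized, each
pass is asked which instance should handle the unrolled loop body, so only state
that a pass explicitly chooses to carry over survives from the preamble.

    class Optimization(object):
        def reconstruct_for_next_iteration(self):
            # Each pass must decide explicitly what survives the preamble.
            raise NotImplementedError

    class StatelessPass(Optimization):
        def reconstruct_for_next_iteration(self):
            # No per-iteration state: reusing the same instance is safe.
            return self

    class CachingPass(Optimization):
        def __init__(self, caches=None):
            self.caches = caches if caches is not None else {}

        def reconstruct_for_next_iteration(self):
            # Keep the caches, but start from a fresh instance so no other
            # status leaks from the preamble into the loop.
            return CachingPass(self.caches)

    def reconstruct_optimizer(make_optimizer, passes):
        # Hypothetical driver: build a new optimizer for the second
        # iteration from whatever each pass chose to keep.
        return make_optimizer([p.reconstruct_for_next_iteration()
                               for p in passes])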

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/__init__.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/__init__.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/__init__.py	Tue Nov  9 22:00:06 2010
@@ -5,7 +5,7 @@
 from pypy.jit.metainterp.optimizeopt.heap import OptHeap
 from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall
 from pypy.jit.metainterp.optimizeopt.string import OptString
-from pypy.jit.metainterp.optimizeopt.unroll import OptUnroll
+from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll
 
 def optimize_loop_1(metainterp_sd, loop, unroll=True):
     """Optimize loop.operations to remove internal overheadish operations. 
@@ -19,11 +19,12 @@
                      OptFfiCall(),
                     ]
     if unroll:
-        optimizations.insert(0, OptUnroll())
         opt_str.enabled = False # FIXME: Workaround to disable string optimisation
                                 # during preamble but to keep it during the loop
-    optimizer = Optimizer(metainterp_sd, loop, optimizations)
-    optimizer.propagate_all_forward()
+        optimize_unroll(metainterp_sd, loop, optimizations)
+    else:
+        optimizer = Optimizer(metainterp_sd, loop, optimizations)
+        optimizer.propagate_all_forward()
 
 def optimize_bridge_1(metainterp_sd, bridge):
     """The same, but for a bridge. """

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/fficall.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/fficall.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/fficall.py	Tue Nov  9 22:00:06 2010
@@ -67,6 +67,10 @@
     def __init__(self):
         self.funcinfo = None
 
+    def reconstruct_for_next_iteration(self):
+        return OptFfiCall()
+        # FIXME: Should any status be saved for next iteration?
+
     def begin_optimization(self, funcval, op):
         self.rollback_maybe()
         self.funcinfo = FuncInfo(funcval, self.optimizer.cpu, op)

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/heap.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/heap.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/heap.py	Tue Nov  9 22:00:06 2010
@@ -23,6 +23,15 @@
         self.lazy_setfields = {}
         self.lazy_setfields_descrs = []     # keys (at least) of previous dict
 
+    def reconstruct_for_next_iteration(self):
+        self.force_all_lazy_setfields()
+        assert not self.lazy_setfields_descrs
+        assert not self.lazy_setfields
+        new = OptHeap()
+        new.cached_fields = self.cached_fields
+        new.cached_arrayitems = self.cached_arrayitems
+        return new
+
     def clean_caches(self):
         self.cached_fields.clear()
         self.cached_arrayitems.clear()
@@ -149,9 +158,6 @@
         self.clean_caches()
 
 
-    def force_at_end_of_preamble(self):
-        self.force_all_lazy_setfields()
-
     def turned_constant(self, value):
         assert value.is_constant()
         newvalue = self.getvalue(value.box)

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/intbounds.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/intbounds.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/intbounds.py	Tue Nov  9 22:00:06 2010
@@ -13,6 +13,10 @@
         self.posponedop = None
         self.nextop = None
 
+    def reconstruct_for_next_iteration(self):
+        assert self.posponedop is None
+        return self 
+
     def propagate_forward(self, op):
         if op.is_ovf():
             self.posponedop = op

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/optimizer.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/optimizer.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/optimizer.py	Tue Nov  9 22:00:06 2010
@@ -199,6 +199,11 @@
     def turned_constant(self, value):
         pass
 
+    def reconstruct_for_next_iteration(self):
+        #return self.__class__()
+        raise NotImplementedError
+    
+
 class Optimizer(Optimization):
 
     def __init__(self, metainterp_sd, loop, optimizations=None):
@@ -214,6 +219,8 @@
         self.producer = {}
         self.pendingfields = []
         self.posponedop = None
+        self.exception_might_have_happened = False
+        self.newoperations = []
 
         if optimizations:
             self.first_optimization = optimizations[0]
@@ -233,6 +240,28 @@
         self.resumedata_memo = resume.ResumeDataLoopMemo(self.metainterp_sd)
         for o in self.optimizations:
             o.force_at_end_of_preamble()
+            
+    def reconstruct_for_next_iteration(self):
+        optimizations = [o.reconstruct_for_next_iteration() for o in 
+                         self.optimizations]
+        optimizations = self.optimizations
+        new = Optimizer(self.metainterp_sd, self.loop, optimizations)
+        new.values = self.values
+        new.interned_refs = self.interned_refs
+        new.bool_boxes = self.bool_boxes
+        new.loop_invariant_results = self.loop_invariant_results
+        new.pure_operations = self.pure_operations
+        new.producer = self.producer
+        assert self.posponedop is None
+
+        # FIXME: HACK!! Add a reconstruct_for_next_iteration to
+        # the values instead and reconstruct them in the same manner.
+        # That should also give us a clean solution for enabling
+        # OptString in the preamble that forces its virtuals before
+        # the loop
+        for v in new.values.values(): 
+            v.optimizer = new
+        return new
 
     def turned_constant(self, value):
         for o in self.optimizations:
@@ -322,8 +351,6 @@
             return CVAL_ZERO
 
     def propagate_all_forward(self):
-        self.exception_might_have_happened = False
-        self.newoperations = []
         self.i = 0
         while self.i < len(self.loop.operations):
             op = self.loop.operations[self.i]

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/rewrite.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/rewrite.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/rewrite.py	Tue Nov  9 22:00:06 2010
@@ -9,6 +9,9 @@
     """Rewrite operations into equivalent, cheaper operations.
        This includes already executed operations and constants.
     """
+
+    def reconstruct_for_next_iteration(self):
+        return self
     
     def propagate_forward(self, op):
         args = self.optimizer.make_args_key(op)

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/string.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/string.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/string.py	Tue Nov  9 22:00:06 2010
@@ -366,6 +366,10 @@
 class OptString(optimizer.Optimization):
     "Handling of strings and unicodes."
     enabled = True
+
+    def reconstruct_for_next_iteration(self):
+        self.enabled = True
+        return self
     
     def make_vstring_plain(self, box, source_op, mode):
         vvalue = VStringPlainValue(self.optimizer, box, source_op, mode)
@@ -656,8 +660,6 @@
         else:
             self.emit_operation(op)
 
-    def force_at_end_of_preamble(self):
-        self.enabled = True
 
 optimize_ops = _findall(OptString, 'optimize_')
 

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/unroll.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/unroll.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/unroll.py	Tue Nov  9 22:00:06 2010
@@ -3,37 +3,51 @@
 from pypy.jit.metainterp.compile import ResumeGuardDescr
 from pypy.jit.metainterp.resume import Snapshot
 
-class OptUnroll(Optimization):
+# FIXME: Introduce some VirtualOptimizer superclass instead
+
+def optimize_unroll(metainterp_sd, loop, optimizations):
+    opt = UnrollOptimizer(metainterp_sd, loop, optimizations)
+    opt.propagate_all_forward()
+
+class UnrollOptimizer(Optimization):
     """Unroll the loop into two iterations. The first one will
     become the preamble or entry bridge (don't think there is a
     distinction anymore)"""
     
-    def setup(self):
+    def __init__(self, metainterp_sd, loop, optimizations):
+        self.optimizer = Optimizer(metainterp_sd, loop, optimizations)
         self.cloned_operations = []
         for op in self.optimizer.loop.operations:
             self.cloned_operations.append(op.clone())
         
             
-    def propagate_forward(self, op):
+    def propagate_all_forward(self):
+        loop = self.optimizer.loop
+        jumpop = loop.operations[-1]
+        if jumpop.getopnum() == rop.JUMP:
+            loop.operations = loop.operations[:-1]
+        else:
+            loopop = None
+
+        self.optimizer.propagate_all_forward()
+
 
-        if op.getopnum() == rop.JUMP:
-            self.optimizer.force_at_end_of_preamble()
-            loop = self.optimizer.loop
-            assert op.getdescr() is loop.token
+        if jumpop:
+            assert jumpop.getdescr() is loop.token
             loop.preamble.operations = self.optimizer.newoperations
-            self.optimizer.newoperations = []
-            jump_args = op.getarglist()
-            op.initarglist([])
-            # Exceptions not caught in one iteration should not propagate to the next
-            self.optimizer.exception_might_have_happened = False
+
+            self.optimizer = self.optimizer.reconstruct_for_next_iteration()
+
+            jump_args = jumpop.getarglist()
+            jumpop.initarglist([])            
             inputargs = self.inline(self.cloned_operations,
                                     loop.inputargs, jump_args)
             loop.inputargs = inputargs
             jmp = ResOperation(rop.JUMP, loop.inputargs[:], None)
             jmp.setdescr(loop.token)
             loop.preamble.operations.append(jmp)
-        else:
-            self.emit_operation(op)
+
+            loop.operations = self.optimizer.newoperations
 
     def inline(self, loop_operations, loop_args, jump_args):
         self.argmap = argmap = {}
@@ -62,6 +76,7 @@
             else:
                 args = newop.getarglist()
             newop.initarglist([self.inline_arg(a) for a in args])
+            #print 'P:', newop
             
             if newop.result:
                 old_result = newop.result
@@ -71,8 +86,8 @@
             descr = newop.getdescr()
             if isinstance(descr, ResumeGuardDescr):
                 descr.rd_snapshot = self.inline_snapshot(descr.rd_snapshot)
-                
-            self.emit_operation(newop)
+
+            self.optimizer.first_optimization.propagate_forward(newop)
 
         # Remove jump to make sure forced code are placed before it
         newoperations = self.optimizer.newoperations
@@ -83,7 +98,7 @@
         boxes_created_this_iteration = {}
         jumpargs = jmp.getarglist()
 
-        # FIXME: Should also loop over operations added by forcing things in this loop 
+        # FIXME: Should also loop over operations added by forcing things in this loop
         for op in newoperations: 
             #print 'E: ', str(op)
             boxes_created_this_iteration[op.result] = True

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/virtualize.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/virtualize.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/optimizeopt/virtualize.py	Tue Nov  9 22:00:06 2010
@@ -228,6 +228,9 @@
 class OptVirtualize(optimizer.Optimization):
     "Virtualize objects until they escape."
 
+    def reconstruct_for_next_iteration(self):
+        return self
+
     def make_virtual(self, known_class, box, source_op=None):
         vvalue = VirtualValue(self.optimizer, known_class, box, source_op)
         self.make_equal_to(box, vvalue)

Modified: pypy/branch/jit-unroll-loops/pypy/jit/metainterp/test/test_optimizeopt.py
==============================================================================
--- pypy/branch/jit-unroll-loops/pypy/jit/metainterp/test/test_optimizeopt.py	(original)
+++ pypy/branch/jit-unroll-loops/pypy/jit/metainterp/test/test_optimizeopt.py	Tue Nov  9 22:00:06 2010
@@ -3994,6 +3994,15 @@
         jump(p0, p1, p3, p5, p7, p8, p14, 1)
         """
         self.optimize_loop(ops, expected)
+
+    def test_inputargs_added_by_forcing_jumpargs(self):
+        # FIXME: Can this occur?
+        ops = """
+        [p0, p1, pinv]
+        i1 = getfield_gc(pinv, descr=valuedescr)
+        p2 = new_with_vtable(ConstClass(node_vtable))
+        setfield_gc(p2, i1, descr=nextdescr)
+        """
         
     # ----------
     def optimize_strunicode_loop(self, ops, optops, preamble=None):


