[Python-checkins] CVS: python/dist/src/Tools/compiler/compiler pyassem.py,1.19.2.1,1.19.2.2
Jeremy Hylton
jhylton@users.sourceforge.net
Mon, 17 Dec 2001 15:53:13 -0800
Update of /cvsroot/python/python/dist/src/Tools/compiler/compiler
In directory usw-pr-cvs1:/tmp/cvs-serv28544
Modified Files:
Tag: release21-maint
pyassem.py
Log Message:
Backport bugfixes since rev 1.19 on the trunk.
Brief summary:
Improve stack depth calculation, 1.24, 1.25, 1.28
Wrong co_lnotab, 1.20.
XXX_NAME ops should affect varnames, 1.21.
Fix list comp code gen, 1.26.
Fix _convert_NAME() for class bodies, 1.26.
Index: pyassem.py
===================================================================
RCS file: /cvsroot/python/python/dist/src/Tools/compiler/compiler/Attic/pyassem.py,v
retrieving revision 1.19.2.1
retrieving revision 1.19.2.2
diff -C2 -d -r1.19.2.1 -r1.19.2.2
*** pyassem.py 2001/06/27 14:03:30 1.19.2.1
--- pyassem.py 2001/12/17 23:53:10 1.19.2.2
***************
*** 1,4 ****
--- 1,6 ----
"""A flow graph representation for Python bytecode"""
+ from __future__ import nested_scopes
+
import dis
import new
***************
*** 8,11 ****
--- 10,15 ----
from compiler import misc
+ from compiler.consts import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, \
+ CO_VARKEYWORDS
def xxx_sort(l):
***************
*** 54,58 ****
# however. :-( If a client needs to remove these edges, call
# pruneEdges().
!
self.current.addNext(block)
self.startBlock(block)
--- 58,62 ----
# however. :-( If a client needs to remove these edges, call
# pruneEdges().
!
self.current.addNext(block)
self.startBlock(block)
***************
*** 109,113 ****
# XXX This is a total mess. There must be a better way to get
# the code blocks in the right order.
!
self.fixupOrderHonorNext(blocks, default_next)
self.fixupOrderForward(blocks, default_next)
--- 113,117 ----
# XXX This is a total mess. There must be a better way to get
# the code blocks in the right order.
!
self.fixupOrderHonorNext(blocks, default_next)
self.fixupOrderForward(blocks, default_next)
***************
*** 115,119 ****
def fixupOrderHonorNext(self, blocks, default_next):
"""Fix one problem with DFS.
!
The DFS uses child block, but doesn't know about the special
"next" block. As a result, the DFS can order blocks so that a
--- 119,123 ----
def fixupOrderHonorNext(self, blocks, default_next):
"""Fix one problem with DFS.
!
The DFS uses child block, but doesn't know about the special
"next" block. As a result, the DFS can order blocks so that a
***************
*** 195,204 ****
chains.insert(goes_before, c)
-
del blocks[:]
for c in chains:
for b in c:
blocks.append(b)
!
def getBlocks(self):
return self.blocks.elements()
--- 199,207 ----
chains.insert(goes_before, c)
del blocks[:]
for c in chains:
for b in c:
blocks.append(b)
!
def getBlocks(self):
return self.blocks.elements()
***************
*** 207,211 ****
"""Return nodes appropriate for use with dominator"""
return self.entry
!
def getContainedGraphs(self):
l = []
--- 210,214 ----
"""Return nodes appropriate for use with dominator"""
return self.entry
!
def getContainedGraphs(self):
l = []
***************
*** 246,250 ****
insts = map(str, self.insts)
return "<block %s %d:\n%s>" % (self.label, self.bid,
! string.join(insts, '\n'))
def emit(self, inst):
--- 249,253 ----
insts = map(str, self.insts)
return "<block %s %d:\n%s>" % (self.label, self.bid,
! string.join(insts, '\n'))
def emit(self, inst):
***************
*** 268,272 ****
_uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS',
! 'JUMP_ABSOLUTE', 'JUMP_FORWARD')
def pruneNext(self):
--- 271,275 ----
_uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS',
! 'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')
def pruneNext(self):
***************
*** 312,320 ****
# flags for code objects
- CO_OPTIMIZED = 0x0001
- CO_NEWLOCALS = 0x0002
- CO_VARARGS = 0x0004
- CO_VARKEYWORDS = 0x0008
- CO_NESTED = 0x0010
# the FlowGraph is transformed in place; it exists in one of these states
--- 315,318 ----
***************
*** 327,339 ****
super_init = FlowGraph.__init__
! def __init__(self, name, filename, args=(), optimized=0):
self.super_init()
self.name = name
self.filename = filename
self.docstring = None
self.args = args # XXX
self.argcount = getArgCount(args)
if optimized:
! self.flags = CO_OPTIMIZED | CO_NEWLOCALS
else:
self.flags = 0
--- 325,339 ----
super_init = FlowGraph.__init__
! def __init__(self, name, filename, args=(), optimized=0, klass=None):
self.super_init()
self.name = name
+ assert isinstance(filename, types.StringType)
self.filename = filename
self.docstring = None
self.args = args # XXX
self.argcount = getArgCount(args)
+ self.klass = klass
if optimized:
! self.flags = CO_OPTIMIZED | CO_NEWLOCALS
else:
self.flags = 0
***************
*** 364,367 ****
--- 364,371 ----
self.argcount = self.argcount - 1
+ def checkFlag(self, flag):
+ if self.flags & flag:
+ return 1
+
def setFreeVars(self, names):
self.freevars = list(names)
***************
*** 373,376 ****
--- 377,381 ----
"""Get a Python code object"""
if self.stage == RAW:
+ self.computeStackDepth()
self.flattenGraph()
if self.stage == FLAT:
***************
*** 400,403 ****
--- 405,438 ----
sys.stdout = save
+ def computeStackDepth(self):
+ """Compute the max stack depth.
+
+ Approach is to compute the stack effect of each basic block.
+ Then find the path through the code with the largest total
+ effect.
+ """
+ depth = {}
+ exit = None
+ for b in self.getBlocks():
+ depth[b] = findDepth(b.getInstructions())
+
+ seen = {}
+
+ def max_depth(b, d):
+ if seen.has_key(b):
+ return d
+ seen[b] = 1
+ d = d + depth[b]
+ children = b.get_children()
+ if children:
+ return max([max_depth(c, d) for c in children])
+ else:
+ if not b.label == "exit":
+ return max_depth(self.exit, d)
+ else:
+ return d
+
+ self.stacksize = max_depth(self.entry, 0)
+
def flattenGraph(self):
"""Arrange the blocks in order and resolve jumps"""
***************
*** 431,435 ****
elif self.hasjabs.has_elt(opname):
insts[i] = opname, begin[inst[1]]
- self.stacksize = findDepth(self.insts)
self.stage = FLAT
--- 466,469 ----
***************
*** 449,454 ****
t = self.insts[i]
if len(t) == 2:
! opname = t[0]
! oparg = t[1]
conv = self._converters.get(opname, None)
if conv:
--- 483,487 ----
t = self.insts[i]
if len(t) == 2:
! opname, oparg = t
conv = self._converters.get(opname, None)
if conv:
***************
*** 470,477 ****
def _lookupName(self, name, list):
! """Return index of name in list, appending if necessary"""
t = type(name)
for i in range(len(list)):
- # must do a comparison on type first to prevent UnicodeErrors
if t == type(list[i]) and list[i] == name:
return i
--- 503,516 ----
def _lookupName(self, name, list):
! """Return index of name in list, appending if necessary
!
! This routine uses a list instead of a dictionary, because a
! dictionary can't store two different keys if the keys have the
! same value but different types, e.g. 2 and 2L. The compiler
! must treat these two separately, so it does an explicit type
! comparison before comparing the values.
! """
t = type(name)
for i in range(len(list)):
if t == type(list[i]) and list[i] == name:
return i
***************
*** 492,498 ****
_convert_DELETE_FAST = _convert_LOAD_FAST
def _convert_NAME(self, arg):
return self._lookupName(arg, self.names)
- _convert_LOAD_NAME = _convert_NAME
_convert_STORE_NAME = _convert_NAME
_convert_DELETE_NAME = _convert_NAME
--- 531,543 ----
_convert_DELETE_FAST = _convert_LOAD_FAST
+ def _convert_LOAD_NAME(self, arg):
+ if self.klass is None:
+ self._lookupName(arg, self.varnames)
+ return self._lookupName(arg, self.names)
+
def _convert_NAME(self, arg):
+ if self.klass is None:
+ self._lookupName(arg, self.varnames)
return self._lookupName(arg, self.names)
_convert_STORE_NAME = _convert_NAME
_convert_DELETE_NAME = _convert_NAME
***************
*** 526,530 ****
if name[:9] == "_convert_":
opname = name[9:]
! _converters[opname] = obj
del name, obj, opname
--- 571,575 ----
if name[:9] == "_convert_":
opname = name[9:]
! _converters[opname] = obj
del name, obj, opname
***************
*** 556,560 ****
def newCodeObject(self):
assert self.stage == DONE
! if self.flags == 0:
nlocals = 0
else:
--- 601,605 ----
def newCodeObject(self):
assert self.stage == DONE
! if (self.flags & CO_NEWLOCALS) == 0:
nlocals = 0
else:
***************
*** 563,566 ****
--- 608,612 ----
if self.flags & CO_VARKEYWORDS:
argcount = argcount - 1
+
return new.code(argcount, nlocals, self.stacksize, self.flags,
self.lnotab.getCode(), self.getConsts(),
***************
*** 582,586 ****
l.append(elt)
return tuple(l)
!
def isJump(opname):
if opname[:4] == 'JUMP':
--- 628,632 ----
l.append(elt)
return tuple(l)
!
def isJump(opname):
if opname[:4] == 'JUMP':
***************
*** 676,699 ****
def getTable(self):
return string.join(map(chr, self.lnotab), '')
!
class StackDepthTracker:
# XXX 1. need to keep track of stack depth on jumps
# XXX 2. at least partly as a result, this code is broken
! def findDepth(self, insts):
depth = 0
maxDepth = 0
for i in insts:
opname = i[0]
! delta = self.effect.get(opname, 0)
! if delta > 1:
! depth = depth + delta
! elif delta < 0:
! if depth > maxDepth:
! maxDepth = depth
depth = depth + delta
else:
- if depth > maxDepth:
- maxDepth = depth
# now check patterns
for pat, pat_delta in self.patterns:
--- 722,741 ----
def getTable(self):
return string.join(map(chr, self.lnotab), '')
!
class StackDepthTracker:
# XXX 1. need to keep track of stack depth on jumps
# XXX 2. at least partly as a result, this code is broken
! def findDepth(self, insts, debug=0):
depth = 0
maxDepth = 0
for i in insts:
opname = i[0]
! if debug:
! print i,
! delta = self.effect.get(opname, None)
! if delta is not None:
depth = depth + delta
else:
# now check patterns
for pat, pat_delta in self.patterns:
***************
*** 703,712 ****
break
# if we still haven't found a match
! if delta == 0:
meth = getattr(self, opname, None)
if meth is not None:
depth = depth + meth(i[1])
! if depth < 0:
! depth = 0
return maxDepth
--- 745,756 ----
break
# if we still haven't found a match
! if delta is None:
meth = getattr(self, opname, None)
if meth is not None:
depth = depth + meth(i[1])
! if depth > maxDepth:
! maxDepth = depth
! if debug:
! print depth, maxDepth
return maxDepth
***************
*** 729,735 ****
# PRINT_EXPR?
'PRINT_ITEM': -1,
- 'LOAD_LOCALS': 1,
'RETURN_VALUE': -1,
! 'EXEC_STMT': -2,
'BUILD_CLASS': -2,
'STORE_NAME': -1,
--- 773,778 ----
# PRINT_EXPR?
'PRINT_ITEM': -1,
'RETURN_VALUE': -1,
! 'EXEC_STMT': -3,
'BUILD_CLASS': -2,
'STORE_NAME': -1,
***************
*** 743,746 ****
--- 786,794 ----
'IMPORT_NAME': 0,
'IMPORT_FROM': 1,
+ 'LOAD_ATTR': 0, # unlike other loads
+ # close enough...
+ 'SETUP_EXCEPT': 3,
+ 'SETUP_FINALLY': 3,
+ 'FOR_ITER': 1,
}
# use pattern match
***************
*** 749,773 ****
('LOAD_', 1),
]
!
! # special cases:
! # UNPACK_SEQUENCE, BUILD_TUPLE,
! # BUILD_LIST, CALL_FUNCTION, MAKE_FUNCTION, BUILD_SLICE
def UNPACK_SEQUENCE(self, count):
! return count
def BUILD_TUPLE(self, count):
! return -count
def BUILD_LIST(self, count):
! return -count
def CALL_FUNCTION(self, argc):
hi, lo = divmod(argc, 256)
! return lo + hi * 2
def CALL_FUNCTION_VAR(self, argc):
! return self.CALL_FUNCTION(argc)+1
def CALL_FUNCTION_KW(self, argc):
! return self.CALL_FUNCTION(argc)+1
def CALL_FUNCTION_VAR_KW(self, argc):
! return self.CALL_FUNCTION(argc)+2
def MAKE_FUNCTION(self, argc):
return -argc
def BUILD_SLICE(self, argc):
if argc == 2:
--- 797,821 ----
('LOAD_', 1),
]
!
def UNPACK_SEQUENCE(self, count):
! return count-1
def BUILD_TUPLE(self, count):
! return -count+1
def BUILD_LIST(self, count):
! return -count+1
def CALL_FUNCTION(self, argc):
hi, lo = divmod(argc, 256)
! return -(lo + hi * 2)
def CALL_FUNCTION_VAR(self, argc):
! return self.CALL_FUNCTION(argc)-1
def CALL_FUNCTION_KW(self, argc):
! return self.CALL_FUNCTION(argc)-1
def CALL_FUNCTION_VAR_KW(self, argc):
! return self.CALL_FUNCTION(argc)-2
def MAKE_FUNCTION(self, argc):
return -argc
+ def MAKE_CLOSURE(self, argc):
+ # XXX need to account for free variables too!
+ return -argc
def BUILD_SLICE(self, argc):
if argc == 2:
***************
*** 775,778 ****
elif argc == 3:
return -2
!
findDepth = StackDepthTracker().findDepth
--- 823,828 ----
elif argc == 3:
return -2
! def DUP_TOPX(self, argc):
! return argc
!
findDepth = StackDepthTracker().findDepth