[pypy-svn] r77826 - in pypy/branch/jitffi/pypy: interpreter/pyparser jit/backend jit/backend/x86/test jit/metainterp jit/metainterp/test jit/tl rpython/memory/gc
antocuni at codespeak.net
antocuni at codespeak.net
Tue Oct 12 13:41:53 CEST 2010
Author: antocuni
Date: Tue Oct 12 13:41:51 2010
New Revision: 77826
Added:
pypy/branch/jitffi/pypy/interpreter/pyparser/autopath.py (contents, props changed)
pypy/branch/jitffi/pypy/interpreter/pyparser/genpytokenize.py (contents, props changed)
pypy/branch/jitffi/pypy/interpreter/pyparser/pylexer.py (contents, props changed)
pypy/branch/jitffi/pypy/jit/backend/conftest.py (contents, props changed)
pypy/branch/jitffi/pypy/jit/backend/x86/test/test_string.py (contents, props changed)
pypy/branch/jitffi/pypy/jit/metainterp/optimize_nopspec.py (contents, props changed)
pypy/branch/jitffi/pypy/jit/metainterp/test/test_loop_nopspec.py (contents, props changed)
pypy/branch/jitffi/pypy/jit/tl/jittest.py (contents, props changed)
pypy/branch/jitffi/pypy/rpython/memory/gc/inspector.py (contents, props changed)
Log:
add more files that were lost during the merge :-(
Added: pypy/branch/jitffi/pypy/interpreter/pyparser/autopath.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/interpreter/pyparser/autopath.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,134 @@
+"""
+self cloning, automatic path configuration
+
+copy this into any subdirectory of pypy from which scripts need
+to be run, typically all of the test subdirs.
+The idea is that any such script simply issues
+
+ import autopath
+
+and this will make sure that the parent directory containing "pypy"
+is in sys.path.
+
+If you modify the master "autopath.py" version (in pypy/tool/autopath.py)
+you can directly run it which will copy itself on all autopath.py files
+it finds under the pypy root directory.
+
+This module always provides these attributes:
+
+ pypydir pypy root directory path
+ this_dir directory where this autopath.py resides
+
+"""
+
def __dirinfo(part):
    """ return (partdir, this_dir) and insert parent of partdir
    into sys.path. If the parent directories don't have the part
    an EnvironmentError is raised."""

    import sys, os
    try:
        head = this_dir = os.path.realpath(os.path.dirname(__file__))
    except NameError:
        # __file__ is not defined when this code is exec'ed: fall back
        # to the directory of the script being run
        head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))

    error = None
    while head:
        partdir = head
        head, tail = os.path.split(head)
        if tail == part:
            # sanity check: next to 'part' there must be a 'pypy' package,
            # otherwise this is not a real source checkout
            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
            if not os.path.exists(checkfile):
                error = "Cannot find %r" % (os.path.normpath(checkfile),)
            break
    else:
        # while-else: we walked up to the filesystem root without
        # finding a directory named 'part'
        error = "Cannot find the parent directory %r of the path %r" % (
            partdir, this_dir)
    if not error:
        # check for bogus end-of-line style (e.g. files checked out on
        # Windows and moved to Unix)
        f = open(__file__.replace('.pyc', '.py'), 'r')
        data = f.read()
        f.close()
        if data.endswith('\r\n') or data.endswith('\r'):
            error = ("Bad end-of-line style in the .py files. Typically "
                     "caused by a zip file or a checkout done on Windows and "
                     "moved to Unix or vice-versa.")
    if error:
        raise EnvironmentError("Invalid source tree - bogus checkout! " +
                               error)

    # make the directory *containing* the pypy package the first sys.path
    # entry (removing any previous occurrence first, so it really is first)
    pypy_root = os.path.join(head, '')
    try:
        sys.path.remove(head)
    except ValueError:
        pass
    sys.path.insert(0, head)

    # find already-imported top-level modules that actually live inside the
    # pypy tree, and re-register them under their dotted name
    munged = {}
    for name, mod in sys.modules.items():
        if '.' in name:
            continue
        fn = getattr(mod, '__file__', None)
        if not isinstance(fn, str):
            continue
        newname = os.path.splitext(os.path.basename(fn))[0]
        # NOTE(review): newname is a bare basename without extension, so it
        # normally contains no dot and this startswith(part + '.') test
        # looks like it can never succeed -- confirm against the master
        # copy in pypy/tool/autopath.py
        if not newname.startswith(part + '.'):
            continue
        path = os.path.join(os.path.dirname(os.path.realpath(fn)), '')
        if path.startswith(pypy_root) and newname != part:
            modpaths = os.path.normpath(path[len(pypy_root):]).split(os.sep)
            if newname != '__init__':
                modpaths.append(newname)
            modpath = '.'.join(modpaths)
            if modpath not in sys.modules:
                munged[modpath] = mod

    # install the dotted aliases, and hook each one onto its parent package
    for name, mod in munged.iteritems():
        if name not in sys.modules:
            sys.modules[name] = mod
        if '.' in name:
            prename = name[:name.rfind('.')]
            postname = name[len(prename)+1:]
            if prename not in sys.modules:
                __import__(prename)
            if not hasattr(sys.modules[prename], postname):
                setattr(sys.modules[prename], postname, mod)

    return partdir, this_dir
+
+def __clone():
+ """ clone master version of autopath.py into all subdirs """
+ from os.path import join, walk
+ if not this_dir.endswith(join('pypy','tool')):
+ raise EnvironmentError("can only clone master version "
+ "'%s'" % join(pypydir, 'tool',_myname))
+
+
+ def sync_walker(arg, dirname, fnames):
+ if _myname in fnames:
+ fn = join(dirname, _myname)
+ f = open(fn, 'rwb+')
+ try:
+ if f.read() == arg:
+ print "checkok", fn
+ else:
+ print "syncing", fn
+ f = open(fn, 'w')
+ f.write(arg)
+ finally:
+ f.close()
+ s = open(join(pypydir, 'tool', _myname), 'rb').read()
+ walk(pypydir, sync_walker, s)
+
_myname = 'autopath.py'

# set guaranteed attributes

pypydir, this_dir = __dirinfo('pypy')
import py # note: py is imported only AFTER the path has been set
# locations of the pristine and the locally-modified CPython library trees
libpythondir = str(py.path.local(pypydir).dirpath().join('lib-python', '2.5.2'))
libpythonmodifieddir = str(py.path.local(libpythondir).dirpath().join('modified-2.5.2'))

# running this file directly re-copies the master version into all subdirs
if __name__ == '__main__':
    __clone()
Added: pypy/branch/jitffi/pypy/interpreter/pyparser/genpytokenize.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/interpreter/pyparser/genpytokenize.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,306 @@
+#! /usr/bin/env python
+"""Module genPytokenize
+
+Generates finite state automata for recognizing Python tokens. These are hand
+coded versions of the regular expressions originally appearing in Ping's
+tokenize module in the Python standard library.
+
+When run from the command line, this should pretty print the DFA machinery.
+
+$Id: genPytokenize.py,v 1.1 2003/10/02 17:37:17 jriehl Exp $
+"""
+
+import autopath
+from pypy.interpreter.pyparser.pylexer import *
+from pypy.interpreter.pyparser.automata import NonGreedyDFA, DFA, DEFAULT
+
def makePyPseudoDFA ():
    """Build and return the main tokenizer DFA: optional whitespace
    followed by one pseudo-token (line continuation / comment / triple
    quote opener, number, operator/bracket/special, string start, or
    name), constructed with the NFA combinators from pylexer and then
    converted via nfaToDfa()."""
    import string
    # 'states' is the shared mutable NFA state table that every
    # combinator below appends to
    states = []
    # ____________________________________________________________
    def makeLineCont ():
        # backslash, optional CR, LF: an escaped line break
        return chain(states,
                     newArcPair(states, "\\"),
                     maybe(states, newArcPair(states, "\r")),
                     newArcPair(states, "\n"))
    # ____________________________________________________________
    # Ignore stuff
    def makeWhitespace ():
        return any(states, groupStr(states, " \f\t"))
    # ____________________________________________________________
    def makeComment ():
        # '#' up to (but not including) the end of line
        return chain(states,
                     newArcPair(states, "#"),
                     any(states, notGroupStr(states, "\r\n")))
    # ____________________________________________________________
    #ignore = chain(states,
    #               makeWhitespace(),
    #               any(states, chain(states,
    #                                 makeLineCont(),
    #                                 makeWhitespace())),
    #               maybe(states, makeComment()))
    # ____________________________________________________________
    # Names
    name = chain(states,
                 groupStr(states, string.letters + "_"),
                 any(states, groupStr(states,
                                      string.letters + string.digits + "_")))
    # ____________________________________________________________
    # Digits
    def makeDigits ():
        return groupStr(states, "0123456789")
    # ____________________________________________________________
    # Integer numbers
    hexNumber = chain(states,
                      newArcPair(states, "0"),
                      groupStr(states, "xX"),
                      any(states, groupStr(states, "0123456789abcdefABCDEF")),
                      maybe(states, groupStr(states, "lL")))
    octNumber = chain(states,
                      newArcPair(states, "0"),
                      any(states, groupStr(states, "01234567")),
                      maybe(states, groupStr(states, "lL")))
    decNumber = chain(states,
                      groupStr(states, "123456789"),
                      any(states, makeDigits()),
                      maybe(states, groupStr(states, "lL")))
    intNumber = group(states, hexNumber, octNumber, decNumber)
    # ____________________________________________________________
    # Exponents
    def makeExp ():
        return chain(states,
                     groupStr(states, "eE"),
                     maybe(states, groupStr(states, "+-")),
                     atleastonce(states, makeDigits()))
    # ____________________________________________________________
    # Floating point numbers
    def makeFloat ():
        # either '123.456' / '.456' with optional exponent, or '123e4'
        pointFloat = chain(states,
                           group(states,
                                 chain(states,
                                       atleastonce(states, makeDigits()),
                                       newArcPair(states, "."),
                                       any(states, makeDigits())),
                                 chain(states,
                                       newArcPair(states, "."),
                                       atleastonce(states, makeDigits()))),
                           maybe(states, makeExp()))
        expFloat = chain(states,
                         atleastonce(states, makeDigits()),
                         makeExp())
        return group(states, pointFloat, expFloat)
    # ____________________________________________________________
    # Imaginary numbers
    imagNumber = group(states,
                       chain(states,
                             atleastonce(states, makeDigits()),
                             groupStr(states, "jJ")),
                       chain(states,
                             makeFloat(),
                             groupStr(states, "jJ")))
    # ____________________________________________________________
    # Any old number.
    number = group(states, imagNumber, makeFloat(), intNumber)
    # ____________________________________________________________
    # Funny
    operator = group(states,
                     chain(states,
                           chainStr(states, "**"),
                           maybe(states, newArcPair(states, "="))),
                     chain(states,
                           chainStr(states, ">>"),
                           maybe(states, newArcPair(states, "="))),
                     chain(states,
                           chainStr(states, "<<"),
                           maybe(states, newArcPair(states, "="))),
                     chainStr(states, "<>"),
                     chainStr(states, "!="),
                     chain(states,
                           chainStr(states, "//"),
                           maybe(states, newArcPair(states, "="))),
                     chain(states,
                           groupStr(states, "+-*/%&|^=<>"),
                           maybe(states, newArcPair(states, "="))),
                     newArcPair(states, "~"))
    bracket = groupStr(states, "[](){}")
    special = group(states,
                    chain(states,
                          maybe(states, newArcPair(states, "\r")),
                          newArcPair(states, "\n")),
                    groupStr(states, "@:;.,`"))
    funny = group(states, operator, bracket, special)
    # ____________________________________________________________
    def makeStrPrefix ():
        # optional u/U then optional r/R string prefix
        return chain(states,
                     maybe(states, groupStr(states, "uU")),
                     maybe(states, groupStr(states, "rR")))
    # ____________________________________________________________
    # a one-line string, or a string opener ending in a line continuation
    contStr = group(states,
                    chain(states,
                          makeStrPrefix(),
                          newArcPair(states, "'"),
                          any(states,
                              notGroupStr(states, "\n'\\")),
                          any(states,
                              chain(states,
                                    newArcPair(states, "\\"),
                                    newArcPair(states, DEFAULT),
                                    any(states,
                                        notGroupStr(states, "\n'\\")))),
                          group(states,
                                newArcPair(states, "'"),
                                makeLineCont())),
                    chain(states,
                          makeStrPrefix(),
                          newArcPair(states, '"'),
                          any(states,
                              notGroupStr(states, '\n"\\')),
                          any(states,
                              chain(states,
                                    newArcPair(states, "\\"),
                                    newArcPair(states, DEFAULT),
                                    any(states,
                                        notGroupStr(states, '\n"\\')))),
                          group(states,
                                newArcPair(states, '"'),
                                makeLineCont())))
    triple = chain(states,
                   makeStrPrefix(),
                   group(states,
                         chainStr(states, "'''"),
                         chainStr(states, '"""')))
    pseudoExtras = group(states,
                         makeLineCont(),
                         makeComment(),
                         triple)
    pseudoToken = chain(states,
                        makeWhitespace(),
                        group(states,
                              pseudoExtras, number, funny, contStr, name))
    dfaStates, dfaAccepts = nfaToDfa(states, *pseudoToken)
    return DFA(dfaStates, dfaAccepts)
+
+# ______________________________________________________________________
+
def makePyEndDFAMap ():
    """Build the map from a string's opening quote (with optional u/U,
    r/R prefix) to the DFA that recognizes the rest of the string up to
    and including its closing quote.  The triple-quote DFAs are
    NonGreedyDFAs so they stop at the first closing triple quote."""
    states = []
    # body of a '...' string: anything but ' or \, with \<any> escapes
    single = chain(states,
                   any(states, notGroupStr(states, "'\\")),
                   any(states,
                       chain(states,
                             newArcPair(states, "\\"),
                             newArcPair(states, DEFAULT),
                             any(states, notGroupStr(states, "'\\")))),
                   newArcPair(states, "'"))
    singleDFA = DFA(*nfaToDfa(states, *single))
    states = []
    double = chain(states,
                   any(states, notGroupStr(states, '"\\')),
                   any(states,
                       chain(states,
                             newArcPair(states, "\\"),
                             newArcPair(states, DEFAULT),
                             any(states, notGroupStr(states, '"\\')))),
                   newArcPair(states, '"'))
    doubleDFA = DFA(*nfaToDfa(states, *double))
    states = []
    # body of a '''...''' string: a lone ' is fine as long as it is not
    # followed by '' (see notChainStr)
    single3 = chain(states,
                    any(states, notGroupStr(states, "'\\")),
                    any(states,
                        chain(states,
                              group(states,
                                    chain(states,
                                          newArcPair(states, "\\"),
                                          newArcPair(states, DEFAULT)),
                                    chain(states,
                                          newArcPair(states, "'"),
                                          notChainStr(states, "''"))),
                              any(states, notGroupStr(states, "'\\")))),
                    chainStr(states, "'''"))
    single3DFA = NonGreedyDFA(*nfaToDfa(states, *single3))
    states = []
    double3 = chain(states,
                    any(states, notGroupStr(states, '"\\')),
                    any(states,
                        chain(states,
                              group(states,
                                    chain(states,
                                          newArcPair(states, "\\"),
                                          newArcPair(states, DEFAULT)),
                                    chain(states,
                                          newArcPair(states, '"'),
                                          notChainStr(states, '""'))),
                              any(states, notGroupStr(states, '"\\')))),
                    chainStr(states, '"""'))
    double3DFA = NonGreedyDFA(*nfaToDfa(states, *double3))
    # NOTE: 'map' shadows the builtin; kept for interface compatibility.
    # r/R/u/U map to None: the caller must look further into the prefix.
    map = {"'" : singleDFA,
           '"' : doubleDFA,
           "r" : None,
           "R" : None,
           "u" : None,
           "U" : None}
    for uniPrefix in ("", "u", "U", ):
        for rawPrefix in ("", "r", "R"):
            prefix = uniPrefix + rawPrefix
            map[prefix + "'''"] = single3DFA
            map[prefix + '"""'] = double3DFA
    return map
+
+# ______________________________________________________________________
+
def output(name, dfa_class, dfa):
    """Pretty-print *dfa* as Python source on stdout: an 'accepts'
    list, a 'states' list of {label: target} dicts, and finally the
    line that reconstructs it as automata.<dfa_class>.  The output is
    meant to be pasted into pytokenize.py."""
    import textwrap
    i = 0
    for line in textwrap.wrap(repr(dfa.accepts), width = 50):
        if i == 0:
            print "accepts =", line
        else:
            print "       ", line
        i += 1
    import StringIO
    print "states = ["
    for numstate, state in enumerate(dfa.states):
        print "    #", numstate
        s = StringIO.StringIO()
        i = 0
        for k, v in sorted(state.items()):
            i += 1
            if k == '\x00default':
                k = "automata.DEFAULT"
            else:
                k = repr(k)
            s.write(k)
            # '::' is a placeholder separator; it is turned into ': '
            # only after textwrap has split the line, below
            s.write('::')
            s.write(repr(v))
            if i < len(state):
                s.write(', ')
        s.write('},')
        i = 0
        if len(state) <= 4:
            text = [s.getvalue()]
        else:
            text = textwrap.wrap(s.getvalue(), width=36)
        for line in text:
            line = line.replace('::', ': ')
            if i == 0:
                print '    {' + line
            else:
                print '     ' + line
            i += 1
    print "    ]"
    print "%s = automata.%s(states, accepts)" % (name, dfa_class)
    print
+
def main ():
    """Generate and print all the DFAs used by pytokenize.py."""
    pseudoDFA = makePyPseudoDFA()
    output("pseudoDFA", "DFA", pseudoDFA)
    endDFAMap = makePyEndDFAMap()
    # same four DFAs, in the same order, as the original explicit calls
    for dfaName, dfaClass, quote in [("double3DFA", "NonGreedyDFA", '"""'),
                                     ("single3DFA", "NonGreedyDFA", "'''"),
                                     ("singleDFA", "DFA", "'"),
                                     ("doubleDFA", "DFA", '"')]:
        output(dfaName, dfaClass, endDFAMap[quote])

# ______________________________________________________________________

if __name__ == "__main__":
    main()
Added: pypy/branch/jitffi/pypy/interpreter/pyparser/pylexer.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/interpreter/pyparser/pylexer.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,240 @@
+# Used by genpytokenize.py to generate the parser in pytokenize.py
+from pypy.interpreter.pyparser.automata import DFA, DEFAULT
+
+class EMPTY: pass
+
def newArcPair (states, transitionLabel):
    """Append two fresh NFA states joined by a single arc labelled
    *transitionLabel*; return their (start, end) indices."""
    startIndex = len(states)
    endIndex = startIndex + 1
    # start state has exactly one outgoing arc; end state has none yet
    states.append([(transitionLabel, endIndex)])
    states.append([])
    return startIndex, endIndex
+
+# ______________________________________________________________________
+
def chain (states, *stateIndexPairs):
    """Concatenate NFA fragments in sequence, epsilon-linking each
    fragment's finish to the next fragment's start; return the
    (start, finish) pair of the combined fragment."""
    if len(stateIndexPairs) <= 1:
        # a single fragment needs no linking
        return stateIndexPairs[0]
    start, prevFinish = stateIndexPairs[0]
    for curStart, curFinish in stateIndexPairs[1:]:
        states[prevFinish].append((EMPTY, curStart))
        prevFinish = curFinish
    return start, prevFinish
+
+
+# ______________________________________________________________________
+
def chainStr (states, str):
    """Build a fragment matching the literal string *str*, one
    single-character arc per character, chained together."""
    return chain(states, *[newArcPair(states, ch) for ch in str])
+
+# ______________________________________________________________________
+
def notChainStr (states, str):
    """XXX I'm not sure this is how it should be done, but I'm going to
    try it anyway. Note that for this case, I require only single character
    arcs, since I would have to basically invert all accepting states and
    non-accepting states of any sub-NFA's.
    """
    assert len(str) > 0
    # one single-character arc per character of str
    arcs = map(lambda x : newArcPair(states, x), str)
    finish = len(states)
    states.append([])
    start, lastFinish = arcs[0]
    # accept if we bail out of the chain before completing all of str
    states[start].append((EMPTY, finish))
    # NOTE(review): lastFinish is never advanced inside this loop, so every
    # later arc is linked from the FIRST arc's finish.  Harmless for the
    # two-character strings ("''", '""') this is used with, but suspicious
    # for anything longer -- confirm before reusing.
    for crntStart, crntFinish in arcs[1:]:
        states[lastFinish].append((EMPTY, crntStart))
        states[crntStart].append((EMPTY, finish))
    return start, finish
+
+# ______________________________________________________________________
+
def group (states, *stateIndexPairs):
    """Build an alternation: a fresh start state with an epsilon arc to
    every alternative, and a fresh finish state reached by an epsilon
    arc from each alternative's finish."""
    if len(stateIndexPairs) <= 1:
        # a single alternative needs no wrapping
        return stateIndexPairs[0]
    entry = len(states)
    exitState = entry + 1
    entryArcs = []
    states.append(entryArcs)
    states.append([])
    for altStart, altFinish in stateIndexPairs:
        entryArcs.append((EMPTY, altStart))
        states[altFinish].append((EMPTY, exitState))
    return entry, exitState
+
+# ______________________________________________________________________
+
def groupStr (states, str):
    """Build a fragment matching any single character of *str*."""
    return group(states, *[newArcPair(states, ch) for ch in str])
+
+# ______________________________________________________________________
+
def notGroup (states, *stateIndexPairs):
    """Like group, but will add a DEFAULT transition to a new end state,
    causing anything in the group to not match by going to a dead state.
    XXX I think this is right...
    """
    # the group's own finish state becomes a dead end ('dead')...
    start, dead = group(states, *stateIndexPairs)
    finish = len(states)
    states.append([])
    # ...and only the catch-all DEFAULT arc reaches the real finish
    states[start].append((DEFAULT, finish))
    return start, finish
+
+# ______________________________________________________________________
+
def notGroupStr (states, str):
    """Build a fragment matching any single character NOT in *str*."""
    return notGroup(states, *[newArcPair(states, ch) for ch in str])
+# ______________________________________________________________________
+
def any (states, *stateIndexPairs):
    """Kleene star: wrap the alternatives in a group and epsilon-loop
    the finish back to the start, accepting zero or more repetitions.
    (Deliberately shadows the builtin any() -- historical DSL name.)"""
    loopStart, loopEnd = group(states, *stateIndexPairs)
    states[loopEnd].append((EMPTY, loopStart))
    # start and finish coincide: zero repetitions are accepted
    return loopStart, loopStart
+
+# ______________________________________________________________________
+
def maybe (states, *stateIndexPairs):
    """Optional fragment: add an epsilon arc from start to finish so
    the whole group may be skipped."""
    begin, end = group(states, *stateIndexPairs)
    states[begin].append((EMPTY, end))
    return begin, end
+
+# ______________________________________________________________________
+
def atleastonce (states, *stateIndexPairs):
    """One-or-more repetitions: like any(), but start and finish stay
    distinct, so at least one pass through the group is required."""
    begin, end = group(states, *stateIndexPairs)
    states[end].append((EMPTY, begin))
    return begin, end
+
+# ______________________________________________________________________
+
def closure (states, start, result=0):
    """Return a bitmask of all states reachable from *start* through
    EMPTY (epsilon) arcs, including *start* itself.

    *result* carries the already-visited set, so the recursion
    terminates on epsilon cycles; callers may pass a previous mask
    (or None) to extend it.

    The original used Python-2-only long literals (0L/1L); plain ints
    auto-promote since PEP 237, so the 'L' suffixes are dropped.
    """
    if result is None:          # original compared 'None == result'
        result = 0
    if not (result & (1 << start)):
        result |= (1 << start)
        for label, arrow in states[start]:
            if label == EMPTY:
                result |= closure(states, arrow, result)
    return result
+
+# ______________________________________________________________________
+
def nfaToDfa (states, start, finish):
    """Subset construction: convert the NFA in *states* (with the given
    start and finish state indices) into a DFA, returned as the
    (states, accepts) pair produced by finalizeTempDfa().

    Each temporary DFA state is [nfaClosure : long bitmask,
    [tempArc], accept : bool], with tempArc = [label, arrow, nfaClosure].
    """
    tempStates = []
    startClosure = closure(states, start)
    # a temp state accepts iff its closure contains the NFA finish state
    crntTempState = [startClosure, [], 0 != (startClosure & (1L << finish))]
    tempStates.append(crntTempState)
    index = 0
    # worklist loop: tempStates grows while we process it
    while index < len(tempStates):
        crntTempState = tempStates[index]
        crntClosure, crntArcs, crntAccept = crntTempState
        # collect, per label, the union of closures of all NFA targets
        for index2 in range(0, len(states)):
            if 0 != (crntClosure & (1L << index2)):
                for label, nfaArrow in states[index2]:
                    if label == EMPTY:
                        continue
                    foundTempArc = False
                    for tempArc in crntArcs:
                        if tempArc[0] == label:
                            foundTempArc = True
                            break
                    if not foundTempArc:
                        tempArc = [label, -1, 0L]
                        crntArcs.append(tempArc)
                    tempArc[2] = closure(states, nfaArrow, tempArc[2])
        # resolve each arc's target closure to a temp-state index,
        # creating new temp states for closures not seen before
        for arcIndex in range(0, len(crntArcs)):
            label, arrow, targetStates = crntArcs[arcIndex]
            targetFound = False
            arrow = 0
            for destTempState in tempStates:
                if destTempState[0] == targetStates:
                    targetFound = True
                    break
                arrow += 1
            if not targetFound:
                assert arrow == len(tempStates)
                newState = [targetStates, [], 0 != (targetStates &
                                                    (1L << finish))]
                tempStates.append(newState)
            crntArcs[arcIndex][1] = arrow
        index += 1
    tempStates = simplifyTempDfa(tempStates)
    states = finalizeTempDfa(tempStates)
    return states
+
+# ______________________________________________________________________
+
def sameState (s1, s2):
    """sameState(s1, s2)
    True if the two temp DFA states are equivalent: same accept flag and
    pairwise-equal arcs, ignoring each arc's trailing nfaClosure field.
    Note:
        state := [ nfaclosure : Long, [ arc ], accept : Boolean ]
        arc := [ label, arrow : Int, nfaClosure : Long ]
    """
    arcs1, arcs2 = s1[1], s2[1]
    if len(arcs1) != len(arcs2) or s1[2] != s2[2]:
        return False
    for arc1, arc2 in zip(arcs1, arcs2):
        # compare label and arrow only; [:-1] drops the nfaClosure
        if arc1[:-1] != arc2[:-1]:
            return False
    return True
+
+# ______________________________________________________________________
+
def simplifyTempDfa (tempStates):
    """simplifyTempDfa (tempStates)

    Merge equivalent states: whenever state i is sameState() as an
    earlier state j, redirect every arc pointing at i to j and mark i
    deleted (set to None).  Repeats until a full pass makes no change,
    since a merge can make further states equivalent.
    """
    changes = True
    deletedStates = []
    while changes:
        changes = False
        for i in range(1, len(tempStates)):
            if i in deletedStates:
                continue
            for j in range(0, i):
                if j in deletedStates:
                    continue
                if sameState(tempStates[i], tempStates[j]):
                    deletedStates.append(i)
                    # rewrite all arrows i -> j everywhere
                    for k in range(0, len(tempStates)):
                        if k in deletedStates:
                            continue
                        for arc in tempStates[k][1]:
                            if arc[1] == i:
                                arc[1] = j
                    changes = True
                    break
    # deleted slots become None; finalizeTempDfa() skips them
    for stateIndex in deletedStates:
        tempStates[stateIndex] = None
    return tempStates
+# ______________________________________________________________________
+
def finalizeTempDfa (tempStates):
    """finalizeTempDfa (tempStates)

    Input domain:
    tempState := [ nfaClosure : Long, [ tempArc ], accept : Boolean ]
    tempArc := [ label, arrow, nfaClosure ]

    Output domain:
    state := [ arcMap, accept : Boolean ]

    Entries that simplifyTempDfa() set to None are skipped, and arc
    targets are renumbered through stateMap accordingly.
    """
    states = []
    accepts = []
    stateMap = {}
    # first pass: assign compact indices to the surviving states
    for tempIndex, tempState in enumerate(tempStates):
        if tempState is not None:
            stateMap[tempIndex] = len(states)
            states.append({})
            accepts.append(tempState[2])
    # second pass: translate each arc into the {label: newIndex} map
    for tempIndex, newIndex in stateMap.items():
        stateBitset, tempArcs, accepting = tempStates[tempIndex]
        arcMap = states[newIndex]
        for tempArc in tempArcs:
            arcMap[tempArc[0]] = stateMap[tempArc[1]]
    return states, accepts
+
Added: pypy/branch/jitffi/pypy/jit/backend/conftest.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/jit/backend/conftest.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,35 @@
+"""
+This conftest adds options used by test/test_random and
+x86/test/test_zll_random.
+"""
+import py, random
+
+option = py.test.config.option
+
+def pytest_addoption(parser):
+ group = parser.getgroup('random test options')
+ group.addoption('--random-seed', action="store", type="int",
+ default=random.randrange(0, 10000),
+ dest="randomseed",
+ help="choose a fixed random seed")
+ group.addoption('--backend', action="store",
+ default='llgraph',
+ choices=['llgraph', 'x86'],
+ dest="backend",
+ help="select the backend to run the functions with")
+ group.addoption('--block-length', action="store", type="int",
+ default=30,
+ dest="block_length",
+ help="insert up to this many operations in each test")
+ group.addoption('--n-vars', action="store", type="int",
+ default=10,
+ dest="n_vars",
+ help="supply this many randomly-valued arguments to "
+ "the function")
+ group.addoption('--repeat', action="store", type="int",
+ default=15,
+ dest="repeat",
+ help="run the test this many times"),
+ group.addoption('--output', '-O', action="store", type="str",
+ default="", dest="output",
+ help="dump output to a file")
Added: pypy/branch/jitffi/pypy/jit/backend/x86/test/test_string.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/jit/backend/x86/test/test_string.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,9 @@
+import py
+from pypy.jit.metainterp.test import test_string
+from pypy.jit.backend.x86.test.test_basic import Jit386Mixin
+
class TestString(Jit386Mixin, test_string.StringTests):
    # for the individual tests see
    # ====> ../../../metainterp/test/test_string.py
    # NOTE(review): CALL/CALL_PURE name the operations the StringTests
    # look for when run on the x86 backend -- confirm in test_string.py
    CALL = 'call'
    CALL_PURE = 'call_pure'
Added: pypy/branch/jitffi/pypy/jit/metainterp/optimize_nopspec.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/jit/metainterp/optimize_nopspec.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,41 @@
+
+from pypy.rlib.debug import debug_start, debug_stop
+from pypy.jit.metainterp.optimizeopt import optimize_loop_1, optimize_bridge_1
+from pypy.jit.metainterp.optimizefindnode import PerfectSpecializationFinder
+from pypy.jit.metainterp.optimizefindnode import BridgeSpecializationFinder
+
def optimize_loop(metainterp_sd, old_loop_tokens, loop):
    """Optimize *loop* without perfect specialization; the real work is
    in _optimize_loop(), wrapped in a jit-optimize debug section."""
    debug_start("jit-optimize")
    try:
        return _optimize_loop(metainterp_sd, old_loop_tokens, loop)
    finally:
        debug_stop("jit-optimize")
+
def _optimize_loop(metainterp_sd, old_loop_tokens, loop):
    """Log the unoptimized loop, run the node finder, and either reuse
    an existing compatible loop token or optimize in place."""
    cpu = metainterp_sd.cpu
    metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations)
    finder = PerfectSpecializationFinder(cpu)
    finder.find_nodes_loop(loop, False)
    if old_loop_tokens:
        # a loop already exists: reuse it instead of compiling a new one
        return old_loop_tokens[0]
    optimize_loop_1(metainterp_sd, loop)
    return None
+
def optimize_bridge(metainterp_sd, old_loop_tokens, bridge):
    """Optimize *bridge* without perfect specialization; the real work
    is in _optimize_bridge(), wrapped in a jit-optimize debug section."""
    debug_start("jit-optimize")
    try:
        return _optimize_bridge(metainterp_sd, old_loop_tokens, bridge)
    finally:
        debug_stop("jit-optimize")
+
def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge):
    """Log the unoptimized bridge and, if a target loop exists, patch
    the bridge's final jump to it and optimize; return the token of the
    loop jumped to, or None when there is nothing to attach to."""
    cpu = metainterp_sd.cpu
    metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations)
    finder = BridgeSpecializationFinder(cpu)
    finder.find_nodes_bridge(bridge)
    if old_loop_tokens:
        old_loop_token = old_loop_tokens[0]
        bridge.operations[-1].setdescr(old_loop_token)   # patch jump target
        optimize_bridge_1(metainterp_sd, bridge)
        return old_loop_token
    return None
Added: pypy/branch/jitffi/pypy/jit/metainterp/test/test_loop_nopspec.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/jit/metainterp/test/test_loop_nopspec.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,27 @@
+
+from pypy.jit.metainterp.test import test_loop, test_send
+from pypy.jit.metainterp.warmspot import ll_meta_interp
+from pypy.rlib.jit import OPTIMIZER_NO_PERFECTSPEC
+from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin
+
class LoopNoPSpecTest(test_send.SendTests):
    """Re-run the SendTests through ll_meta_interp with the
    OPTIMIZER_NO_PERFECTSPEC optimizer; the loop-shape check_* hooks
    are overridden to no-ops."""
    def meta_interp(self, func, args, **kwds):
        return ll_meta_interp(func, args, optimizer=OPTIMIZER_NO_PERFECTSPEC,
                              CPUClass=self.CPUClass,
                              type_system=self.type_system,
                              **kwds)

    def check_loops(self, *args, **kwds):
        pass

    def check_loop_count(self, count):
        pass

    def check_jumps(self, maxcount):
        pass

class TestLLtype(LoopNoPSpecTest, LLJitMixin):
    pass

class TestOOtype(LoopNoPSpecTest, OOJitMixin):
    pass
Added: pypy/branch/jitffi/pypy/jit/tl/jittest.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/jit/tl/jittest.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,43 @@
+"""
+This file is imported by pypy.translation.driver when running the
+target --jittest. Feel free to hack it as needed; it is imported
+only after the '---> Checkpoint' fork.
+"""
+
+from pypy.conftest import option
+from pypy.rpython.lltypesystem import lltype
+from pypy.rpython.llinterp import LLInterpreter
+from pypy.rpython.annlowlevel import llstr
+from pypy.jit.metainterp import warmspot
+from pypy.rlib.jit import OPTIMIZER_FULL
+
+
+ARGS = ["jittest", "100"]
+
+
def jittest(driver):
    """Entry point called by the translation driver for --jittest: run
    the first translated graph under LLInterpreter with the llgraph CPU
    and the driver's jit policy."""
    graph = driver.translator.graphs[0]
    interp = LLInterpreter(driver.translator.rtyper, malloc_check=False)

    # make malloc_nonmovable always fail, forcing the fallback path
    def returns_null(T, *args, **kwds):
        return lltype.nullptr(T)
    interp.heap.malloc_nonmovable = returns_null     # XXX

    get_policy = driver.extra['jitpolicy']
    jitpolicy = get_policy(driver)

    from pypy.jit.backend.llgraph.runner import LLtypeCPU
    apply_jit(jitpolicy, interp, graph, LLtypeCPU)
+
+
def apply_jit(policy, interp, graph, CPUClass):
    """Run warmspot.jittify_and_run() on *graph*, passing ARGS as the
    low-level list-of-strings argument (i.e. as argv)."""
    print 'warmspot.jittify_and_run() started...'
    option.view = True
    # build an RPython list of rpython-level strings mirroring ARGS
    LIST = graph.getargs()[0].concretetype
    lst = LIST.TO.ll_newlist(len(ARGS))
    for i, arg in enumerate(ARGS):
        lst.ll_setitem_fast(i, llstr(arg))
    warmspot.jittify_and_run(interp, graph, [lst], policy=policy,
                             listops=True, CPUClass=CPUClass,
                             backendopt=True, inline=True,
                             optimizer=OPTIMIZER_FULL)
Added: pypy/branch/jitffi/pypy/rpython/memory/gc/inspector.py
==============================================================================
--- (empty file)
+++ pypy/branch/jitffi/pypy/rpython/memory/gc/inspector.py Tue Oct 12 13:41:51 2010
@@ -0,0 +1,200 @@
+"""
+Utility RPython functions to inspect objects in the GC.
+"""
+from pypy.rpython.lltypesystem import lltype, llmemory, rffi
+from pypy.rlib.objectmodel import free_non_gc_object
+from pypy.rpython.module.ll_os import underscore_on_windows
+from pypy.rlib import rposix
+
+from pypy.rpython.memory.support import AddressDict, get_address_stack
+
+
+# ---------- implementation of pypy.rlib.rgc.get_rpy_roots() ----------
+
def _counting_rpy_root(gc, root):
    # walk_roots() callback: just count, one call per root
    gc._count_rpy += 1

def _do_count_rpy_roots(gc):
    # count the current GC roots without allocating anything
    gc._count_rpy = 0
    gc.root_walker.walk_roots(
        _counting_rpy_root,
        _counting_rpy_root,
        _counting_rpy_root)
    return gc._count_rpy

def _append_rpy_root(gc, root):
    # Can use the gc list, but should not allocate!
    # It is essential that the list is not resizable!
    lst = gc._list_rpy
    index = gc._count_rpy
    if index >= len(lst):
        # list turned out to be too small: signal the caller to retry
        raise ValueError
    gc._count_rpy = index + 1
    lst[index] = llmemory.cast_adr_to_ptr(root.address[0], llmemory.GCREF)

def _do_append_rpy_roots(gc, lst):
    # fill 'lst' with GCREFs for the current roots; raises ValueError
    # if 'lst' is too small
    gc._count_rpy = 0
    gc._list_rpy = lst
    gc.root_walker.walk_roots(
        _append_rpy_root,
        _append_rpy_root,
        _append_rpy_root)
    gc._list_rpy = None

def get_rpy_roots(gc):
    """Return a fixed-size list of GCREFs for all current GC roots
    (padded with null GCREFs)."""
    count = _do_count_rpy_roots(gc)
    extra = 16
    while True:
        result = [lltype.nullptr(llmemory.GCREF.TO)] * (count + extra)
        try:
            _do_append_rpy_roots(gc, result)
        except ValueError:
            # more roots appeared since they were counted: grow and retry
            extra *= 3
        else:
            return result
+
+# ---------- implementation of pypy.rlib.rgc.get_rpy_referents() ----------
+
def _count_rpy_referent(pointer, gc):
    # trace() callback: count one referent
    gc._count_rpy += 1

def _do_count_rpy_referents(gc, gcref):
    # count the objects directly referenced by 'gcref'
    gc._count_rpy = 0
    gc.trace(llmemory.cast_ptr_to_adr(gcref), _count_rpy_referent, gc)
    return gc._count_rpy

def _append_rpy_referent(pointer, gc):
    # Can use the gc list, but should not allocate!
    # It is essential that the list is not resizable!
    lst = gc._list_rpy
    index = gc._count_rpy
    if index >= len(lst):
        raise ValueError
    gc._count_rpy = index + 1
    lst[index] = llmemory.cast_adr_to_ptr(pointer.address[0],
                                          llmemory.GCREF)

def _do_append_rpy_referents(gc, gcref, lst):
    # fill 'lst' with GCREFs for the referents of 'gcref'
    gc._count_rpy = 0
    gc._list_rpy = lst
    gc.trace(llmemory.cast_ptr_to_adr(gcref), _append_rpy_referent, gc)

def get_rpy_referents(gc, gcref):
    """Return the list of GCREFs directly referenced by *gcref*."""
    count = _do_count_rpy_referents(gc, gcref)
    result = [lltype.nullptr(llmemory.GCREF.TO)] * count
    _do_append_rpy_referents(gc, gcref, result)
    return result

# ----------

def get_rpy_memory_usage(gc, gcref):
    # size of the object, as reported by the GC's get_size_incl_hash()
    return gc.get_size_incl_hash(llmemory.cast_ptr_to_adr(gcref))

def get_rpy_type_index(gc, gcref):
    # index identifying the RPython type of the object
    typeid = gc.get_type_id(llmemory.cast_ptr_to_adr(gcref))
    return gc.get_member_index(typeid)

def is_rpy_instance(gc, gcref):
    # True if the GC reports the object's type as an RPython class
    typeid = gc.get_type_id(llmemory.cast_ptr_to_adr(gcref))
    return gc.is_rpython_class(typeid)
+
+# ----------
+
# low-level write() (or _write() on Windows), declared with
# _nowrapper=True so it can be called without the usual rffi wrapping
raw_os_write = rffi.llexternal(underscore_on_windows+'write',
                               [rffi.INT, llmemory.Address, rffi.SIZE_T],
                               rffi.SIZE_T,
                               sandboxsafe=True, _nowrapper=True)

AddressStack = get_address_stack()
+
class HeapDumper:
    """Writes a dump of the reachable heap to a file descriptor: one
    record per object (address, type index, size, referent addresses...,
    -1), buffered through a raw array of words so that dumping itself
    does not allocate GC memory."""
    _alloc_flavor_ = "raw"
    BUFSIZE = 8192 # words

    def __init__(self, gc, fd):
        self.gc = gc
        self.fd = rffi.cast(rffi.INT, fd)
        # raw (non-GC) buffer of BUFSIZE words
        self.writebuffer = lltype.malloc(rffi.LONGP.TO, self.BUFSIZE,
                                         flavor='raw')
        self.buf_count = 0
        # 'seen': objects already scheduled; 'pending': still to dump
        self.seen = AddressDict()
        self.pending = AddressStack()

    def delete(self):
        # free all raw-allocated helpers, then the dumper itself
        self.seen.delete()
        self.pending.delete()
        lltype.free(self.writebuffer, flavor='raw')
        free_non_gc_object(self)

    def flush(self):
        # write the buffered words out with the raw write() syscall
        if self.buf_count > 0:
            bytes = self.buf_count * rffi.sizeof(rffi.LONG)
            count = raw_os_write(self.fd,
                                 rffi.cast(llmemory.Address, self.writebuffer),
                                 rffi.cast(rffi.SIZE_T, bytes))
            if rffi.cast(lltype.Signed, count) != bytes:
                raise OSError(rposix.get_errno(), "raw_os_write failed")
            self.buf_count = 0
    flush._dont_inline_ = True

    def write(self, value):
        # append one word to the buffer, flushing when it fills up
        x = self.buf_count
        self.writebuffer[x] = value
        x += 1
        self.buf_count = x
        if x == self.BUFSIZE:
            self.flush()
    write._always_inline_ = True

    def write_marker(self):
        # sentinel record 0, 0, 0, -1
        self.write(0)
        self.write(0)
        self.write(0)
        self.write(-1)

    def writeobj(self, obj):
        # one record: address, type index, size, referents..., -1
        gc = self.gc
        typeid = gc.get_type_id(obj)
        self.write(llmemory.cast_adr_to_int(obj))
        self.write(gc.get_member_index(typeid))
        self.write(gc.get_size_incl_hash(obj))
        gc.trace(obj, self._writeref, None)
        self.write(-1)

    def _writeref(self, pointer, _):
        # trace() callback: emit the referent's address and schedule it
        obj = pointer.address[0]
        self.write(llmemory.cast_adr_to_int(obj))
        self.add(obj)

    def add(self, obj):
        # schedule 'obj' for dumping exactly once
        if not self.seen.contains(obj):
            self.seen.setitem(obj, obj)
            self.pending.append(obj)

    def add_roots(self):
        # collect all roots into 'pending', dump that first batch, and
        # finish the root section with a marker record
        self.gc._heap_dumper = self
        self.gc.root_walker.walk_roots(
            _hd_add_root,
            _hd_add_root,
            _hd_add_root)
        self.gc._heap_dumper = None
        pendingroots = self.pending
        self.pending = AddressStack()
        self.walk(pendingroots)
        pendingroots.delete()
        self.write_marker()

    def walk(self, pending):
        # drain 'pending'; writeobj() may push new objects via add()
        while pending.non_empty():
            self.writeobj(pending.pop())
+
def _hd_add_root(gc, root):
    # walk_roots() callback: forward the root to the active HeapDumper
    gc._heap_dumper.add(root.address[0])

def dump_rpy_heap(gc, fd):
    """Dump the whole reachable heap to file descriptor *fd* using a
    HeapDumper; always returns True."""
    heapdumper = HeapDumper(gc, fd)
    heapdumper.add_roots()
    heapdumper.walk(heapdumper.pending)
    heapdumper.flush()
    heapdumper.delete()
    return True
More information about the Pypy-commit
mailing list