[pypy-svn] r22585 - in pypy/dist/pypy: annotation doc doc/statistic doc/weekly interpreter/pyparser jit/test lib/logic module/_socket/rpython module/recparser/test rpython rpython/module rpython/ootypesystem rpython/ootypesystem/test rpython/test tool tool/pytest/run-script translator translator/asm translator/asm/i386gen translator/backendopt translator/backendopt/test translator/c/test translator/goal translator/js translator/js/test translator/llvm/test translator/microbench translator/pickle translator/test

arigo at codespeak.net
Tue Jan 24 13:31:33 CET 2006


Author: arigo
Date: Tue Jan 24 13:31:23 2006
New Revision: 22585

Modified:
   pypy/dist/pypy/annotation/description.py   (props changed)
   pypy/dist/pypy/doc/constraints-and-logic.txt   (props changed)
   pypy/dist/pypy/doc/index-report.txt   (props changed)
   pypy/dist/pypy/doc/statistic/rebin.py   (props changed)
   pypy/dist/pypy/doc/weekly/summary-2005-12-09.txt   (props changed)
   pypy/dist/pypy/doc/weekly/summary-2005-12-16.txt   (props changed)
   pypy/dist/pypy/interpreter/pyparser/ebnfgrammar.py   (props changed)
   pypy/dist/pypy/jit/test/__init__.py   (props changed)
   pypy/dist/pypy/jit/test/test_jit_tl.py   (props changed)
   pypy/dist/pypy/lib/logic/   (props changed)
   pypy/dist/pypy/lib/logic/constraint.py   (props changed)
   pypy/dist/pypy/lib/logic/test_unification.py   (props changed)
   pypy/dist/pypy/lib/logic/test_variable.py   (props changed)
   pypy/dist/pypy/lib/logic/unification.py   (props changed)
   pypy/dist/pypy/lib/logic/variable.py   (props changed)
   pypy/dist/pypy/module/_socket/rpython/rsocket.py   (contents, props changed)
   pypy/dist/pypy/module/recparser/test/test_compilehooks.py   (props changed)
   pypy/dist/pypy/rpython/module/ll_js.py   (props changed)
   pypy/dist/pypy/rpython/ootypesystem/rbuiltin.py   (props changed)
   pypy/dist/pypy/rpython/ootypesystem/test/test_oopbc.py   (props changed)
   pypy/dist/pypy/rpython/rgenop.py   (props changed)
   pypy/dist/pypy/rpython/rjs.py   (props changed)
   pypy/dist/pypy/rpython/test/test_rgenop.py   (props changed)
   pypy/dist/pypy/tool/importfun.py   (props changed)
   pypy/dist/pypy/tool/pytest/run-script/   (props changed)
   pypy/dist/pypy/tool/release_dates.py   (props changed)
   pypy/dist/pypy/tool/statistic_irc_log.py   (props changed)
   pypy/dist/pypy/tool/statistic_over_time.py   (props changed)
   pypy/dist/pypy/translator/asm/i386gen/__init__.py   (contents, props changed)
   pypy/dist/pypy/translator/asm/i386gen/i386_assembler.py   (contents, props changed)
   pypy/dist/pypy/translator/asm/model.py   (props changed)
   pypy/dist/pypy/translator/asm/simulator.py   (contents, props changed)
   pypy/dist/pypy/translator/backendopt/escape.py   (props changed)
   pypy/dist/pypy/translator/backendopt/merge_if_blocks.py   (props changed)
   pypy/dist/pypy/translator/backendopt/test/test_escape.py   (props changed)
   pypy/dist/pypy/translator/backendopt/test/test_merge_if_blocks.py   (props changed)
   pypy/dist/pypy/translator/c/test/test_coroutine.py   (contents, props changed)
   pypy/dist/pypy/translator/goal/targetebnflexer.py   (props changed)
   pypy/dist/pypy/translator/goal/targetrecursivestandalone.py   (props changed)
   pypy/dist/pypy/translator/interactive.py   (props changed)
   pypy/dist/pypy/translator/js/conftest.py   (props changed)
   pypy/dist/pypy/translator/js/optimize.py   (props changed)
   pypy/dist/pypy/translator/js/support.py   (props changed)
   pypy/dist/pypy/translator/js/test/browsertest.py   (props changed)
   pypy/dist/pypy/translator/js/test/test_jseval.py   (props changed)
   pypy/dist/pypy/translator/js/test/test_merge_if_blocks.py   (props changed)
   pypy/dist/pypy/translator/llvm/test/test_merge_if_blocks.py   (props changed)
   pypy/dist/pypy/translator/microbench/   (props changed)
   pypy/dist/pypy/translator/microbench/microbench.py   (props changed)
   pypy/dist/pypy/translator/microbench/test_count1.py   (props changed)
   pypy/dist/pypy/translator/microbench/test_create1.py   (props changed)
   pypy/dist/pypy/translator/pickle/genpickle.py   (contents, props changed)
   pypy/dist/pypy/translator/test/test_geninterp.py   (contents, props changed)
   pypy/dist/pypy/translator/test/test_interactive.py   (props changed)
   pypy/dist/pypy/translator/test/test_rpystone.py   (contents, props changed)
Log:
fixeol


Modified: pypy/dist/pypy/module/_socket/rpython/rsocket.py
==============================================================================
--- pypy/dist/pypy/module/_socket/rpython/rsocket.py	(original)
+++ pypy/dist/pypy/module/_socket/rpython/rsocket.py	Tue Jan 24 13:31:23 2006
@@ -1,55 +1,55 @@
-"""
-Helper file for Python equivalents of socket specific calls.
-"""
-
-import socket
-
-# HACK: We have to prevent GC to collect the socket object we create within this
-# module. Because socket.close() is called on GC this can lead to strange
-# effects in corner cases where file descriptors are reused.
-socket_cache = {}
-keep_sockets_alive = []
-
-class ADDRINFO(object):
-    # a simulated addrinfo structure from C, i.e. a chained list
-    # returned by getaddrinfo()
-    def __init__(self, host, port, family, socktype, proto, flags):
-        addrinfo = socket.getaddrinfo(host, port,
-                                      family, socktype, proto, flags)
-        self._entries = iter(addrinfo)
-
-    def nextinfo(self):
-        try:
-            info = self._entries.next()
-        except StopIteration:
-            return [0] * 8
-
-        return info[:-1] + info[-1]
-
-    def free(self):
-        pass
-
-def getaddrinfo(host, port, family, socktype, proto, flags):
-    return ADDRINFO(host, port, family, socktype, proto, flags)
-
-def newsocket(family, type, protocol):
-    s = socket.socket(family, type, protocol)
-    fileno = s.fileno()
-    if socket_cache.has_key(fileno):
-        keep_sockets_alive.append(socket_cache[fileno])
-    socket_cache[fileno] = s
-    return fileno
-
-def connect(fd, sockname, family):
-    s = socket_cache[fd]
-    if family == socket.AF_INET:
-        s.connect(sockname[:2])
-    elif family == socket.AF_INET6:
-        s.connect(sockname)
-
-def getpeername(fd):
-    s = socket_cache[fd]
-    return s.getpeername()
-
-def freesockname(sockname):
-    pass
+"""
+Helper file for Python equivalents of socket specific calls.
+"""
+
+import socket
+
+# HACK: We have to prevent GC to collect the socket object we create within this
+# module. Because socket.close() is called on GC this can lead to strange
+# effects in corner cases where file descriptors are reused.
+socket_cache = {}
+keep_sockets_alive = []
+
+class ADDRINFO(object):
+    # a simulated addrinfo structure from C, i.e. a chained list
+    # returned by getaddrinfo()
+    def __init__(self, host, port, family, socktype, proto, flags):
+        addrinfo = socket.getaddrinfo(host, port,
+                                      family, socktype, proto, flags)
+        self._entries = iter(addrinfo)
+
+    def nextinfo(self):
+        try:
+            info = self._entries.next()
+        except StopIteration:
+            return [0] * 8
+
+        return info[:-1] + info[-1]
+
+    def free(self):
+        pass
+
+def getaddrinfo(host, port, family, socktype, proto, flags):
+    return ADDRINFO(host, port, family, socktype, proto, flags)
+
+def newsocket(family, type, protocol):
+    s = socket.socket(family, type, protocol)
+    fileno = s.fileno()
+    if socket_cache.has_key(fileno):
+        keep_sockets_alive.append(socket_cache[fileno])
+    socket_cache[fileno] = s
+    return fileno
+
+def connect(fd, sockname, family):
+    s = socket_cache[fd]
+    if family == socket.AF_INET:
+        s.connect(sockname[:2])
+    elif family == socket.AF_INET6:
+        s.connect(sockname)
+
+def getpeername(fd):
+    s = socket_cache[fd]
+    return s.getpeername()
+
+def freesockname(sockname):
+    pass

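The rsocket helpers above expose socket calls at RPython level in a C-like shape: ADDRINFO wraps the result of socket.getaddrinfo() as a simulated chained list, nextinfo() hands back one flattened entry per call and signals exhaustion with a row of eight zeros rather than StopIteration, and newsocket() keeps superseded socket objects alive so a reused file descriptor is not closed behind the caller's back. The following is only an illustrative sketch of that flattening, written against the plain CPython socket module; the iter_addrinfo name and the host/port values are made up for the example and are not part of this commit.

    import socket

    def iter_addrinfo(host, port):
        # Flatten each (family, socktype, proto, canonname, sockaddr) entry the
        # same way ADDRINFO.nextinfo() does above; a real caller of the helper
        # would instead loop until it sees the all-zero sentinel row.
        for info in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM, 0, 0):
            yield info[:-1] + info[-1]

    for flat in iter_addrinfo('localhost', 80):
        print flat    # e.g. (2, 1, 6, '', '127.0.0.1', 80) for an AF_INET entry
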
Modified: pypy/dist/pypy/translator/asm/i386gen/__init__.py
==============================================================================
--- pypy/dist/pypy/translator/asm/i386gen/__init__.py	(original)
+++ pypy/dist/pypy/translator/asm/i386gen/__init__.py	Tue Jan 24 13:31:23 2006
@@ -1 +1 @@
-#thats all
+#thats all

Modified: pypy/dist/pypy/translator/asm/i386gen/i386_assembler.py
==============================================================================
--- pypy/dist/pypy/translator/asm/i386gen/i386_assembler.py	(original)
+++ pypy/dist/pypy/translator/asm/i386gen/i386_assembler.py	Tue Jan 24 13:31:23 2006
@@ -1,44 +1,44 @@
-"""i386 Basic assembler...
-Designed to mirror the PPC assembler system, operands added as required.
-
-Current system needs to assemble given, code, link into python, and return python
-callabale.  (Stub routine currently given).
-
-"""
-
-class i386Assembler:
-
-    def __init__(self):
-        self._opcodes=[]
-
-    def __getattr__(self,attr):
-        def func(*args):
-            return self.op(attr,args)
-        return func
-
-    def op(self,opcode,*args):
-        self._opcodes.append((opcode,args))
-
-    def Make_func(cls,assembler,input='ii',output='i'):
-        return lambda x,y:x+y+1
-
-    Make_func=classmethod(Make_func)
-
-    def dump(self):
-        l=1000
-        for op in self._opcodes:
-            print '>>%d :%s' %(l,str(op))
-            l+=1
-
-make_func=i386Assembler.Make_func
-
-
-if __name__=='__main__':
-    a=i386Assembler()
-    a.op('mov','ax,''bx')
-
-    a.mov('spi','esi')
-    print a._opcodes
-    a.dump()
-
-
+"""i386 Basic assembler...
+Designed to mirror the PPC assembler system, operands added as required.
+
+Current system needs to assemble given, code, link into python, and return python
+callabale.  (Stub routine currently given).
+
+"""
+
+class i386Assembler:
+
+    def __init__(self):
+        self._opcodes=[]
+
+    def __getattr__(self,attr):
+        def func(*args):
+            return self.op(attr,args)
+        return func
+
+    def op(self,opcode,*args):
+        self._opcodes.append((opcode,args))
+
+    def Make_func(cls,assembler,input='ii',output='i'):
+        return lambda x,y:x+y+1
+
+    Make_func=classmethod(Make_func)
+
+    def dump(self):
+        l=1000
+        for op in self._opcodes:
+            print '>>%d :%s' %(l,str(op))
+            l+=1
+
+make_func=i386Assembler.Make_func
+
+
+if __name__=='__main__':
+    a=i386Assembler()
+    a.op('mov','ax,''bx')
+
+    a.mov('spi','esi')
+    print a._opcodes
+    a.dump()
+
+

Modified: pypy/dist/pypy/translator/asm/simulator.py
==============================================================================
--- pypy/dist/pypy/translator/asm/simulator.py	(original)
+++ pypy/dist/pypy/translator/asm/simulator.py	Tue Jan 24 13:31:23 2006
@@ -1,217 +1,217 @@
-""" IRM Simulator """
-import autopath
-from pypy.rpython.llinterp import LLFrame
-#from pypy.translator.asm.infregmachine import Instruction
-
-"""
-Notes on the register allocation algorithm:
-
-
-To convert our IRM to an FRM machine, we must perform some swapping of the registers.  This is in effect
-'paging', we are only allowed to perform XCHG operations on the slow (memory) registers, while we can do
-anything with our fast (CPU registers).
-
-There are various algorithms available, including the Linear Scan Algorithm (google this), but for now we
-have decided to implement a simple, but (hopefully) reasonably effective last-recently-used algortithm.
-
-Regardless of the swap algorithm , at each stage we must keep track of which IRM register is held in which
-FRM register.  Our original test-suite simply gave the register usages, and checked the swap/usage sequence.
-
-We need to rebuild the suite, checking the register map at EACH stage of the process.  Fiddly, but important!
-
-We need some notation:
-
-IRMxxx denotes an Infinite Register Machine that will use at most xxx registers
-
-FRMxxx.yyy denotes a finite-register machine, with xxx fast registers, and a total of yyy registers.
-
-"""
-
-
-def regmap(regperm):
-    """answer a map IRM notation -> current FRM notation"""
-    map={}
-    for reg in range(1,len(regperm)):
-        #print reg,map,regperm
-        map[reg]=regperm.index(reg)
-    return map
-
-def maxRegister(commands):
-    pool=[]
-    for cmd in commands:
-        if not isinstance(cmd,str):
-            pool+=cmd.registers_used()
-    if pool==[]:
-        return 1
-    return max(pool)
-
-
-def TranslateProgram(commands,nreg):
-    """answer this program into one which only uses nreg fast registers"""
-    totreg=maxRegister(commands)
-    assert nreg>=3 ,'Some commands may use 3 registers!!!!'
-    newprog=[]
-    pipe=[]
-    
-    # this must be as big as the total number of registers+1 (we start
-    # at index 1)
-    old2new=range(0,totreg+1)
-
-
-    for cmd in commands:
-        # if we use any registers, we must possibly swap first, and
-        # then remap
-        if isinstance(cmd,str) or cmd.name in ('J','JT','JF'):
-            # label or jump so  pass through
-            newprog.append(cmd)
-        else:
-            # so now remap the registers!
-
-            regused=cmd.registers_used()
-            t2p=[old2new[x] for x in regused]
-            for reg in regused:
-                goingin=regmap(old2new)[reg]
-                if goingin>nreg:
-                    if pipe[-1] not in t2p:
-                        index=-1
-                    elif pipe[-2] not in t2p:
-                        index=-2
-                    else:
-                        assert pipe[-3]!=goingin #this must be true for nreg>=3
-                        index=-3
-                    #now swap to end of pipe, so code as before works.
-                    pipe[index],pipe[-1]=pipe[-1],pipe[index]
-                    goingout=pipe[-1]
-                    newprog.append(Instruction('EXCH',(goingin,goingout)))
-                    old2new[goingout],old2new[goingin] = old2new[goingin],old2new[goingout]
-                    val=goingout
-                else:
-                    val=goingin
-                pipe=[val]+pipe
-
-                if len(pipe)>nreg:
-                    pipe.pop()   #this value fell out of the pipe
-                assert len(pipe)<=nreg
-            #now we can emit the command with registers remapped
-            rm=regmap(old2new)
-            newprog.append(cmd.renumber(rm))
-    return newprog
-
-
-
-class Machine:
-
-    def RunProgram(cls,commands,args=[],tracing=False):
-        nreg=maxRegister(commands)
-        machine=Machine(nreg,args)
-        machine._tracing = tracing
-        ip=0
-        if tracing:
-            print 'args', args
-        while not machine.stopped():
-            if ip>=len(commands):
-                return None
-            cmd=commands[ip]
-            if isinstance(cmd,str):
-                pass
-            elif cmd.name=='J':
-                ip=commands.index(cmd.arguments[0])
-            elif cmd.name=='JT':
-                c = machine.creg()
-                assert c is not None
-                if c:
-                    ip=commands.index(cmd.arguments[0])
-            else:
-                machine.op(cmd.name,*cmd.arguments)
-            ip+=1
-        if tracing:
-            print 'ret', machine._retval
-        return machine._retval
-    RunProgram=classmethod(RunProgram)
-
-
-    def __init__(self,nreg,args):
-        self._nreg=nreg
-        self._args=args
-        self._stopped=False
-        self._creg=None
-        self._tracing = False
-        self._registers=[None for x in range(nreg+1)]
-
-    def creg(self):
-        return self._creg
-
-    def registers(self):
-        return self._registers[1:]
-
-    def register(self, reg):
-        v = self._registers[reg]
-        assert v is not None
-        return v
-
-    def stopped(self):
-        return self._stopped
-
-    def op(self,opcode,*operands):
-        if self._tracing:
-            args = []
-            for arg in operands:
-                if isinstance(arg, int):
-                    args.append('r%s=%s'%(arg, self._registers[arg]))
-                else:
-                    args.append(arg)
-            print opcode, ', '.join(map(str, args))
-            #will want to trap later to defer unimplemented to the LLInterpreter...
-        m = getattr(self,opcode,None)
-        if m is not None:
-            m(*operands)
-        else:
-            self.llop(opcode, *operands)
-
-    def RETPYTHON(self,reg):
-        self._stopped=True
-        self._retval=self.register(reg)
-
-    def LIA(self,destination,argindex):
-        self._registers[destination]=self._args[argindex.value]
-
-    def LOAD(self,destination,immed):
-        self._registers[destination]=immed.value
-
-    def MOV(self,destination,source):
-        self._registers[destination]=self.register(source)
-
-    def EXCH(self,destination,source):
-        #self._registers[destination],self._registers[source]=self.register(source),self.register(destination)
-        self._registers[destination],self._registers[source]=self._registers[source],self._registers[destination]
-
-
-    def int_gt(self,rega,regb):
-        self._creg = self.register(rega) > self.register(regb)
-
-    def int_lt(self,rega,regb):
-        self._creg = self.register(rega) < self.register(regb)
-
-    def int_ge(self,rega,regb):
-        self._creg = self.register(rega) >= self.register(regb)
-
-    def int_le(self,rega,regb):
-        self._creg = self.register(rega) <= self.register(regb)
-
-    def int_eq(self,rega,regb):
-        self._creg = self.register(rega) == self.register(regb)
-
-    def int_ne(self,rega,regb):
-        self._creg = self.register(rega) != self.register(regb)
-
-
-    def llop(self, opcode, destination, *sources):
-        sourcevalues = []
-        for r in sources:
-            sourcevalues.append(self.register(r))
-        self._registers[destination] = LLFrame.__dict__['op_'+opcode](None, *sourcevalues)
-
-
-
-
-
+""" IRM Simulator """
+import autopath
+from pypy.rpython.llinterp import LLFrame
+#from pypy.translator.asm.infregmachine import Instruction
+
+"""
+Notes on the register allocation algorithm:
+
+
+To convert our IRM to an FRM machine, we must perform some swapping of the registers.  This is in effect
+'paging', we are only allowed to perform XCHG operations on the slow (memory) registers, while we can do
+anything with our fast (CPU registers).
+
+There are various algorithms available, including the Linear Scan Algorithm (google this), but for now we
+have decided to implement a simple, but (hopefully) reasonably effective last-recently-used algortithm.
+
+Regardless of the swap algorithm , at each stage we must keep track of which IRM register is held in which
+FRM register.  Our original test-suite simply gave the register usages, and checked the swap/usage sequence.
+
+We need to rebuild the suite, checking the register map at EACH stage of the process.  Fiddly, but important!
+
+We need some notation:
+
+IRMxxx denotes an Infinite Register Machine that will use at most xxx registers
+
+FRMxxx.yyy denotes a finite-register machine, with xxx fast registers, and a total of yyy registers.
+
+"""
+
+
+def regmap(regperm):
+    """answer a map IRM notation -> current FRM notation"""
+    map={}
+    for reg in range(1,len(regperm)):
+        #print reg,map,regperm
+        map[reg]=regperm.index(reg)
+    return map
+
+def maxRegister(commands):
+    pool=[]
+    for cmd in commands:
+        if not isinstance(cmd,str):
+            pool+=cmd.registers_used()
+    if pool==[]:
+        return 1
+    return max(pool)
+
+
+def TranslateProgram(commands,nreg):
+    """answer this program into one which only uses nreg fast registers"""
+    totreg=maxRegister(commands)
+    assert nreg>=3 ,'Some commands may use 3 registers!!!!'
+    newprog=[]
+    pipe=[]
+    
+    # this must be as big as the total number of registers+1 (we start
+    # at index 1)
+    old2new=range(0,totreg+1)
+
+
+    for cmd in commands:
+        # if we use any registers, we must possibly swap first, and
+        # then remap
+        if isinstance(cmd,str) or cmd.name in ('J','JT','JF'):
+            # label or jump so  pass through
+            newprog.append(cmd)
+        else:
+            # so now remap the registers!
+
+            regused=cmd.registers_used()
+            t2p=[old2new[x] for x in regused]
+            for reg in regused:
+                goingin=regmap(old2new)[reg]
+                if goingin>nreg:
+                    if pipe[-1] not in t2p:
+                        index=-1
+                    elif pipe[-2] not in t2p:
+                        index=-2
+                    else:
+                        assert pipe[-3]!=goingin #this must be true for nreg>=3
+                        index=-3
+                    #now swap to end of pipe, so code as before works.
+                    pipe[index],pipe[-1]=pipe[-1],pipe[index]
+                    goingout=pipe[-1]
+                    newprog.append(Instruction('EXCH',(goingin,goingout)))
+                    old2new[goingout],old2new[goingin] = old2new[goingin],old2new[goingout]
+                    val=goingout
+                else:
+                    val=goingin
+                pipe=[val]+pipe
+
+                if len(pipe)>nreg:
+                    pipe.pop()   #this value fell out of the pipe
+                assert len(pipe)<=nreg
+            #now we can emit the command with registers remapped
+            rm=regmap(old2new)
+            newprog.append(cmd.renumber(rm))
+    return newprog
+
+
+
+class Machine:
+
+    def RunProgram(cls,commands,args=[],tracing=False):
+        nreg=maxRegister(commands)
+        machine=Machine(nreg,args)
+        machine._tracing = tracing
+        ip=0
+        if tracing:
+            print 'args', args
+        while not machine.stopped():
+            if ip>=len(commands):
+                return None
+            cmd=commands[ip]
+            if isinstance(cmd,str):
+                pass
+            elif cmd.name=='J':
+                ip=commands.index(cmd.arguments[0])
+            elif cmd.name=='JT':
+                c = machine.creg()
+                assert c is not None
+                if c:
+                    ip=commands.index(cmd.arguments[0])
+            else:
+                machine.op(cmd.name,*cmd.arguments)
+            ip+=1
+        if tracing:
+            print 'ret', machine._retval
+        return machine._retval
+    RunProgram=classmethod(RunProgram)
+
+
+    def __init__(self,nreg,args):
+        self._nreg=nreg
+        self._args=args
+        self._stopped=False
+        self._creg=None
+        self._tracing = False
+        self._registers=[None for x in range(nreg+1)]
+
+    def creg(self):
+        return self._creg
+
+    def registers(self):
+        return self._registers[1:]
+
+    def register(self, reg):
+        v = self._registers[reg]
+        assert v is not None
+        return v
+
+    def stopped(self):
+        return self._stopped
+
+    def op(self,opcode,*operands):
+        if self._tracing:
+            args = []
+            for arg in operands:
+                if isinstance(arg, int):
+                    args.append('r%s=%s'%(arg, self._registers[arg]))
+                else:
+                    args.append(arg)
+            print opcode, ', '.join(map(str, args))
+            #will want to trap later to defer unimplemented to the LLInterpreter...
+        m = getattr(self,opcode,None)
+        if m is not None:
+            m(*operands)
+        else:
+            self.llop(opcode, *operands)
+
+    def RETPYTHON(self,reg):
+        self._stopped=True
+        self._retval=self.register(reg)
+
+    def LIA(self,destination,argindex):
+        self._registers[destination]=self._args[argindex.value]
+
+    def LOAD(self,destination,immed):
+        self._registers[destination]=immed.value
+
+    def MOV(self,destination,source):
+        self._registers[destination]=self.register(source)
+
+    def EXCH(self,destination,source):
+        #self._registers[destination],self._registers[source]=self.register(source),self.register(destination)
+        self._registers[destination],self._registers[source]=self._registers[source],self._registers[destination]
+
+
+    def int_gt(self,rega,regb):
+        self._creg = self.register(rega) > self.register(regb)
+
+    def int_lt(self,rega,regb):
+        self._creg = self.register(rega) < self.register(regb)
+
+    def int_ge(self,rega,regb):
+        self._creg = self.register(rega) >= self.register(regb)
+
+    def int_le(self,rega,regb):
+        self._creg = self.register(rega) <= self.register(regb)
+
+    def int_eq(self,rega,regb):
+        self._creg = self.register(rega) == self.register(regb)
+
+    def int_ne(self,rega,regb):
+        self._creg = self.register(rega) != self.register(regb)
+
+
+    def llop(self, opcode, destination, *sources):
+        sourcevalues = []
+        for r in sources:
+            sourcevalues.append(self.register(r))
+        self._registers[destination] = LLFrame.__dict__['op_'+opcode](None, *sourcevalues)
+
+
+
+
+

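The docstring above describes the IRM-to-FRM mapping in prose; the bookkeeping itself lives in old2new, a permutation indexed by FRM slot and holding IRM register numbers, which regmap() inverts into an IRM -> FRM dictionary. A small worked example follows, with regmap() copied verbatim from the hunk so it runs standalone; the register numbers are made up for illustration.

    def regmap(regperm):
        """answer a map IRM notation -> current FRM notation"""
        map = {}
        for reg in range(1, len(regperm)):
            map[reg] = regperm.index(reg)
        return map

    old2new = range(0, 5)          # identity: IRM register i sits in FRM slot i
    print regmap(old2new)          # {1: 1, 2: 2, 3: 3, 4: 4}

    # An EXCH between FRM slots 2 and 4 swaps the contents of the two slots,
    # the same tuple swap TranslateProgram() performs when a value has to be
    # paged into a fast register.
    old2new[2], old2new[4] = old2new[4], old2new[2]
    print regmap(old2new)          # {1: 1, 2: 4, 3: 3, 4: 2}
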
Modified: pypy/dist/pypy/translator/c/test/test_coroutine.py
==============================================================================
--- pypy/dist/pypy/translator/c/test/test_coroutine.py	(original)
+++ pypy/dist/pypy/translator/c/test/test_coroutine.py	Tue Jan 24 13:31:23 2006
@@ -1,245 +1,245 @@
-"""
-minimalistic coroutine implementation
-"""
-
-import os
-import py
-from pypy.rpython.rstack import yield_current_frame_to_caller
-
-def wrap_stackless_function(fn):
-    from pypy.translator.translator import TranslationContext
-    from pypy.translator.c.genc import CStandaloneBuilder
-    from pypy.annotation.model import SomeList, SomeString
-    from pypy.annotation.listdef import ListDef
-    from pypy.translator.backendopt.all import backend_optimizations
-
-    def entry_point(argv):
-        os.write(1, str(fn()))
-        return 0
-
-    s_list_of_strings = SomeList(ListDef(None, SomeString()))
-    s_list_of_strings.listdef.resize()
-    t = TranslationContext()
-    t.buildannotator().build_types(entry_point, [s_list_of_strings])
-    #t.view()
-    t.buildrtyper().specialize()
-    backend_optimizations(t)
-    cbuilder = CStandaloneBuilder(t, entry_point)
-    cbuilder.stackless = True
-    cbuilder.generate_source()
-    cbuilder.compile()
-    return cbuilder.cmdexec('')
-
-# ____________________________________________________________
-
-"""
-Trying to build the simplest possible coroutine interface.
-
-A coroutine is a tiny wrapper around a frame, or better
-to say a one-shot continuation. This continuation is
-resumed whenever we switch to the coroutine. On depart,
-the coroutine is updated with its current state, that is,
-the continuation is replaced. To avoid confusion with
-general continuations, we are naming them as 'frame'
-in the code. By frame, we are referring to the toplevel
-frame as a placeholder for the whole structure appended
-to it. This might be a chain of frames, or even a special
-stack structure, when we implement 'hard switching'. The
-abstraction layer should make this invisible.
-
-The 'seed' of coroutines is actually the special function
-yield_current_frame_to_caller(). It is, in a sense, able
-to return twice. When yield_current_frame_to_caller() is
-reached, it creates a resumable frame and returns it to the
-caller of the current function. This frame serves as the
-entry point to the coroutine.
-
-On every entry to the coroutine, the return value of the
-point where we left off is the continuation of the caller.
-We need to update the caller's frame with it.
-This is not necessarily the caller which created ourself.
-We are therefore keeping track of the current coroutine.
-
-The update sequence during a switch to a coroutine is:
-
-- save the return value (caller's continuation) in the
-  calling coroutine, which is still 'current'
-- change current to ourself (the callee)
-- invalidate our continuation by setting it to None.
-"""
-
-
-class CoState(object):
-    pass
-
-costate = CoState()
-
-class CoroutineDamage(SystemError):
-    pass
-
-class Coroutine(object):
-
-    def __init__(self):
-        self.frame = None
-
-    def bind(self, thunk):
-        if self.frame is not None:
-            raise CoroutineDamage
-        self.frame = self._bind(thunk)
-
-    def _bind(self, thunk):
-        binder = costate.current
-        costate.last.frame = yield_current_frame_to_caller()
-        thunk.call()
-        if binder.frame is None:
-            binder = costate.main
-        costate.last, costate.current = costate.current, binder
-        frame, binder.frame = binder.frame, None
-        return frame
-
-    def switch(self):
-        if self.frame is None:
-            raise CoroutineDamage
-        costate.last, costate.current = costate.current, self
-        frame, self.frame = self.frame, None
-        costate.last.frame = frame.switch()
-
-costate.current = costate.last = costate.main = Coroutine()
-
-def output(stuff):
-    os.write(2, stuff + '\n')
-
-def test_coroutine():
-    
-    def g(lst, coros):
-        coro_f, coro_g, coro_h = coros
-        lst.append(2)
-        output('g appended 2')
-        coro_h.switch()
-        lst.append(5)
-        output('g appended 5')
-
-    def h(lst, coros):
-        coro_f, coro_g, coro_h = coros
-        lst.append(3)
-        output('h appended 3')
-        coro_f.switch()
-        lst.append(7)
-        output('h appended 7')
-
-    class T:
-        def __init__(self, func, arg1, arg2):
-            self.func = func
-            self.arg1 = arg1
-            self.arg2 = arg2
-        def call(self):
-            self.func(self.arg1, self.arg2)
-
-    def f():
-        lst = [1]
-        coro_f = costate.main
-        coro_g = Coroutine()
-        coro_h = Coroutine()
-        coros = [coro_f, coro_g, coro_h]
-        thunk_g = T(g, lst, coros)
-        output('binding g after f set 1')
-        coro_g.bind(thunk_g)
-        thunk_h = T(h, lst, coros)
-        output('binding h after f set 1')
-        coro_h.bind(thunk_h)
-        output('switching to g')
-        coro_g.switch()
-        lst.append(4)
-        output('f appended 4')
-        coro_g.switch()
-        lst.append(6)
-        output('f appended 6')
-        coro_h.switch()
-        lst.append(8)
-        output('f appended 8')
-        n = 0
-        for i in lst:
-            n = n*10 + i
-        return n
-
-    data = wrap_stackless_function(f)
-    assert int(data.strip()) == 12345678
-
-def test_coroutine2():
-
-    class TBase:
-        def call(self):
-            pass
-        
-    class T(TBase):
-        def __init__(self, func, arg1, arg2):
-            self.func = func
-            self.arg1 = arg1
-            self.arg2 = arg2
-        def call(self):
-            self.res = self.func(self.arg1, self.arg2)
-
-    class T1(TBase):
-        def __init__(self, func, arg1):
-            self.func = func
-            self.arg1 = arg1
-        def call(self):
-            self.res = self.func(self.arg1)
-
-    def g(lst, coros):
-        coro_f1, coro_g, coro_h = coros
-        lst.append(2)
-        output('g appended 2')
-        coro_h.switch()
-        lst.append(5)
-        output('g appended 5')
-        output('exiting g')
-        
-    def h(lst, coros):
-        coro_f1, coro_g, coro_h = coros
-        lst.append(3)
-        output('h appended 3')
-        coro_f1.switch()
-        lst.append(7)
-        output('h appended 7')
-        output('exiting h')
-
-    def f1(coro_f1):
-        lst = [1]
-        coro_g = Coroutine()
-        coro_h = Coroutine()
-        coros = [coro_f1, coro_g, coro_h]
-        thunk_g = T(g, lst, coros)
-        output('binding g after f1 set 1')
-        coro_g.bind(thunk_g)
-        thunk_h = T(h, lst, coros)
-        output('binding h after f1 set 1')
-        coro_h.bind(thunk_h)
-        output('switching to g')
-        coro_g.switch()
-        lst.append(4)
-        output('f1 appended 4')
-        coro_g.switch()
-        lst.append(6)
-        output('f1 appended 6')
-        coro_h.switch()
-        lst.append(8)
-        output('f1 appended 8')
-        n = 0
-        for i in lst:
-            n = n*10 + i
-        output('exiting f1')
-        return n     
-
-    def f():
-        coro_f = costate.main
-        coro_f1 = Coroutine()
-        thunk_f1 = T1(f1, coro_f1)
-        output('binding f1 after f set 1')
-        coro_f1.bind(thunk_f1)
-        coro_f1.switch()        
-        output('return to main :-(')
-        return thunk_f1.res
-        
-    data = wrap_stackless_function(f)
-    assert int(data.strip()) == 12345678
+"""
+minimalistic coroutine implementation
+"""
+
+import os
+import py
+from pypy.rpython.rstack import yield_current_frame_to_caller
+
+def wrap_stackless_function(fn):
+    from pypy.translator.translator import TranslationContext
+    from pypy.translator.c.genc import CStandaloneBuilder
+    from pypy.annotation.model import SomeList, SomeString
+    from pypy.annotation.listdef import ListDef
+    from pypy.translator.backendopt.all import backend_optimizations
+
+    def entry_point(argv):
+        os.write(1, str(fn()))
+        return 0
+
+    s_list_of_strings = SomeList(ListDef(None, SomeString()))
+    s_list_of_strings.listdef.resize()
+    t = TranslationContext()
+    t.buildannotator().build_types(entry_point, [s_list_of_strings])
+    #t.view()
+    t.buildrtyper().specialize()
+    backend_optimizations(t)
+    cbuilder = CStandaloneBuilder(t, entry_point)
+    cbuilder.stackless = True
+    cbuilder.generate_source()
+    cbuilder.compile()
+    return cbuilder.cmdexec('')
+
+# ____________________________________________________________
+
+"""
+Trying to build the simplest possible coroutine interface.
+
+A coroutine is a tiny wrapper around a frame, or better
+to say a one-shot continuation. This continuation is
+resumed whenever we switch to the coroutine. On depart,
+the coroutine is updated with its current state, that is,
+the continuation is replaced. To avoid confusion with
+general continuations, we are naming them as 'frame'
+in the code. By frame, we are referring to the toplevel
+frame as a placeholder for the whole structure appended
+to it. This might be a chain of frames, or even a special
+stack structure, when we implement 'hard switching'. The
+abstraction layer should make this invisible.
+
+The 'seed' of coroutines is actually the special function
+yield_current_frame_to_caller(). It is, in a sense, able
+to return twice. When yield_current_frame_to_caller() is
+reached, it creates a resumable frame and returns it to the
+caller of the current function. This frame serves as the
+entry point to the coroutine.
+
+On every entry to the coroutine, the return value of the
+point where we left off is the continuation of the caller.
+We need to update the caller's frame with it.
+This is not necessarily the caller which created ourself.
+We are therefore keeping track of the current coroutine.
+
+The update sequence during a switch to a coroutine is:
+
+- save the return value (caller's continuation) in the
+  calling coroutine, which is still 'current'
+- change current to ourself (the callee)
+- invalidate our continuation by setting it to None.
+"""
+
+
+class CoState(object):
+    pass
+
+costate = CoState()
+
+class CoroutineDamage(SystemError):
+    pass
+
+class Coroutine(object):
+
+    def __init__(self):
+        self.frame = None
+
+    def bind(self, thunk):
+        if self.frame is not None:
+            raise CoroutineDamage
+        self.frame = self._bind(thunk)
+
+    def _bind(self, thunk):
+        binder = costate.current
+        costate.last.frame = yield_current_frame_to_caller()
+        thunk.call()
+        if binder.frame is None:
+            binder = costate.main
+        costate.last, costate.current = costate.current, binder
+        frame, binder.frame = binder.frame, None
+        return frame
+
+    def switch(self):
+        if self.frame is None:
+            raise CoroutineDamage
+        costate.last, costate.current = costate.current, self
+        frame, self.frame = self.frame, None
+        costate.last.frame = frame.switch()
+
+costate.current = costate.last = costate.main = Coroutine()
+
+def output(stuff):
+    os.write(2, stuff + '\n')
+
+def test_coroutine():
+    
+    def g(lst, coros):
+        coro_f, coro_g, coro_h = coros
+        lst.append(2)
+        output('g appended 2')
+        coro_h.switch()
+        lst.append(5)
+        output('g appended 5')
+
+    def h(lst, coros):
+        coro_f, coro_g, coro_h = coros
+        lst.append(3)
+        output('h appended 3')
+        coro_f.switch()
+        lst.append(7)
+        output('h appended 7')
+
+    class T:
+        def __init__(self, func, arg1, arg2):
+            self.func = func
+            self.arg1 = arg1
+            self.arg2 = arg2
+        def call(self):
+            self.func(self.arg1, self.arg2)
+
+    def f():
+        lst = [1]
+        coro_f = costate.main
+        coro_g = Coroutine()
+        coro_h = Coroutine()
+        coros = [coro_f, coro_g, coro_h]
+        thunk_g = T(g, lst, coros)
+        output('binding g after f set 1')
+        coro_g.bind(thunk_g)
+        thunk_h = T(h, lst, coros)
+        output('binding h after f set 1')
+        coro_h.bind(thunk_h)
+        output('switching to g')
+        coro_g.switch()
+        lst.append(4)
+        output('f appended 4')
+        coro_g.switch()
+        lst.append(6)
+        output('f appended 6')
+        coro_h.switch()
+        lst.append(8)
+        output('f appended 8')
+        n = 0
+        for i in lst:
+            n = n*10 + i
+        return n
+
+    data = wrap_stackless_function(f)
+    assert int(data.strip()) == 12345678
+
+def test_coroutine2():
+
+    class TBase:
+        def call(self):
+            pass
+        
+    class T(TBase):
+        def __init__(self, func, arg1, arg2):
+            self.func = func
+            self.arg1 = arg1
+            self.arg2 = arg2
+        def call(self):
+            self.res = self.func(self.arg1, self.arg2)
+
+    class T1(TBase):
+        def __init__(self, func, arg1):
+            self.func = func
+            self.arg1 = arg1
+        def call(self):
+            self.res = self.func(self.arg1)
+
+    def g(lst, coros):
+        coro_f1, coro_g, coro_h = coros
+        lst.append(2)
+        output('g appended 2')
+        coro_h.switch()
+        lst.append(5)
+        output('g appended 5')
+        output('exiting g')
+        
+    def h(lst, coros):
+        coro_f1, coro_g, coro_h = coros
+        lst.append(3)
+        output('h appended 3')
+        coro_f1.switch()
+        lst.append(7)
+        output('h appended 7')
+        output('exiting h')
+
+    def f1(coro_f1):
+        lst = [1]
+        coro_g = Coroutine()
+        coro_h = Coroutine()
+        coros = [coro_f1, coro_g, coro_h]
+        thunk_g = T(g, lst, coros)
+        output('binding g after f1 set 1')
+        coro_g.bind(thunk_g)
+        thunk_h = T(h, lst, coros)
+        output('binding h after f1 set 1')
+        coro_h.bind(thunk_h)
+        output('switching to g')
+        coro_g.switch()
+        lst.append(4)
+        output('f1 appended 4')
+        coro_g.switch()
+        lst.append(6)
+        output('f1 appended 6')
+        coro_h.switch()
+        lst.append(8)
+        output('f1 appended 8')
+        n = 0
+        for i in lst:
+            n = n*10 + i
+        output('exiting f1')
+        return n     
+
+    def f():
+        coro_f = costate.main
+        coro_f1 = Coroutine()
+        thunk_f1 = T1(f1, coro_f1)
+        output('binding f1 after f set 1')
+        coro_f1.bind(thunk_f1)
+        coro_f1.switch()        
+        output('return to main :-(')
+        return thunk_f1.res
+        
+    data = wrap_stackless_function(f)
+    assert int(data.strip()) == 12345678

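yield_current_frame_to_caller() only exists after stackless translation, so the switching that test_coroutine() exercises cannot be run as-is on plain CPython. The interleaving it asserts (digits appended in the order 1 through 8, giving 12345678) can, however, be sketched with ordinary generators and a small trampoline. The g/h/switch names below mirror the test, but the generator stand-in is purely illustrative and is not how the translated coroutines work.

    def g(lst):
        lst.append(2)
        yield 'h'                  # coro_g.switch() to h in the real test
        lst.append(5)

    def h(lst):
        lst.append(3)
        yield 'main'               # switch back to the main coroutine (f)
        lst.append(7)

    lst = [1]
    coros = {'g': g(lst), 'h': h(lst)}

    def switch(name):
        # Resume coroutines, following their switches, until control
        # returns to the main flow (either explicitly or by finishing).
        while True:
            try:
                name = coros[name].next()
            except StopIteration:
                return
            if name == 'main':
                return

    switch('g'); lst.append(4)     # g ran to its switch, h switched back
    switch('g'); lst.append(6)     # g finished, control fell back to main
    switch('h'); lst.append(8)     # h finished as well

    print ''.join(map(str, lst))   # 12345678, the value the tests assert
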
Modified: pypy/dist/pypy/translator/pickle/genpickle.py
==============================================================================
--- pypy/dist/pypy/translator/pickle/genpickle.py	(original)
+++ pypy/dist/pypy/translator/pickle/genpickle.py	Tue Jan 24 13:31:23 2006
@@ -1,793 +1,793 @@
-"""
-Generate a Python source file from the flowmodel.
-The purpose is to create something that allows
-to restart code generation after flowing and maybe
-annotation.
-
-The generated source appeared to be way too large
-for the CPython compiler. Therefore, we cut the
-source into pieces and compile them seperately.
-"""
-from __future__ import generators, division, nested_scopes
-import __future__
-all_feature_names = __future__.all_feature_names
-import os, sys, new, __builtin__
-
-from pypy.translator.gensupp import uniquemodulename, NameManager
-from pypy.translator.gensupp import builtin_base
-from pypy.rpython.rarithmetic import r_int, r_uint
-from pypy.objspace.flow.model import Variable, Constant, SpaceOperation
-from pypy.objspace.flow.model import FunctionGraph, Block, Link, Atom
-from pypy.objspace.flow.flowcontext import SpamBlock, EggBlock
-from pypy.annotation.model import SomeInteger, SomeObject, SomeChar, SomeBool
-from pypy.annotation.model import SomeList, SomeString, SomeTuple
-from pypy.annotation.unaryop import SomeInstance
-from pypy.interpreter.baseobjspace import ObjSpace
-from pypy.translator.pickle import slotted
-
-from pickle import whichmodule, PicklingError
-from copy_reg import _reconstructor
-
-import pickle
-
-from types import *
-import types, weakref
-
-class AlreadyCreated(Exception): pass
-
-# ____________________________________________________________
-
-
-#XXX Hack: This float is supposed to overflow to inf
-OVERFLOWED_FLOAT = float("1e10000000000000000000000000000000")
-#XXX Hack: and this one to underflow to -inf
-OVERFLOWED_FLOATM = float("-1e10000000000000000000000000000000")
-
-class GenPickle:
-
-    def __init__(self, translator, writer = None):
-        self.translator = translator
-        self.writer = writer
-        self.initcode = []
-        self.produce = self._produce()
-        self.produce(
-            'from __future__ import %s\n' % ', '.join(all_feature_names) +
-            'import new, types, sys',
-            )
-        self.picklenames = {}  # memoize objects
-        for name in all_feature_names + "new types sys".split():
-            self.memoize(globals()[name], name)
-        self.memoize((), '()')
-        self.namespace = NameManager()
-        self.uniquename = self.namespace.uniquename
-        self.namespace.make_reserved_names('None False True')
-        self.namespace.make_reserved_names('new types sys')
-        self.namespace.make_reserved_names(' '.join(all_feature_names))
-        self.namespace.make_reserved_names('result') # result dict
-        self.result = {}
-        self.simple_const_types = {
-            int: repr,
-            long: repr,
-            float: self.save_float,
-            str: repr,
-            unicode: repr,
-            type(None): repr,
-            bool: repr,
-            }
-        self.typecache = {} # hold types vs. nameof methods
-        # we distinguish between the "user program" and other stuff.
-        # "user program" will never use save_global.
-        self.domains = (
-            'pypy.objspace.std.',
-            'pypy.objspace.descroperation',
-            'pypy._cache.',
-            'pypy.interpreter.',
-            'pypy.module.',
-            'pypy.translator.test.',
-            '__main__',
-            )
-        self.shortnames = {
-            Variable:       'V',
-            Constant:       'C',
-            Block:          'B',
-            SpamBlock:      'SB',
-            EggBlock:       'EB',
-            Link:           'L',
-            FunctionGraph:  'FG',
-            SomeInteger:    'sI',
-            SomeObject:     'sO',
-            SomeChar:       'sC',
-            SomeBool:       'sB',
-            SomeList:       'sL',
-            SomeString:     'sS',
-            SomeTuple:      'sT',
-            SomeInstance:   'sIn',
-            }
-        self.inline_instances = {
-            SpaceOperation: True,
-            }
-
-    def save_float(self, fl):
-        if fl == OVERFLOWED_FLOAT:
-            return 'float("1e10000000000000000000000000000000")'
-        elif fl == OVERFLOWED_FLOATM:
-            return 'float("-1e10000000000000000000000000000000")'
-        return repr(fl)
-
-    def pickle(self, *args, **kwds):
-        for obj in args:
-            self.nameof(obj)
-        for obj in kwds.values():
-            self.nameof(obj)
-        self.result.update(kwds)
-
-    def finish(self):
-        self.pickle(self.result)
-        self.produce('result = %s' % self.nameof(self.result))
-        if self.writer:
-            self.writer.close()
-
-    def memoize(self, obj, name):
-        self.picklenames[id(obj)] = name
-        return name
-
-    def memoize_unique(self, obj, basename):
-        if id(obj) in self.picklenames:
-            raise AlreadyCreated
-        return self.memoize(obj, self.uniquename(basename))
-
-    def _produce(self):
-        writer = self.writer
-        down = 1234
-        cnt = [0, 0]  # text, calls
-        self.last_progress = ''
-        if writer:
-            write = writer.write
-        else:
-            write = self.initcode.append
-        def produce(text):
-            write(text+'\n')
-            cnt[0] += len(text) + 1
-            cnt[1] += 1
-            if cnt[1] == down:
-                cnt[1] = 0
-                self.progress("%d" % cnt[0])
-        return produce
-
-    def progress(self, txt):
-        back = '\x08' * len(self.last_progress)
-        self.last_progress = txt+' ' # soft space
-        print back+txt,
-
-    def nameof(self, obj):
-        try:
-            try:
-                return self.picklenames[id(obj)]
-            except KeyError:
-                typ = type(obj)
-                return self.simple_const_types[typ](obj)
-        except KeyError:
-            try:
-                try:
-                    meth = self.typecache[typ]
-                except KeyError:
-                    obj_builtin_base = builtin_base(obj)
-                    if (obj_builtin_base in (object,) + tuple(
-                        self.simple_const_types.keys()) and
-                        typ is not obj_builtin_base):
-                        # assume it's a user defined thingy
-                        meth = self.nameof_instance
-                    else:
-                        for cls in typ.__mro__:
-                            meth = getattr(self, 'nameof_' + ''.join(
-                                [ c for c in cls.__name__
-                                  if c.isalpha() or c == '_'] ), None)
-                            if meth:
-                                break
-                        else:
-                            raise Exception, "nameof(%r)" % (obj,)
-                    self.typecache[typ] = meth
-                name = meth(obj)
-            except AlreadyCreated:
-                name = self.picklenames[id(obj)]
-            return name
-        except Exception, e:
-            self.problem = e, obj
-            raise
-
-    def nameofargs(self, tup, plain_tuple = False):
-        """ a string with the nameofs, concatenated """
-        # see if we can build a compact representation
-        ret = ', '.join([self.nameof(arg) for arg in tup])
-        if plain_tuple and len(tup) == 1:
-            ret += ','
-        if len(ret) <= 90:
-            return ret
-        ret = '\n ' + ',\n '.join(
-            [self.nameof(arg) for arg in tup]) + ',\n '
-        return ret
-
-    def nameof_object(self, value):
-        if type(value) is not object:
-            raise Exception, "nameof(%r): type %s not object" % (
-                value, type(value).__name__)
-        name = self.memoize_unique(value, 'g_object')
-        self.produce('%s = object()' % name)
-        return name
-
-    def nameof_module(self, value):
-        # all allowed here, we reproduce ourselves
-        if self.is_app_domain(value.__name__):
-            name = self.memoize_unique(value, 'gmod_%s' % value.__name__)
-            self.produce('%s = new.module(%r)\n'
-                         'sys.modules[%r] = %s'% (
-                name, value.__name__, value.__name__, name) )
-            def initmodule():
-                names = value.__dict__.keys()
-                names.sort()
-                for k in names:
-                    try:
-                        v = value.__dict__[k]
-                        nv = self.nameof(v)
-                        yield '%s.%s = %s' % (name, k, nv)
-                    except PicklingError:
-                        pass
-            for line in initmodule():
-                self.produce(line)
-        else:
-            name = self.memoize_unique(value, value.__name__)
-            self.produce('%s = __import__(%r)' % (name, value.__name__,))
-        return name
-
-    def skipped_function(self, func, reason=None, _dummydict={}):
-        # Generates a placeholder for missing functions
-        # that raises an exception when called.
-        # The original code object is retained in an
-        # attribute '_skipped_code'
-        skipname = 'gskippedfunc_' + func.__name__
-        funcname = func.__name__
-        # need to handle this specially
-        if id(func) in self.picklenames:
-            raise AlreadyCreated
-        # generate code object before the skipped func (reads better)
-        func_code = getattr(func, 'func_code', None) # maybe builtin
-        self.nameof(func_code)
-        if reason:
-            text = 'skipped: %r, see _skipped_code attr: %s' % (
-                reason, funcname)
-        else:
-            text = 'skipped, see _skipped_code attr: %s' % funcname
-        def dummy(*args, **kwds):
-            raise NotImplementedError, text
-        _dummydict['__builtins__'] = __builtins__
-        skippedfunc = new.function(dummy.func_code, _dummydict, skipname, (),
-                                   dummy.func_closure)
-        skippedfunc._skipped_code = func_code
-        name = self.nameof(skippedfunc)
-        return self.memoize(func, name)
-
-    def nameof_staticmethod(self, sm):
-        # XXX XXX XXXX
-        func = sm.__get__(42.5)
-        functionname = self.nameof(func)
-        name = self.memoize_unique(sm, 'gsm_' + func.__name__)
-        self.produce('%s = staticmethod(%s)' % (name, functionname))
-        return name
-
-    def nameof_instancemethod(self, meth):
-        func = self.nameof(meth.im_func)
-        typ = self.nameof(meth.im_class)
-        if meth.im_self is None:
-            # no error checking here
-            name = self.memoize_unique(meth, 'gmeth_' + func)
-            self.produce('%s = %s.%s' % (name, typ, meth.__name__))
-        else:
-            ob = self.nameof(meth.im_self)
-            name = self.memoize_unique(meth, 'gumeth_'+ func)
-            self.produce('%s = new.instancemethod(%s, %s, %s)' % (
-                name, func, ob, typ))
-        return name
+"""
+Generate a Python source file from the flowmodel.
+The purpose is to create something that allows
+to restart code generation after flowing and maybe
+annotation.
+
+The generated source appeared to be way too large
+for the CPython compiler. Therefore, we cut the
+source into pieces and compile them seperately.
+"""
+from __future__ import generators, division, nested_scopes
+import __future__
+all_feature_names = __future__.all_feature_names
+import os, sys, new, __builtin__
+
+from pypy.translator.gensupp import uniquemodulename, NameManager
+from pypy.translator.gensupp import builtin_base
+from pypy.rpython.rarithmetic import r_int, r_uint
+from pypy.objspace.flow.model import Variable, Constant, SpaceOperation
+from pypy.objspace.flow.model import FunctionGraph, Block, Link, Atom
+from pypy.objspace.flow.flowcontext import SpamBlock, EggBlock
+from pypy.annotation.model import SomeInteger, SomeObject, SomeChar, SomeBool
+from pypy.annotation.model import SomeList, SomeString, SomeTuple
+from pypy.annotation.unaryop import SomeInstance
+from pypy.interpreter.baseobjspace import ObjSpace
+from pypy.translator.pickle import slotted
+
+from pickle import whichmodule, PicklingError
+from copy_reg import _reconstructor
+
+import pickle
+
+from types import *
+import types, weakref
+
+class AlreadyCreated(Exception): pass
+
+# ____________________________________________________________
+
+
+#XXX Hack: This float is supposed to overflow to inf
+OVERFLOWED_FLOAT = float("1e10000000000000000000000000000000")
+#XXX Hack: and this one to underflow to -inf
+OVERFLOWED_FLOATM = float("-1e10000000000000000000000000000000")
+
+class GenPickle:
+
+    def __init__(self, translator, writer = None):
+        self.translator = translator
+        self.writer = writer
+        self.initcode = []
+        self.produce = self._produce()
+        self.produce(
+            'from __future__ import %s\n' % ', '.join(all_feature_names) +
+            'import new, types, sys',
+            )
+        self.picklenames = {}  # memoize objects
+        for name in all_feature_names + "new types sys".split():
+            self.memoize(globals()[name], name)
+        self.memoize((), '()')
+        self.namespace = NameManager()
+        self.uniquename = self.namespace.uniquename
+        self.namespace.make_reserved_names('None False True')
+        self.namespace.make_reserved_names('new types sys')
+        self.namespace.make_reserved_names(' '.join(all_feature_names))
+        self.namespace.make_reserved_names('result') # result dict
+        self.result = {}
+        self.simple_const_types = {
+            int: repr,
+            long: repr,
+            float: self.save_float,
+            str: repr,
+            unicode: repr,
+            type(None): repr,
+            bool: repr,
+            }
+        self.typecache = {} # hold types vs. nameof methods
+        # we distinguish between the "user program" and other stuff.
+        # "user program" will never use save_global.
+        self.domains = (
+            'pypy.objspace.std.',
+            'pypy.objspace.descroperation',
+            'pypy._cache.',
+            'pypy.interpreter.',
+            'pypy.module.',
+            'pypy.translator.test.',
+            '__main__',
+            )
+        self.shortnames = {
+            Variable:       'V',
+            Constant:       'C',
+            Block:          'B',
+            SpamBlock:      'SB',
+            EggBlock:       'EB',
+            Link:           'L',
+            FunctionGraph:  'FG',
+            SomeInteger:    'sI',
+            SomeObject:     'sO',
+            SomeChar:       'sC',
+            SomeBool:       'sB',
+            SomeList:       'sL',
+            SomeString:     'sS',
+            SomeTuple:      'sT',
+            SomeInstance:   'sIn',
+            }
+        self.inline_instances = {
+            SpaceOperation: True,
+            }
+
+    def save_float(self, fl):
+        if fl == OVERFLOWED_FLOAT:
+            return 'float("1e10000000000000000000000000000000")'
+        elif fl == OVERFLOWED_FLOATM:
+            return 'float("-1e10000000000000000000000000000000")'
+        return repr(fl)
+
+    def pickle(self, *args, **kwds):
+        for obj in args:
+            self.nameof(obj)
+        for obj in kwds.values():
+            self.nameof(obj)
+        self.result.update(kwds)
+
+    def finish(self):
+        self.pickle(self.result)
+        self.produce('result = %s' % self.nameof(self.result))
+        if self.writer:
+            self.writer.close()
+
+    def memoize(self, obj, name):
+        self.picklenames[id(obj)] = name
+        return name
+
+    def memoize_unique(self, obj, basename):
+        if id(obj) in self.picklenames:
+            raise AlreadyCreated
+        return self.memoize(obj, self.uniquename(basename))
+
+    def _produce(self):
+        writer = self.writer
+        down = 1234
+        cnt = [0, 0]  # text, calls
+        self.last_progress = ''
+        if writer:
+            write = writer.write
+        else:
+            write = self.initcode.append
+        def produce(text):
+            write(text+'\n')
+            cnt[0] += len(text) + 1
+            cnt[1] += 1
+            if cnt[1] == down:
+                cnt[1] = 0
+                self.progress("%d" % cnt[0])
+        return produce
+
+    def progress(self, txt):
+        back = '\x08' * len(self.last_progress)
+        self.last_progress = txt+' ' # soft space
+        print back+txt,
+
+    def nameof(self, obj):
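+        # lookup order: names already memoized by id(), then the simple
+        # constant types (rendered inline via repr and friends), then a
+        # 'nameof_<typename>' method found along the type's MRO and cached
+        # in self.typecache.  AlreadyCreated means a recursive nameof()
+        # call produced the name first.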
+        try:
+            try:
+                return self.picklenames[id(obj)]
+            except KeyError:
+                typ = type(obj)
+                return self.simple_const_types[typ](obj)
+        except KeyError:
+            try:
+                try:
+                    meth = self.typecache[typ]
+                except KeyError:
+                    obj_builtin_base = builtin_base(obj)
+                    if (obj_builtin_base in (object,) + tuple(
+                        self.simple_const_types.keys()) and
+                        typ is not obj_builtin_base):
+                        # assume it's a user defined thingy
+                        meth = self.nameof_instance
+                    else:
+                        for cls in typ.__mro__:
+                            meth = getattr(self, 'nameof_' + ''.join(
+                                [ c for c in cls.__name__
+                                  if c.isalpha() or c == '_'] ), None)
+                            if meth:
+                                break
+                        else:
+                            raise Exception, "nameof(%r)" % (obj,)
+                    self.typecache[typ] = meth
+                name = meth(obj)
+            except AlreadyCreated:
+                name = self.picklenames[id(obj)]
+            return name
+        except Exception, e:
+            self.problem = e, obj
+            raise
+
+    def nameofargs(self, tup, plain_tuple = False):
+        """ a string with the nameofs, concatenated """
+        # see if we can build a compact representation
+        ret = ', '.join([self.nameof(arg) for arg in tup])
+        if plain_tuple and len(tup) == 1:
+            ret += ','
+        if len(ret) <= 90:
+            return ret
+        ret = '\n ' + ',\n '.join(
+            [self.nameof(arg) for arg in tup]) + ',\n '
+        return ret
+
+    def nameof_object(self, value):
+        if type(value) is not object:
+            raise Exception, "nameof(%r): type %s not object" % (
+                value, type(value).__name__)
+        name = self.memoize_unique(value, 'g_object')
+        self.produce('%s = object()' % name)
+        return name
+
+    def nameof_module(self, value):
+        # all modules are allowed here: app-domain modules are rebuilt
+        # attribute by attribute, anything else is simply imported
+        if self.is_app_domain(value.__name__):
+            name = self.memoize_unique(value, 'gmod_%s' % value.__name__)
+            self.produce('%s = new.module(%r)\n'
+                         'sys.modules[%r] = %s'% (
+                name, value.__name__, value.__name__, name) )
+            def initmodule():
+                names = value.__dict__.keys()
+                names.sort()
+                for k in names:
+                    try:
+                        v = value.__dict__[k]
+                        nv = self.nameof(v)
+                        yield '%s.%s = %s' % (name, k, nv)
+                    except PicklingError:
+                        pass
+            for line in initmodule():
+                self.produce(line)
+        else:
+            name = self.memoize_unique(value, value.__name__)
+            self.produce('%s = __import__(%r)' % (name, value.__name__,))
+        return name
+
+    def skipped_function(self, func, reason=None, _dummydict={}):
+        # Generate a placeholder for a missing function that raises
+        # NotImplementedError when called.  The original code object is
+        # retained in the attribute '_skipped_code'.
+        skipname = 'gskippedfunc_' + func.__name__
+        funcname = func.__name__
+        # need to handle this specially
+        if id(func) in self.picklenames:
+            raise AlreadyCreated
+        # generate code object before the skipped func (reads better)
+        func_code = getattr(func, 'func_code', None) # maybe builtin
+        self.nameof(func_code)
+        if reason:
+            text = 'skipped: %r, see _skipped_code attr: %s' % (
+                reason, funcname)
+        else:
+            text = 'skipped, see _skipped_code attr: %s' % funcname
+        def dummy(*args, **kwds):
+            raise NotImplementedError, text
+        _dummydict['__builtins__'] = __builtins__
+        skippedfunc = new.function(dummy.func_code, _dummydict, skipname, (),
+                                   dummy.func_closure)
+        skippedfunc._skipped_code = func_code
+        name = self.nameof(skippedfunc)
+        return self.memoize(func, name)
+
+    def nameof_staticmethod(self, sm):
+        # XXX XXX XXXX
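+        # staticmethod.__get__ ignores its argument and returns the wrapped
+        # function, hence the dummy 42.5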
+        func = sm.__get__(42.5)
+        functionname = self.nameof(func)
+        name = self.memoize_unique(sm, 'gsm_' + func.__name__)
+        self.produce('%s = staticmethod(%s)' % (name, functionname))
+        return name
+
+    def nameof_instancemethod(self, meth):
+        func = self.nameof(meth.im_func)
+        typ = self.nameof(meth.im_class)
+        if meth.im_self is None:
+            # no error checking here
+            name = self.memoize_unique(meth, 'gmeth_' + func)
+            self.produce('%s = %s.%s' % (name, typ, meth.__name__))
+        else:
+            ob = self.nameof(meth.im_self)
+            name = self.memoize_unique(meth, 'gumeth_'+ func)
+            self.produce('%s = new.instancemethod(%s, %s, %s)' % (
+                name, func, ob, typ))
+        return name
 
     nameof_method = nameof_instancemethod   # when run on top of PyPy
-
-    def should_translate_attr(self, pbc, attr):
-        ann = self.translator.annotator
-        if ann:
-            classdef = ann.getuserclasses().get(pbc.__class__)
-        else:
-            classdef = None
-        ignore = getattr(pbc.__class__, 'NOT_RPYTHON_ATTRIBUTES', [])
-        if attr in ignore:
-            return False
-        if classdef:
-            return classdef.about_attribute(attr) is not None
-        # by default, render if we don't know anything
-        return True
-
-    def nameof_builtin_function_or_method(self, func):
-        if func.__self__ is None:
-            # builtin function
-            # where does it come from? Python2.2 doesn't have func.__module__
-            for modname, module in sys.modules.items():
-                # here we don't ignore extension modules, but it must be
-                # a builtin module
-                if not module: continue
-                if hasattr(module, '__file__'):
-                    fname = module.__file__.lower()
-                    pyendings = '.py', '.pyc', '.pyo'
-                    if [fname.endswith(ending) for ending in pyendings]:
-                        continue
-                if func is getattr(module, func.__name__, None):
-                    break
-            else:
-                #raise Exception, '%r not found in any built-in module' % (func,)
-                return self.skipped_function(
-                    func, 'not found in any built-in module')
-            name = self.memoize_unique(func, 'gbltin_' + func.__name__)
-            if modname == '__builtin__':
-                self.produce('%s = %s' % (name, func.__name__))
-            else:
-                modname = self.nameof(module)
-                self.produce('%s = %s.%s' % (name, modname, func.__name__))
-        else:
-            # builtin (bound) method
-            selfname = self.nameof(func.__self__)
-            name = self.memoize_unique(func, 'gbltinmethod_' + func.__name__)
-            self.produce('%s = %s.%s' % (name, selfname, func.__name__))
-        return name
-
-    def nameof_classobj(self, cls):
-        if cls.__doc__ and cls.__doc__.lstrip().startswith('NOT_RPYTHON'):
-            raise PicklingError, "%r should never be reached" % (cls,)
-
-        try:
-            return self.save_global(cls)
-        except PicklingError, e:
-            pass
-        
-        metaclass = "type"
-        if issubclass(cls, Exception):
-            # if cls.__module__ == 'exceptions':
-            # don't rely on this, py.magic redefines AssertionError
-            if getattr(__builtin__, cls.__name__, None) is cls:
-                name = self.memoize_unique(cls, 'gexc_' + cls.__name__)
-                self.produce('%s = %s' % (name, cls.__name__))
-                return name
-        if not isinstance(cls, type):
-            assert type(cls) is ClassType
-            metaclass = "types.ClassType"
-
-        basenames = [self.nameof(base) for base in cls.__bases__]
-        def initclassobj():
-            content = cls.__dict__.items()
-            content.sort()
-            ignore = getattr(cls, 'NOT_RPYTHON_ATTRIBUTES', [])
-            isapp = self.is_app_domain(cls.__module__)
-            for key, value in content:
-                if key.startswith('__'):
-                    if key in ['__module__', '__doc__', '__dict__', '__slots__',
-                               '__weakref__', '__repr__', '__metaclass__']:
-                        continue
-                    # XXX some __NAMES__ are important... nicer solution sought
-                    #raise Exception, "unexpected name %r in class %s"%(key, cls)
-                if isapp:
-                    if (isinstance(value, staticmethod) and value.__get__(1) not in
-                        self.translator.flowgraphs and self.translator.frozen):
-                        continue
-                    if isinstance(value, classmethod):
-                        doc = value.__get__(cls).__doc__
-                        if doc and doc.lstrip().startswith("NOT_RPYTHON"):
-                            continue
-                    if (isinstance(value, FunctionType) and value not in
-                        self.translator.flowgraphs and self.translator.frozen):
-                        continue
-                if key in ignore:
-                    continue
-                if type(value) in self.descriptor_filter:
-                    continue # this gets computed
-
-                yield '%s.%s = %s' % (name, key, self.nameof(value))
-
-        baseargs = ", ".join(basenames)
-        if baseargs:
-            baseargs = '(%s)' % baseargs
-        name = self.memoize_unique(cls, 'gcls_' + cls.__name__)
-        ini = 'class %s%s:\n  __metaclass__ = %s' % (name, baseargs, metaclass)
-        if '__slots__' in cls.__dict__:
-            ini += '\n  __slots__ = %r' % cls.__slots__
-        self.produce(ini)
-        self.produce('%s.__name__ = %r' % (name, cls.__name__))
-        self.produce('%s.__module__ = %r' % (name, cls.__module__))
-        for line in initclassobj():
-            self.produce(line)
-        return name
-
-    nameof_class = nameof_classobj   # for Python 2.2
-
-    typename_mapping = {
-        InstanceType: 'types.InstanceType',
-        type(None):   'type(None)',
-        CodeType:     'types.CodeType',
-        type(sys):    'type(new)',
-
-        r_int:        'r_int',
-        r_uint:       'r_uint',
-
-        # XXX more hacks
-        # type 'builtin_function_or_method':
-        type(len): 'type(len)',
-        # type 'method_descriptor':
-        type(type.__reduce__): 'type(type.__reduce__)',
-        # type 'wrapper_descriptor':
-        type(type(None).__repr__): 'type(type(None).__repr__)',
-        # type 'getset_descriptor':
-        type(type.__dict__['__dict__']): "type(type.__dict__['__dict__'])",
-        # type 'member_descriptor':
-        type(type.__dict__['__basicsize__']): "type(type.__dict__['__basicsize__'])",
-        # type 'instancemethod':
-        type(Exception().__init__): 'type(Exception().__init__)',
-        # type 'listiterator':
-        type(iter([])): 'type(iter([]))',
-        }
-    descriptor_filter = {}
-    for _key in typename_mapping.keys():
-        if _key.__name__.endswith('descriptor'):
-            descriptor_filter[_key] = True
-    del _key
-    
-    def nameof_type(self, cls):
-        if cls.__module__ != '__builtin__':
-            return self.nameof_classobj(cls)   # user-defined type
-        name = self.memoize_unique(cls, 'gtype_%s' % cls.__name__)
-        if getattr(__builtin__, cls.__name__, None) is cls:
-            expr = cls.__name__    # type available from __builtin__
-        elif cls in types.__dict__.values():
-            for key, value in types.__dict__.items():
-                if value is cls:
-                    break
-            self.produce('from types import %s as %s' % (
-                key, name))
-            return name
-        elif cls in weakref.__dict__.values():
-            for key, value in weakref.__dict__.items():
-                if value is cls:
-                    break
-            self.produce('from weakref import %s as %s' % (
-                key, name))
-            return name
-        else:
-            expr = self.typename_mapping[cls]
-        self.produce('%s = %s' % (name, expr))
-        return name
-
-    def nameof_tuple(self, tup):
-        chunk = 20
-        # first create all arguments
-        for i in range(0, len(tup), chunk):
-            self.nameofargs(tup[i:i+chunk], True)
-        # see if someone else created us meanwhile
-        name = self.memoize_unique(tup, 'T%d' % len(tup))
-        argstr = self.nameofargs(tup[:chunk], True)
-        self.produce('%s = (%s)' % (name, argstr))
-        for i in range(chunk, len(tup), chunk):
-            argstr = self.nameofargs(tup[i:i+chunk], True)
-            self.produce('%s += (%s)' % (name, argstr) )
-        return name
-
-    def nameof_list(self, lis):
-        chunk = 20
-        def initlist():
-            for i in range(0, len(lis), chunk):
-                argstr = self.nameofargs(lis[i:i+chunk])
-                yield '%s += [%s]' % (name, argstr)
-        name = self.memoize_unique(lis, 'L%d' % len(lis))
-        self.produce('%s = []' % name)
-        for line in initlist():
-            self.produce(line)
-        return name
-
-    def is_app_domain(self, modname, exclude=()):
-        for domain in self.domains:
-            if domain.endswith('.') and modname.startswith(domain):
-                # handle subpaths
-                return True
-            if modname == domain and modname not in exclude:
-                # handle exact module names
-                return True
-        return False
-
-    def nameof_dict(self, dic):
-        if '__name__' in dic:
-            module = dic['__name__']
-            try:
-                if type(module) is str and self.is_app_domain(module):
-                    raise ImportError
-                __import__(module)
-                mod = sys.modules[module]
-            except (ImportError, KeyError, TypeError):
-                pass
-            else:
-                if dic is mod.__dict__ and not self.is_app_domain(module):
-                    dictname = module.split('.')[-1] + '__dict__'
-                    dictname = self.memoize_unique(dic, dictname)
-                    self.produce('from %s import __dict__ as %s' % (
-                                 module, dictname) )
-                    return dictname
-        def initdict():
-            keys = dic.keys()
-            keys.sort()
-            told = False
-            for k in keys:
-                try:
-                    nk, nv = self.nameof(k), self.nameof(dic[k])
-                    yield '%s[%s] = %s' % (name, nk, nv)
-                except PicklingError:
-                    pass
-                else:
-                    # some sanity check
-                    if type(k) is int:
-                        if k in self.picklenames:
-                            print ('WARNING: this dict most likely contains '
-                                   'the id of some object!!')
-                            print 'name of object: %s' % self.picklenames[k]
-                        elif k == id(dic[k]):
-                            print ('WARNING: this dict most likely contains '
-                                   'the id of one of it\'s objects!!')
-                            if not told:
-                                print dic
-                                told = True
-        name = self.memoize_unique(dic, 'D%d' % len(dic))
-        self.produce('%s = {}' % name)
-        for line in initdict():
-            self.produce(line)
-        return name
-
-    # strange prebuilt instances below, don't look too closely
-    # XXX oh well.
-    def nameof_member_descriptor(self, md):
-        cls = self.nameof(md.__objclass__)
-        name = self.memoize_unique(md, 'gdescriptor_%s_%s' % (
-            md.__objclass__.__name__, md.__name__))
-        self.produce('%s = %s.__dict__[%r]' % (name, cls, md.__name__))
-        return name
-    nameof_getset_descriptor  = nameof_member_descriptor
-    nameof_method_descriptor  = nameof_member_descriptor
-    nameof_wrapper_descriptor = nameof_member_descriptor
-
-    def nameof_instance(self, instance):
-        if isinstance(instance, Atom):
-            # cannot reconstruct this, it *must* be
-            # the one from model
-            return self.save_global(instance)
-        def initinstance():
-            if hasattr(instance, '__setstate__'):
-                # the instance knows what to do
-                args = self.nameof(restorestate)
-                yield '%s.__setstate__(%s)' % (name, args)
-                return
-            elif type(restorestate) is tuple:
-                setstate = self.nameof(_set)
-                argstr = self.nameofargs(restorestate)
-                yield '%s(%s, %s)' % (setstate, name, argstr)
-                return
-            assert type(restorestate) is dict, (
-                "%s has no dict and no __setstate__" % name)
-            content = restorestate.items()
-            content.sort()
-            attrs = []
-            for key, value in content:
-                if self.should_translate_attr(instance, key):
-                    if hasattr(value, '__doc__'):
-                        doc = value.__doc__
-                        if type(doc) is str and doc.lstrip().startswith('NOT_RPYTHON'):
-                            continue
-                    attrs.append( (key, self.nameof(value)) )
-            for k, v in attrs:
-                yield '%s.%s = %s' % (name, k, v)
-
-        klass = instance.__class__
-        cls = self.nameof(klass)
-        if hasattr(klass, '__base__'):
-            base_class = builtin_base(instance)
-            base = self.nameof(base_class)
-        else:
-            base_class = None
-            base = cls
-        if klass in self.inline_instances:
-            immediate = True
-        else:
-            if klass in self.shortnames:
-                name = self.memoize_unique(instance, self.shortnames[klass])
-            else:
-                name = self.memoize_unique(instance, 'ginst_' + klass.__name__)
-            immediate = False
-        if hasattr(instance, '__reduce_ex__'):
-            try:
-                reduced = instance.__reduce_ex__()
-            except TypeError:
-                # oops! slots and no __getstate__?
-                if not (hasattr(instance, '__slots__')
-                        and not hasattr(instance, '__getstate__') ):
-                    print "PROBLEM:", instance
-                    raise
-                assert not hasattr(instance, '__dict__'), ('wrong assumptions'
-                    ' about __slots__ in %s instance without __setstate__,'
-                    ' please update %s' % (klass.__name__, __name__) )
-                restorestate = _get(instance)
-                restorer = _rec
-                restoreargs = klass,
-            else:
-                restorer = reduced[0]
-                restoreargs = reduced[1]
-                if restorer is _reconstructor:
-                    restorer = _rec
-                    if restoreargs[1:] == (object, None):
-                        restoreargs = restoreargs[:1]
-                if len(reduced) > 2:
-                    restorestate = reduced[2]
-                else:
-                    restorestate = None
-            restorename = self.nameof(restorer)
-            # ignore possible dict, handled later by initinstance filtering
-            # in other cases, we expect that the class knows what to pickle.
-        else:
-            restoreargs = (base, cls)
-            restorename = '%s.__new__' % base
-            if hasattr(instance, '__getstate__'):
-                restorestate = instance.__getstate__()
-            else:
-                restorestate = instance.__dict__
-        restoreargstr = self.nameofargs(restoreargs)
-        if immediate:
-            assert restorestate is None
-            return '%s(%s)' % (restorename, restoreargstr)
-        if isinstance(klass, type):
-            self.produce('%s = %s(%s)' % (name, restorename, restoreargstr))
-        else:
-            self.produce('%s = new.instance(%s)' % (name, cls))
-        if restorestate is not None:
-            for line in initinstance():
-                self.produce(line)
-        return name
-
-    def save_global(self, obj):
-        # this is almost similar to pickle.py
-        name = obj.__name__
-        module = getattr(obj, "__module__", None)
-        if module is None:
-            module = whichmodule(obj, name)
-        if self.is_app_domain(module):
-            # not allowed to import this
-            raise PicklingError('%s belongs to the user program' %
-                                name)
-        try:
-            __import__(module)
-            mod = sys.modules[module]
-            klass = getattr(mod, name)
-        except (ImportError, KeyError, AttributeError):
-            raise PicklingError(
-                "Can't pickle %r: it's not found as %s.%s" %
-                (obj, module, name))
-        else:
-            if klass is not obj:
-                raise PicklingError(
-                    "Can't pickle %r: it's not the same object as %s.%s" %
-                    (obj, module, name))
-        # from here we do our own stuff
-        restorename = self.memoize_unique(obj, obj.__name__)
-        if restorename != obj.__name__:
-            self.produce('from %s import %s as %s' % (
-                         module, obj.__name__, restorename) )
-        else:
-            self.produce('from %s import %s' % (
-                         module, obj.__name__) )
-        return restorename
-
-    def nameof_function(self, func):
-        # look for skipped functions
-        if self.translator.frozen:
-            if func not in self.translator.flowgraphs:
-                # see if this is in translator's domain
-                module = whichmodule(func, func.__name__)
-                if self.is_app_domain(module, exclude=['__main__']):
-                    # see if this buddy has been skipped in another save, before
-                    if not hasattr(func, '_skipped_code'):
-                        return self.skipped_function(func,
-                            'not found in translator\'s flowgraphs')
-        else:
-            if (func.func_doc and
-                func.func_doc.lstrip().startswith('NOT_RPYTHON')):
-                return self.skipped_function(func, 'tagged as NOT_RPYTHON')
-        try:
-            return self.save_global(func)
-        except PicklingError:
-            pass
-        args = (func.func_code, func.func_globals, func.func_name,
-                func.func_defaults, func.func_closure)
-        argstr = self.nameofargs(args)
-        if hasattr(func, '_skipped_code'):
-            name = self.memoize_unique(func, func.__name__)
-        else:
-            name = self.memoize_unique(func, 'gfunc_' + func.__name__)
-        self.produce('%s = new.function(%s)' % (name, argstr) )
-        if func.__dict__:
-            def initfunction():
-                items = func.__dict__.items()
-                items.sort()
-                for k, v in items:
-                    try:
-                        yield '%s.%s = %s' % (name, k, self.nameof(v))
-                    except PicklingError:
-                        pass
-            for line in initfunction():
-                self.produce(line)
-        return name
-
-    def nameof_cell(self, cel):
-        # no need to name cells. Their contents is what is shared.
-        obj = break_cell(cel)
-        return '%s(%s)' % (self.nameof(make_cell), self.nameof(obj))
-
-    def nameof_property(self, prop):
-        argstr = self.nameofargs((prop.fget, prop.fset, prop.fdel,
-                                  prop.__doc__))
-        name = self.memoize_unique(prop, 'gprop_')
-        self.produce('%s = property(%s)' % (name, argstr) )
-        return name
-
-    def nameof_code(self, code):
-        args = (code.co_argcount, code.co_nlocals, code.co_stacksize,
-                code.co_flags, code.co_code, code.co_consts, code.co_names,
-                code.co_varnames, code.co_filename, code.co_name,
-                code.co_firstlineno, code.co_lnotab, code.co_freevars,
-                code.co_cellvars)
-        argstr = self.nameofargs(args)
-        name = self.memoize_unique(code, 'gcode_' + code.co_name)
-        self.produce('%s = new.code(%s)' % (name, argstr))
-        return name
-
-    def nameof_file(self, fil):
-        if fil is sys.stdin:  return "sys.stdin"
-        if fil is sys.stdout: return "sys.stdout"
-        if fil is sys.stderr: return "sys.stderr"
-        raise Exception, 'Cannot translate an already-open file: %r' % (fil,)
-
-    def nameof_methodwrapper(self, wp):
-        # this object should be enhanced in CPython!
-        msg = '%r: method %s of unknown object cannot be reconstructed' % (
-            wp, wp.__name__)
-        return self.skipped_function(wp, msg)
-
-    def nameof_weakref(self, value):
-        # no need to name weakrefs. Their contents is what is weakref'ed.
-        # obtain the ref'ed object by calling
-        obj = value()
-        return '%s(%s)' % (self.nameof(type(value)), self.nameof(obj))
-
-
-def make_cell(obj):
-    def func():
-        return obj
-    return func.func_closure[0]
-
-def break_cell(cel):
-    obj = None
-    def func():
-        return obj
-    args = (func.func_code, func.func_globals, func.func_name,
-            func.func_defaults, (cel,))
-    func = new.function(*args)
-    return func()
-
-# some shortcuts, to make the pickle smaller
-
-def _rec(klass, base=object, state=None):
-    return _reconstructor(klass, base, state)
-
-def _get(obj):
-    return slotted.__getstate__(obj)
-
-def _set(obj, *args):
-    slotted.__setstate__(obj, args)
-
-__all__ = ['GenPickle']
+
+    def should_translate_attr(self, pbc, attr):
+        ann = self.translator.annotator
+        if ann:
+            classdef = ann.getuserclasses().get(pbc.__class__)
+        else:
+            classdef = None
+        ignore = getattr(pbc.__class__, 'NOT_RPYTHON_ATTRIBUTES', [])
+        if attr in ignore:
+            return False
+        if classdef:
+            return classdef.about_attribute(attr) is not None
+        # by default, render if we don't know anything
+        return True
+
+    def nameof_builtin_function_or_method(self, func):
+        if func.__self__ is None:
+            # builtin function
+            # where does it come from? Python2.2 doesn't have func.__module__
+            for modname, module in sys.modules.items():
+                # here we don't ignore extension modules, but it must be
+                # a builtin module
+                if not module: continue
+                if hasattr(module, '__file__'):
+                    fname = module.__file__.lower()
+                    pyendings = '.py', '.pyc', '.pyo'
+                    if [ending for ending in pyendings if fname.endswith(ending)]:
+                        continue
+                if func is getattr(module, func.__name__, None):
+                    break
+            else:
+                #raise Exception, '%r not found in any built-in module' % (func,)
+                return self.skipped_function(
+                    func, 'not found in any built-in module')
+            name = self.memoize_unique(func, 'gbltin_' + func.__name__)
+            if modname == '__builtin__':
+                self.produce('%s = %s' % (name, func.__name__))
+            else:
+                modname = self.nameof(module)
+                self.produce('%s = %s.%s' % (name, modname, func.__name__))
+        else:
+            # builtin (bound) method
+            selfname = self.nameof(func.__self__)
+            name = self.memoize_unique(func, 'gbltinmethod_' + func.__name__)
+            self.produce('%s = %s.%s' % (name, selfname, func.__name__))
+        return name
+
+    def nameof_classobj(self, cls):
+        if cls.__doc__ and cls.__doc__.lstrip().startswith('NOT_RPYTHON'):
+            raise PicklingError, "%r should never be reached" % (cls,)
+
+        try:
+            return self.save_global(cls)
+        except PicklingError, e:
+            pass
+        
+        metaclass = "type"
+        if issubclass(cls, Exception):
+            # if cls.__module__ == 'exceptions':
+            # don't rely on this, py.magic redefines AssertionError
+            if getattr(__builtin__, cls.__name__, None) is cls:
+                name = self.memoize_unique(cls, 'gexc_' + cls.__name__)
+                self.produce('%s = %s' % (name, cls.__name__))
+                return name
+        if not isinstance(cls, type):
+            assert type(cls) is ClassType
+            metaclass = "types.ClassType"
+
+        basenames = [self.nameof(base) for base in cls.__bases__]
+        def initclassobj():
+            content = cls.__dict__.items()
+            content.sort()
+            ignore = getattr(cls, 'NOT_RPYTHON_ATTRIBUTES', [])
+            isapp = self.is_app_domain(cls.__module__)
+            for key, value in content:
+                if key.startswith('__'):
+                    if key in ['__module__', '__doc__', '__dict__', '__slots__',
+                               '__weakref__', '__repr__', '__metaclass__']:
+                        continue
+                    # XXX some __NAMES__ are important... nicer solution sought
+                    #raise Exception, "unexpected name %r in class %s"%(key, cls)
+                if isapp:
+                    if (isinstance(value, staticmethod) and value.__get__(1) not in
+                        self.translator.flowgraphs and self.translator.frozen):
+                        continue
+                    if isinstance(value, classmethod):
+                        doc = value.__get__(cls).__doc__
+                        if doc and doc.lstrip().startswith("NOT_RPYTHON"):
+                            continue
+                    if (isinstance(value, FunctionType) and value not in
+                        self.translator.flowgraphs and self.translator.frozen):
+                        continue
+                if key in ignore:
+                    continue
+                if type(value) in self.descriptor_filter:
+                    continue # this gets computed
+
+                yield '%s.%s = %s' % (name, key, self.nameof(value))
+
+        baseargs = ", ".join(basenames)
+        if baseargs:
+            baseargs = '(%s)' % baseargs
+        name = self.memoize_unique(cls, 'gcls_' + cls.__name__)
+        ini = 'class %s%s:\n  __metaclass__ = %s' % (name, baseargs, metaclass)
+        if '__slots__' in cls.__dict__:
+            ini += '\n  __slots__ = %r' % cls.__slots__
+        self.produce(ini)
+        self.produce('%s.__name__ = %r' % (name, cls.__name__))
+        self.produce('%s.__module__ = %r' % (name, cls.__module__))
+        for line in initclassobj():
+            self.produce(line)
+        return name
+
+    nameof_class = nameof_classobj   # for Python 2.2
+
+    typename_mapping = {
+        InstanceType: 'types.InstanceType',
+        type(None):   'type(None)',
+        CodeType:     'types.CodeType',
+        type(sys):    'type(new)',
+
+        r_int:        'r_int',
+        r_uint:       'r_uint',
+
+        # XXX more hacks
+        # type 'builtin_function_or_method':
+        type(len): 'type(len)',
+        # type 'method_descriptor':
+        type(type.__reduce__): 'type(type.__reduce__)',
+        # type 'wrapper_descriptor':
+        type(type(None).__repr__): 'type(type(None).__repr__)',
+        # type 'getset_descriptor':
+        type(type.__dict__['__dict__']): "type(type.__dict__['__dict__'])",
+        # type 'member_descriptor':
+        type(type.__dict__['__basicsize__']): "type(type.__dict__['__basicsize__'])",
+        # type 'instancemethod':
+        type(Exception().__init__): 'type(Exception().__init__)',
+        # type 'listiterator':
+        type(iter([])): 'type(iter([]))',
+        }
+    descriptor_filter = {}
+    for _key in typename_mapping.keys():
+        if _key.__name__.endswith('descriptor'):
+            descriptor_filter[_key] = True
+    del _key
+    
+    def nameof_type(self, cls):
+        if cls.__module__ != '__builtin__':
+            return self.nameof_classobj(cls)   # user-defined type
+        name = self.memoize_unique(cls, 'gtype_%s' % cls.__name__)
+        if getattr(__builtin__, cls.__name__, None) is cls:
+            expr = cls.__name__    # type available from __builtin__
+        elif cls in types.__dict__.values():
+            for key, value in types.__dict__.items():
+                if value is cls:
+                    break
+            self.produce('from types import %s as %s' % (
+                key, name))
+            return name
+        elif cls in weakref.__dict__.values():
+            for key, value in weakref.__dict__.items():
+                if value is cls:
+                    break
+            self.produce('from weakref import %s as %s' % (
+                key, name))
+            return name
+        else:
+            expr = self.typename_mapping[cls]
+        self.produce('%s = %s' % (name, expr))
+        return name
+
+    def nameof_tuple(self, tup):
+        chunk = 20
+        # first create all arguments
+        for i in range(0, len(tup), chunk):
+            self.nameofargs(tup[i:i+chunk], True)
+        # the recursive nameof() calls above may already have created this
+        # tuple; memoize_unique raises AlreadyCreated in that case
+        name = self.memoize_unique(tup, 'T%d' % len(tup))
+        argstr = self.nameofargs(tup[:chunk], True)
+        self.produce('%s = (%s)' % (name, argstr))
+        for i in range(chunk, len(tup), chunk):
+            argstr = self.nameofargs(tup[i:i+chunk], True)
+            self.produce('%s += (%s)' % (name, argstr) )
+        return name
+
+    def nameof_list(self, lis):
+        chunk = 20
+        def initlist():
+            for i in range(0, len(lis), chunk):
+                argstr = self.nameofargs(lis[i:i+chunk])
+                yield '%s += [%s]' % (name, argstr)
+        name = self.memoize_unique(lis, 'L%d' % len(lis))
+        self.produce('%s = []' % name)
+        for line in initlist():
+            self.produce(line)
+        return name
+
+    def is_app_domain(self, modname, exclude=()):
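+        # domains ending in '.' match as package prefixes; the others must
+        # match the module name exactly (unless listed in 'exclude')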
+        for domain in self.domains:
+            if domain.endswith('.') and modname.startswith(domain):
+                # handle subpaths
+                return True
+            if modname == domain and modname not in exclude:
+                # handle exact module names
+                return True
+        return False
+
+    def nameof_dict(self, dic):
+        if '__name__' in dic:
+            module = dic['__name__']
+            try:
+                if type(module) is str and self.is_app_domain(module):
+                    raise ImportError
+                __import__(module)
+                mod = sys.modules[module]
+            except (ImportError, KeyError, TypeError):
+                pass
+            else:
+                if dic is mod.__dict__ and not self.is_app_domain(module):
+                    dictname = module.split('.')[-1] + '__dict__'
+                    dictname = self.memoize_unique(dic, dictname)
+                    self.produce('from %s import __dict__ as %s' % (
+                                 module, dictname) )
+                    return dictname
+        def initdict():
+            keys = dic.keys()
+            keys.sort()
+            told = False
+            for k in keys:
+                try:
+                    nk, nv = self.nameof(k), self.nameof(dic[k])
+                    yield '%s[%s] = %s' % (name, nk, nv)
+                except PicklingError:
+                    pass
+                else:
+                    # some sanity check
+                    if type(k) is int:
+                        if k in self.picklenames:
+                            print ('WARNING: this dict most likely contains '
+                                   'the id of some object!!')
+                            print 'name of object: %s' % self.picklenames[k]
+                        elif k == id(dic[k]):
+                            print ('WARNING: this dict most likely contains '
+                                   'the id of one of its objects!!')
+                            if not told:
+                                print dic
+                                told = True
+        name = self.memoize_unique(dic, 'D%d' % len(dic))
+        self.produce('%s = {}' % name)
+        for line in initdict():
+            self.produce(line)
+        return name
+
+    # strange prebuilt instances below, don't look too closely
+    # XXX oh well.
+    def nameof_member_descriptor(self, md):
+        cls = self.nameof(md.__objclass__)
+        name = self.memoize_unique(md, 'gdescriptor_%s_%s' % (
+            md.__objclass__.__name__, md.__name__))
+        self.produce('%s = %s.__dict__[%r]' % (name, cls, md.__name__))
+        return name
+    nameof_getset_descriptor  = nameof_member_descriptor
+    nameof_method_descriptor  = nameof_member_descriptor
+    nameof_wrapper_descriptor = nameof_member_descriptor
+
+    def nameof_instance(self, instance):
+        if isinstance(instance, Atom):
+            # cannot reconstruct this, it *must* be
+            # the one from model
+            return self.save_global(instance)
+        def initinstance():
+            if hasattr(instance, '__setstate__'):
+                # the instance knows what to do
+                args = self.nameof(restorestate)
+                yield '%s.__setstate__(%s)' % (name, args)
+                return
+            elif type(restorestate) is tuple:
+                setstate = self.nameof(_set)
+                argstr = self.nameofargs(restorestate)
+                yield '%s(%s, %s)' % (setstate, name, argstr)
+                return
+            assert type(restorestate) is dict, (
+                "%s has no dict and no __setstate__" % name)
+            content = restorestate.items()
+            content.sort()
+            attrs = []
+            for key, value in content:
+                if self.should_translate_attr(instance, key):
+                    if hasattr(value, '__doc__'):
+                        doc = value.__doc__
+                        if type(doc) is str and doc.lstrip().startswith('NOT_RPYTHON'):
+                            continue
+                    attrs.append( (key, self.nameof(value)) )
+            for k, v in attrs:
+                yield '%s.%s = %s' % (name, k, v)
+
+        klass = instance.__class__
+        cls = self.nameof(klass)
+        if hasattr(klass, '__base__'):
+            base_class = builtin_base(instance)
+            base = self.nameof(base_class)
+        else:
+            base_class = None
+            base = cls
+        if klass in self.inline_instances:
+            immediate = True
+        else:
+            if klass in self.shortnames:
+                name = self.memoize_unique(instance, self.shortnames[klass])
+            else:
+                name = self.memoize_unique(instance, 'ginst_' + klass.__name__)
+            immediate = False
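+        # prefer the pickle protocol: __reduce_ex__ yields (restorer, args[, state]).
+        # without it, fall back to constructing via __new__ and restoring the
+        # state from __getstate__() or __dict__.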
+        if hasattr(instance, '__reduce_ex__'):
+            try:
+                reduced = instance.__reduce_ex__()
+            except TypeError:
+                # oops! slots and no __getstate__?
+                if not (hasattr(instance, '__slots__')
+                        and not hasattr(instance, '__getstate__') ):
+                    print "PROBLEM:", instance
+                    raise
+                assert not hasattr(instance, '__dict__'), ('wrong assumptions'
+                    ' about __slots__ in %s instance without __setstate__,'
+                    ' please update %s' % (klass.__name__, __name__) )
+                restorestate = _get(instance)
+                restorer = _rec
+                restoreargs = klass,
+            else:
+                restorer = reduced[0]
+                restoreargs = reduced[1]
+                if restorer is _reconstructor:
+                    restorer = _rec
+                    if restoreargs[1:] == (object, None):
+                        restoreargs = restoreargs[:1]
+                if len(reduced) > 2:
+                    restorestate = reduced[2]
+                else:
+                    restorestate = None
+            restorename = self.nameof(restorer)
+            # ignore possible dict, handled later by initinstance filtering
+            # in other cases, we expect that the class knows what to pickle.
+        else:
+            restoreargs = (base, cls)
+            restorename = '%s.__new__' % base
+            if hasattr(instance, '__getstate__'):
+                restorestate = instance.__getstate__()
+            else:
+                restorestate = instance.__dict__
+        restoreargstr = self.nameofargs(restoreargs)
+        if immediate:
+            assert restorestate is None
+            return '%s(%s)' % (restorename, restoreargstr)
+        if isinstance(klass, type):
+            self.produce('%s = %s(%s)' % (name, restorename, restoreargstr))
+        else:
+            self.produce('%s = new.instance(%s)' % (name, cls))
+        if restorestate is not None:
+            for line in initinstance():
+                self.produce(line)
+        return name
+
+    def save_global(self, obj):
+        # this is almost the same as in pickle.py
+        name = obj.__name__
+        module = getattr(obj, "__module__", None)
+        if module is None:
+            module = whichmodule(obj, name)
+        if self.is_app_domain(module):
+            # not allowed to import this
+            raise PicklingError('%s belongs to the user program' %
+                                name)
+        try:
+            __import__(module)
+            mod = sys.modules[module]
+            klass = getattr(mod, name)
+        except (ImportError, KeyError, AttributeError):
+            raise PicklingError(
+                "Can't pickle %r: it's not found as %s.%s" %
+                (obj, module, name))
+        else:
+            if klass is not obj:
+                raise PicklingError(
+                    "Can't pickle %r: it's not the same object as %s.%s" %
+                    (obj, module, name))
+        # from here we do our own stuff
+        restorename = self.memoize_unique(obj, obj.__name__)
+        if restorename != obj.__name__:
+            self.produce('from %s import %s as %s' % (
+                         module, obj.__name__, restorename) )
+        else:
+            self.produce('from %s import %s' % (
+                         module, obj.__name__) )
+        return restorename
+
+    def nameof_function(self, func):
+        # look for skipped functions
+        if self.translator.frozen:
+            if func not in self.translator.flowgraphs:
+                # see if this is in translator's domain
+                module = whichmodule(func, func.__name__)
+                if self.is_app_domain(module, exclude=['__main__']):
+                    # see if this buddy has been skipped in another save, before
+                    if not hasattr(func, '_skipped_code'):
+                        return self.skipped_function(func,
+                            'not found in translator\'s flowgraphs')
+        else:
+            if (func.func_doc and
+                func.func_doc.lstrip().startswith('NOT_RPYTHON')):
+                return self.skipped_function(func, 'tagged as NOT_RPYTHON')
+        try:
+            return self.save_global(func)
+        except PicklingError:
+            pass
+        args = (func.func_code, func.func_globals, func.func_name,
+                func.func_defaults, func.func_closure)
+        argstr = self.nameofargs(args)
+        if hasattr(func, '_skipped_code'):
+            name = self.memoize_unique(func, func.__name__)
+        else:
+            name = self.memoize_unique(func, 'gfunc_' + func.__name__)
+        self.produce('%s = new.function(%s)' % (name, argstr) )
+        if func.__dict__:
+            def initfunction():
+                items = func.__dict__.items()
+                items.sort()
+                for k, v in items:
+                    try:
+                        yield '%s.%s = %s' % (name, k, self.nameof(v))
+                    except PicklingError:
+                        pass
+            for line in initfunction():
+                self.produce(line)
+        return name
+
+    def nameof_cell(self, cel):
+        # no need to name cells; the object they contain is what is shared.
+        obj = break_cell(cel)
+        return '%s(%s)' % (self.nameof(make_cell), self.nameof(obj))
+
+    def nameof_property(self, prop):
+        argstr = self.nameofargs((prop.fget, prop.fset, prop.fdel,
+                                  prop.__doc__))
+        name = self.memoize_unique(prop, 'gprop_')
+        self.produce('%s = property(%s)' % (name, argstr) )
+        return name
+
+    def nameof_code(self, code):
+        args = (code.co_argcount, code.co_nlocals, code.co_stacksize,
+                code.co_flags, code.co_code, code.co_consts, code.co_names,
+                code.co_varnames, code.co_filename, code.co_name,
+                code.co_firstlineno, code.co_lnotab, code.co_freevars,
+                code.co_cellvars)
+        argstr = self.nameofargs(args)
+        name = self.memoize_unique(code, 'gcode_' + code.co_name)
+        self.produce('%s = new.code(%s)' % (name, argstr))
+        return name
+
+    def nameof_file(self, fil):
+        if fil is sys.stdin:  return "sys.stdin"
+        if fil is sys.stdout: return "sys.stdout"
+        if fil is sys.stderr: return "sys.stderr"
+        raise Exception, 'Cannot translate an already-open file: %r' % (fil,)
+
+    def nameof_methodwrapper(self, wp):
+        # this object should be enhanced in CPython!
+        msg = '%r: method %s of unknown object cannot be reconstructed' % (
+            wp, wp.__name__)
+        return self.skipped_function(wp, msg)
+
+    def nameof_weakref(self, value):
+        # no need to name weakrefs; the referent is what matters.
+        # obtain the referenced object by calling the weakref
+        obj = value()
+        return '%s(%s)' % (self.nameof(type(value)), self.nameof(obj))
+
+
+def make_cell(obj):
+    def func():
+        return obj
+    return func.func_closure[0]
+
+def break_cell(cel):
+    obj = None
+    def func():
+        return obj
+    args = (func.func_code, func.func_globals, func.func_name,
+            func.func_defaults, (cel,))
+    func = new.function(*args)
+    return func()
+
+# some shortcuts, to make the pickle smaller
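+# _rec and _set are referenced by name from the generated source (via nameof),
+# while _get is only used at generation time to capture a __slots__ state.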
+
+def _rec(klass, base=object, state=None):
+    return _reconstructor(klass, base, state)
+
+def _get(obj):
+    return slotted.__getstate__(obj)
+
+def _set(obj, *args):
+    slotted.__setstate__(obj, args)
+
+__all__ = ['GenPickle']
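
The class above emits plain Python source which, when executed, rebuilds the
pickled objects and leaves a 'result' dictionary behind.  A minimal driver
might look like the sketch below; the GenPickle constructor signature and the
module path are assumptions here, only pickle(), finish(), nameof() and the
initcode list are visible in the diff above.

    # hypothetical driver -- GenPickle(translator) is an assumed signature,
    # not something this commit shows
    from pypy.translator.pickle.genpickle import GenPickle

    def snapshot(translator, **objects):
        gen = GenPickle(translator)     # assumption: takes the translator
        gen.pickle(**objects)           # names each object, stores it in gen.result
        gen.finish()                    # emits "result = ..." and closes any writer
        return ''.join(gen.initcode)    # generated source when no writer is set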

Modified: pypy/dist/pypy/translator/test/test_geninterp.py
==============================================================================
--- pypy/dist/pypy/translator/test/test_geninterp.py	(original)
+++ pypy/dist/pypy/translator/test/test_geninterp.py	Tue Jan 24 13:31:23 2006
@@ -1,273 +1,273 @@
-"""
-Description
-_____________________________
-
-This test is almost a copy of test_genc.py
-The setup code is slightly different:
-Instead of compiling single functions from
-snippets.py, almost all of snippets is translated,
-up to the point where they are untranslatable.
-snippets has been slightly re-ordered for that.
-
-The idea was to create a couple of tests without much
-extra work, in a sense derived from the test_genc.
-
-A problem with that is, that the tests actually should
-be run at application level. The test code checks real
-Python values,so we have to do tricks to unwrap things.
-This is limited:
-Some tests cannot work, since they mutate their arguments.
-Some tests operate with un-unwrappable things.
-Those are disabled for the moment by an 'needapp_' prefix.
-
-XXX think about a way to produce more tests from a common
-XXX basis. Should we write generators for such tests like this?
-"""
-import autopath
-import py
-from pypy.tool.udir import udir
-from pypy.objspace.flow.model import *
-from pypy.translator.tool.cbuild import make_module_from_c
-from pypy.translator.tool.cbuild import skip_missing_compiler
-from pypy.translator.geninterplevel import translate_as_module
-from pypy.translator.test import snippet 
-from pypy.interpreter.error import OperationError
-from py.code import Source
-
-class TestGenRpyTestCase:
-    objspacename = 'std'
-
-    snippet_ad = """if 1:
-        def import_func():
-            import copy_reg
-            return copy_reg._reconstructor.func_code.co_name
-
-        def import_sys_func():
-            import sys
-            return sys.__name__
-"""
-
-    def setup_class(cls): 
-        # simply compile snippets just once
-        src = str(Source(snippet))
-        # truncate non-compilable stuff for now:
-        p = src.index('Non compilable Functions')
-        src = src[:p] + '\n'
-        # put our ad into snippet
-        exec cls.snippet_ad in snippet.__dict__
-        src += cls.snippet_ad
-        # just in case of trouble, we produce a tempfile
-        ini, newsrc = translate_as_module(src, tmpname = str(
-            udir.join("_geninterp_test.py")))
-        cls.w_glob = ini(cls.space)
-
-    def build_interpfunc(self, func, *morefuncs):
-        # we ignore morefuncs, since they live in snippets
-        space = self.space
-        func = space.getitem(self.w_glob, space.wrap(func.__name__))
-        def wrapunwrap(*args):
-            w_args = space.wrap(args)
-            try:
-                w_res = space.call(func, w_args)
-            except OperationError, e:
-                w_typ = e.w_type
-                # XXX how to unwrap an exception?
-                name = space.unwrap(space.getattr(w_typ, space.wrap('__name__')))
-                exc = __builtins__[name]
-                raise exc
-            return space.unwrap(w_res)
-        return wrapunwrap
-
-    # small addition to see whether imports look fine
-    def test_import(self):
-        import copy_reg
-        impfunc = self.build_interpfunc(snippet.import_func)
-        assert impfunc() == '_reconstructor'
-
-    def test_import_sys(self):
-        impfunc = self.build_interpfunc(snippet.import_sys_func)
-        assert impfunc() == 'sys'
-        
-    def test_simple_func(self):
-        cfunc = self.build_interpfunc(snippet.simple_func)
-        assert cfunc(1) == 2
-
-    def test_while_func(self):
-        while_func = self.build_interpfunc(snippet.while_func)
-        assert while_func(10) == 55
-
-    def test_nested_whiles(self):
-        nested_whiles = self.build_interpfunc(snippet.nested_whiles)
-        assert nested_whiles(111, 114) == (
-                          '...!...!...!...!...!')
-
-    def test_poor_man_range(self):
-        poor_man_range = self.build_interpfunc(snippet.poor_man_range)
-        assert poor_man_range(10) == range(10)
-
-    def poor_man_rev_range(self):
-        poor_man_rev_range = self.build_interpfunc(snippet.poor_man_rev_range)
-        assert poor_man_rev_range(10) == range(9,-1,-1)
-
-    def test_simple_id(self):
-        #we just want to see, if renaming of parameter works correctly
-        #if the first branch is the end branch
-        simple_id = self.build_interpfunc(snippet.simple_id)
-        assert simple_id(9) == 9
-
-    def test_branch_id(self):
-        branch_id = self.build_interpfunc(snippet.branch_id)
-        assert branch_id(1, 2, 3) == 2
-        assert branch_id(0, 2, 3) == 3
-
-    def test_int_id(self):
-        int_id = self.build_interpfunc(snippet.int_id)
-        assert int_id(3) == 3
-
-    def dont_test_attrs(self):
-        attrs = self.build_interpfunc(snippet.attrs)
-        assert attrs() == 9
-
-    def test_builtinusage(self):
-        fun = self.build_interpfunc(snippet.builtinusage)
-        assert fun() == 4
-
-    def xpensive_test_sieve(self):
-        sieve = self.build_interpfunc(snippet.sieve_of_eratosthenes)
-        assert sieve() == 1028
-
-    def test_slice(self):
-        half = self.build_interpfunc(snippet.half_of_n)
-        assert half(10) == 5
-
-    def test_poly_branch(self):
-        poly_branch = self.build_interpfunc(snippet.poly_branch)
-        assert poly_branch(10) == [1,2,3]*2
-        assert poly_branch(0) == ['a','b','c']*2
-
-    def test_and(self):
-        sand = self.build_interpfunc(snippet.s_and)
-        assert sand(5, 6) == "yes"
-        assert sand(5, 0) == "no"
-        assert sand(0, 6) == "no"
-        assert sand(0, 0) == "no"
-
-    def test_yast(self):
-        yast = self.build_interpfunc(snippet.yast)
-        assert yast([1000,100,10,1]) == 1111
-        assert yast(range(100)) == (99*100)/2
-
-    def test_with_init(self):
-        with_init = self.build_interpfunc(snippet.with_init)
-        assert with_init(0) == 0
-        assert with_init(-100) == -100
-
-    def test_with_more_init(self):
-        with_more_init = self.build_interpfunc(snippet.with_more_init)
-        assert with_more_init(10, False) == -10
-        assert with_more_init(20, True) == 20
-
-    def needapp_test_global_instance(self):
-        global_instance = self.build_interpfunc(snippet.global_instance)
-        assert global_instance() == 42
-
-    def needapp_test_global_newstyle_instance(self):
-        global_newstyle_instance = self.build_interpfunc(snippet.global_newstyle_instance)
-        assert global_newstyle_instance().a == 1
-
-    def needapp_test_global_recursive_list(self):
-        global_recursive_list = self.build_interpfunc(snippet.global_recursive_list)
-        lst = global_recursive_list()
-        assert len(lst) == 1
-        assert lst[0] is lst
-
-##     def test_global_badinit(self):
-##         global_badinit = self.build_interpfunc(snippet.global_badinit)
-##         self.assertEquals(global_badinit(), 1)
-
-    def test_multiple_inheritance(self):
-        multiple_inheritance = self.build_interpfunc(snippet.multiple_inheritance)
-        assert multiple_inheritance() == 1+2+3+4
-
-    def test_call_star_args(self):
-        call_star_args = self.build_interpfunc(snippet.call_star_args)
-        assert call_star_args(42) == 52
-
-    def test_call_default_args(self):
-        call_default_args = self.build_interpfunc(snippet.call_default_args)
-        assert call_default_args(42) == 111+42+3
-
-    def test_call_default_and_star_args(self):
-        call_default_and_star_args = self.build_interpfunc(
-            snippet.call_default_and_star_args)
-        assert call_default_and_star_args(42) == (
-                          (111+42+3+0, -1000-2000-3000+2))
-
-    def test_call_with_star(self):
-        call_with_star = self.build_interpfunc(snippet.call_with_star)
-        assert call_with_star(()) == -15L
-        assert call_with_star((4,)) == -13L
-        assert call_with_star((4,7)) == -9L
-        assert call_with_star([]) == -15L
-        assert call_with_star([4]) == -13L
-        assert call_with_star([4,7]) == -9L
-        raises(TypeError, call_with_star, (4,7,12))
-        raises(TypeError, call_with_star, [4,7,12,63])
-        raises(TypeError, call_with_star, 521)
-
-    def test_call_with_keyword(self):
-        call_with_keyword = self.build_interpfunc(snippet.call_with_keyword)
-        assert call_with_keyword(100) == 82
-
-    def test_call_very_complex(self):
-        call_very_complex = self.build_interpfunc(snippet.call_very_complex,
-                                             snippet.default_args)
-        assert call_very_complex(5, (3,), {}) == -12
-        assert call_very_complex(5, (), {'y': 3}) == -12
-        raises(TypeError, call_very_complex, 5, (3,), {'y': 4})
-
-    def test_finallys(self):
-        finallys = self.build_interpfunc(snippet.finallys)
-        assert finallys(['hello']) == 8
-        assert finallys('X') == 8
-        assert finallys([]) == 6
-        assert finallys('XY') == 6
-
-    def needapp_test_finally2(self):
-        finally2 = self.build_interpfunc(snippet.finally2)
-        lst = range(10)
-        finally2(lst, 5)
-        assert lst == [0,1,2,3,4, 6, 6,7,8, 'done']
-        dic = {}
-        raises(KeyError, finally2, dic, "won't find this key")
-        assert dic == {-1: 'done'}
-
-    def test_bare_raise(self):
-        bare_raise = self.build_interpfunc(snippet.bare_raise)
-        assert bare_raise(range(0, 100, 10), False) == 50
-        assert bare_raise(range(0, 100, 10), True) == 50
-        raises(IndexError, bare_raise, range(0, 30, 10), False)
-        assert bare_raise(range(0, 30, 10), True) == None
-
-    def needapp_test_get_set_del_slice(self):
-        fn = self.build_interpfunc(snippet.get_set_del_slice)
-        l = list('abcdefghij')
-        result = fn(l)
-        assert l == [3, 'c', 8, 11, 'h', 9]
-        assert result == ([3, 'c'], [9], [11, 'h'])
-
-    def test_do_try_raise_choose(self):
-        fn = self.build_interpfunc(snippet.do_try_raise_choose)
-        result = fn()
-        assert result == [-1,0,1,2]
-
-
-    def test_t_isinstance(self):
-        fn = self.build_interpfunc(snippet.t_isinstance)
-        result = fn(1, 2)
-        assert result == True
-
-    def test_t_issubclass(self):
-        fn = self.build_interpfunc(snippet.t_issubclass)
-        result = fn(1, 2)
-        assert result == True        
+"""
+Description
+_____________________________
+
+This test is almost a copy of test_genc.py.
+The setup code is slightly different:
+instead of compiling single functions from
+snippets.py, almost all of snippets is translated,
+up to the point where the functions become
+untranslatable.  snippets.py has been slightly
+re-ordered to allow this.
+
+The idea was to create a couple of tests without much
+extra work, derived in a sense from test_genc.
+
+One problem with that is that the tests really should
+be run at application level.  The test code checks real
+Python values, so we have to do tricks to unwrap things.
+This is limited:
+some tests cannot work, since they mutate their arguments;
+some tests operate on things that cannot be unwrapped.
+Those are disabled for the moment by a 'needapp_' prefix.
+
+XXX think about a way to produce more tests from a common
+XXX basis. Should we write generators for such tests like this?
+"""
+import autopath
+import py
+from pypy.tool.udir import udir
+from pypy.objspace.flow.model import *
+from pypy.translator.tool.cbuild import make_module_from_c
+from pypy.translator.tool.cbuild import skip_missing_compiler
+from pypy.translator.geninterplevel import translate_as_module
+from pypy.translator.test import snippet 
+from pypy.interpreter.error import OperationError
+from py.code import Source
+
+class TestGenRpyTestCase:
+    objspacename = 'std'
+
+    snippet_ad = """if 1:
+        def import_func():
+            import copy_reg
+            return copy_reg._reconstructor.func_code.co_name
+
+        def import_sys_func():
+            import sys
+            return sys.__name__
+"""
+
+    def setup_class(cls): 
+        # simply compile snippets just once
+        src = str(Source(snippet))
+        # truncate non-compilable stuff for now:
+        p = src.index('Non compilable Functions')
+        src = src[:p] + '\n'
+        # inject our addition (snippet_ad) into the snippet module
+        exec cls.snippet_ad in snippet.__dict__
+        src += cls.snippet_ad
+        # just in case of trouble, we produce a tempfile
+        ini, newsrc = translate_as_module(src, tmpname = str(
+            udir.join("_geninterp_test.py")))
+        cls.w_glob = ini(cls.space)
+
+    def build_interpfunc(self, func, *morefuncs):
+        # we ignore morefuncs, since they live in snippets
+        space = self.space
+        func = space.getitem(self.w_glob, space.wrap(func.__name__))
+        def wrapunwrap(*args):
+            w_args = space.wrap(args)
+            try:
+                w_res = space.call(func, w_args)
+            except OperationError, e:
+                w_typ = e.w_type
+                # XXX how to unwrap an exception?
+                name = space.unwrap(space.getattr(w_typ, space.wrap('__name__')))
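+                # re-raise the matching built-in exception at test level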
+                exc = __builtins__[name]
+                raise exc
+            return space.unwrap(w_res)
+        return wrapunwrap
+
+    # small addition to see whether imports look fine
+    def test_import(self):
+        import copy_reg
+        impfunc = self.build_interpfunc(snippet.import_func)
+        assert impfunc() == '_reconstructor'
+
+    def test_import_sys(self):
+        impfunc = self.build_interpfunc(snippet.import_sys_func)
+        assert impfunc() == 'sys'
+        
+    def test_simple_func(self):
+        cfunc = self.build_interpfunc(snippet.simple_func)
+        assert cfunc(1) == 2
+
+    def test_while_func(self):
+        while_func = self.build_interpfunc(snippet.while_func)
+        assert while_func(10) == 55
+
+    def test_nested_whiles(self):
+        nested_whiles = self.build_interpfunc(snippet.nested_whiles)
+        assert nested_whiles(111, 114) == (
+                          '...!...!...!...!...!')
+
+    def test_poor_man_range(self):
+        poor_man_range = self.build_interpfunc(snippet.poor_man_range)
+        assert poor_man_range(10) == range(10)
+
+    def poor_man_rev_range(self):
+        poor_man_rev_range = self.build_interpfunc(snippet.poor_man_rev_range)
+        assert poor_man_rev_range(10) == range(9,-1,-1)
+
+    def test_simple_id(self):
+        # we just want to see whether parameter renaming works correctly
+        # when the first branch is the end branch
+        simple_id = self.build_interpfunc(snippet.simple_id)
+        assert simple_id(9) == 9
+
+    def test_branch_id(self):
+        branch_id = self.build_interpfunc(snippet.branch_id)
+        assert branch_id(1, 2, 3) == 2
+        assert branch_id(0, 2, 3) == 3
+
+    def test_int_id(self):
+        int_id = self.build_interpfunc(snippet.int_id)
+        assert int_id(3) == 3
+
+    def dont_test_attrs(self):
+        attrs = self.build_interpfunc(snippet.attrs)
+        assert attrs() == 9
+
+    def test_builtinusage(self):
+        fun = self.build_interpfunc(snippet.builtinusage)
+        assert fun() == 4
+
+    def xpensive_test_sieve(self):
+        sieve = self.build_interpfunc(snippet.sieve_of_eratosthenes)
+        assert sieve() == 1028
+
+    def test_slice(self):
+        half = self.build_interpfunc(snippet.half_of_n)
+        assert half(10) == 5
+
+    def test_poly_branch(self):
+        poly_branch = self.build_interpfunc(snippet.poly_branch)
+        assert poly_branch(10) == [1,2,3]*2
+        assert poly_branch(0) == ['a','b','c']*2
+
+    def test_and(self):
+        sand = self.build_interpfunc(snippet.s_and)
+        assert sand(5, 6) == "yes"
+        assert sand(5, 0) == "no"
+        assert sand(0, 6) == "no"
+        assert sand(0, 0) == "no"
+
+    def test_yast(self):
+        yast = self.build_interpfunc(snippet.yast)
+        assert yast([1000,100,10,1]) == 1111
+        assert yast(range(100)) == (99*100)/2
+
+    def test_with_init(self):
+        with_init = self.build_interpfunc(snippet.with_init)
+        assert with_init(0) == 0
+        assert with_init(-100) == -100
+
+    def test_with_more_init(self):
+        with_more_init = self.build_interpfunc(snippet.with_more_init)
+        assert with_more_init(10, False) == -10
+        assert with_more_init(20, True) == 20
+
+    def needapp_test_global_instance(self):
+        global_instance = self.build_interpfunc(snippet.global_instance)
+        assert global_instance() == 42
+
+    def needapp_test_global_newstyle_instance(self):
+        global_newstyle_instance = self.build_interpfunc(snippet.global_newstyle_instance)
+        assert global_newstyle_instance().a == 1
+
+    def needapp_test_global_recursive_list(self):
+        global_recursive_list = self.build_interpfunc(snippet.global_recursive_list)
+        lst = global_recursive_list()
+        assert len(lst) == 1
+        assert lst[0] is lst
+
+##     def test_global_badinit(self):
+##         global_badinit = self.build_interpfunc(snippet.global_badinit)
+##         self.assertEquals(global_badinit(), 1)
+
+    def test_multiple_inheritance(self):
+        multiple_inheritance = self.build_interpfunc(snippet.multiple_inheritance)
+        assert multiple_inheritance() == 1+2+3+4
+
+    def test_call_star_args(self):
+        call_star_args = self.build_interpfunc(snippet.call_star_args)
+        assert call_star_args(42) == 52
+
+    def test_call_default_args(self):
+        call_default_args = self.build_interpfunc(snippet.call_default_args)
+        assert call_default_args(42) == 111+42+3
+
+    def test_call_default_and_star_args(self):
+        call_default_and_star_args = self.build_interpfunc(
+            snippet.call_default_and_star_args)
+        assert call_default_and_star_args(42) == (
+                          (111+42+3+0, -1000-2000-3000+2))
+
+    def test_call_with_star(self):
+        call_with_star = self.build_interpfunc(snippet.call_with_star)
+        assert call_with_star(()) == -15L
+        assert call_with_star((4,)) == -13L
+        assert call_with_star((4,7)) == -9L
+        assert call_with_star([]) == -15L
+        assert call_with_star([4]) == -13L
+        assert call_with_star([4,7]) == -9L
+        raises(TypeError, call_with_star, (4,7,12))
+        raises(TypeError, call_with_star, [4,7,12,63])
+        raises(TypeError, call_with_star, 521)
+
+    def test_call_with_keyword(self):
+        call_with_keyword = self.build_interpfunc(snippet.call_with_keyword)
+        assert call_with_keyword(100) == 82
+
+    def test_call_very_complex(self):
+        call_very_complex = self.build_interpfunc(snippet.call_very_complex,
+                                             snippet.default_args)
+        assert call_very_complex(5, (3,), {}) == -12
+        assert call_very_complex(5, (), {'y': 3}) == -12
+        raises(TypeError, call_very_complex, 5, (3,), {'y': 4})
+
+    def test_finallys(self):
+        finallys = self.build_interpfunc(snippet.finallys)
+        assert finallys(['hello']) == 8
+        assert finallys('X') == 8
+        assert finallys([]) == 6
+        assert finallys('XY') == 6
+
+    def needapp_test_finally2(self):
+        finally2 = self.build_interpfunc(snippet.finally2)
+        lst = range(10)
+        finally2(lst, 5)
+        assert lst == [0,1,2,3,4, 6, 6,7,8, 'done']
+        dic = {}
+        raises(KeyError, finally2, dic, "won't find this key")
+        assert dic == {-1: 'done'}
+
+    def test_bare_raise(self):
+        bare_raise = self.build_interpfunc(snippet.bare_raise)
+        assert bare_raise(range(0, 100, 10), False) == 50
+        assert bare_raise(range(0, 100, 10), True) == 50
+        raises(IndexError, bare_raise, range(0, 30, 10), False)
+        assert bare_raise(range(0, 30, 10), True) == None
+
+    def needapp_test_get_set_del_slice(self):
+        fn = self.build_interpfunc(snippet.get_set_del_slice)
+        l = list('abcdefghij')
+        result = fn(l)
+        assert l == [3, 'c', 8, 11, 'h', 9]
+        assert result == ([3, 'c'], [9], [11, 'h'])
+
+    def test_do_try_raise_choose(self):
+        fn = self.build_interpfunc(snippet.do_try_raise_choose)
+        result = fn()
+        assert result == [-1,0,1,2]
+
+
+    def test_t_isinstance(self):
+        fn = self.build_interpfunc(snippet.t_isinstance)
+        result = fn(1, 2)
+        assert result == True
+
+    def test_t_issubclass(self):
+        fn = self.build_interpfunc(snippet.t_issubclass)
+        result = fn(1, 2)
+        assert result == True        

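The docstring in the new test file above describes a wrap/unwrap round-trip between application-level and interpreter-level values.  A minimal sketch of that pattern, using only API calls that already appear in this commit (the helper run_translated and the tiny example source are hypothetical, for illustration only):

    from pypy.objspace.std import Space
    from pypy.translator.geninterplevel import translate_as_module

    def run_translated(src, funcname, *args):
        # translate the application-level source into an interp-level module
        init, _ = translate_as_module(src)
        space = Space()
        w_glob = init(space)                    # wrapped module dict
        w_func = space.getitem(w_glob, space.wrap(funcname))
        w_res = space.call(w_func, space.wrap(args))
        return space.unwrap(w_res)              # back to a plain Python value

    # e.g. run_translated("def double(x): return x * 2\n", "double", 21) == 42

Unlike this sketch, build_interpfunc above also maps OperationErrors raised at interp-level back to the corresponding built-in exceptions.
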
Modified: pypy/dist/pypy/translator/test/test_rpystone.py
==============================================================================
--- pypy/dist/pypy/translator/test/test_rpystone.py	(original)
+++ pypy/dist/pypy/translator/test/test_rpystone.py	Tue Jan 24 13:31:23 2006
@@ -1,20 +1,20 @@
-from pypy.translator.geninterplevel import translate_as_module, __file__ as __
-from pypy.objspace.std import Space
-import os
-fname = os.path.join(os.path.dirname(__), "test", "rpystone.py")
-src = file(fname).read()
-init, ign = translate_as_module(src)#, tmpname="/tmp/look.py")
-
-LOOPS = 25
-
-def test_rpystone():
-    space = Space()
-    modic = init(space)
-    entry = space.getitem(modic, space.wrap("entrypoint"))
-    # warm-up,to get everything translated
-    space.call(entry, space.newtuple([space.wrap(-1)]))
-    # now this is the real one
-    space.call(entry, space.newtuple([space.wrap(LOOPS)]))
-
-if __name__ == "__main__":
+from pypy.translator.geninterplevel import translate_as_module, __file__ as __
+from pypy.objspace.std import Space
+import os
+fname = os.path.join(os.path.dirname(__), "test", "rpystone.py")
+src = file(fname).read()
+init, ign = translate_as_module(src)#, tmpname="/tmp/look.py")
+
+LOOPS = 25
+
+def test_rpystone():
+    space = Space()
+    modic = init(space)
+    entry = space.getitem(modic, space.wrap("entrypoint"))
+    # warm-up, to get everything translated
+    space.call(entry, space.newtuple([space.wrap(-1)]))
+    # now this is the real one
+    space.call(entry, space.newtuple([space.wrap(LOOPS)]))
+
+if __name__ == "__main__":
     test_rpystone()
\ No newline at end of file


